diff --git a/.bumpversion.cfg b/.bumpversion.cfg index 98405c36a028..d1b1652196e6 100644 --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 0.39.17-alpha +current_version = 0.39.28-alpha commit = False tag = False parse = (?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)(\-[a-z]+)? diff --git a/.env b/.env index 4b7bcf5c6d5d..4d724fcbead3 100644 --- a/.env +++ b/.env @@ -10,7 +10,7 @@ ### SHARED ### -VERSION=0.39.17-alpha +VERSION=0.39.28-alpha # When using the airbyte-db via default docker image CONFIG_ROOT=/data @@ -70,10 +70,9 @@ JOB_MAIN_CONTAINER_MEMORY_LIMIT= ### LOGGING/MONITORING/TRACKING ### TRACKING_STRATEGY=segment +JOB_ERROR_REPORTING_STRATEGY=logging # Although not present as an env var, expected by Log4J configuration. LOG_LEVEL=INFO -# Although not present as an env var, helps Airbyte track job healthiness. -SENTRY_DSN="https://d4b03de0c4574c78999b8d58e55243dc@o1009025.ingest.sentry.io/6102835" ### APPLICATIONS ### @@ -83,6 +82,11 @@ MAX_SYNC_WORKERS=5 MAX_SPEC_WORKERS=5 MAX_CHECK_WORKERS=5 MAX_DISCOVER_WORKERS=5 +# Temporal Activity configuration +ACTIVITY_MAX_ATTEMPT= +ACTIVITY_INITIAL_DELAY_BETWEEN_ATTEMPTS_SECONDS= +ACTIVITY_MAX_DELAY_BETWEEN_ATTEMPTS_SECONDS= +WORKFLOW_FAILURE_RESTART_DELAY_SECONDS= ### FEATURE FLAGS ### diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index 723b0b026802..69ac4bb6be35 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -4,3 +4,13 @@ ## Exclude the package(-lock).json from code ownership to prevent version bump PRs from triggering codeowners review /airbyte-webapp/package.json /airbyte-webapp/package-lock.json + +# CDK and SAT +/airbyte-cdk/ @airbytehq/api-connectors-dx +/airbyte-integrations/bases/source-acceptance-tests/ @airbytehq/api-connectors-dx +/airbyte-integrations/connector-templates/ @airbytehq/api-connectors-dx + + +# Protocol related items +/airbyte-protocol/ @airbytehq/protocol-reviewers +/docs/understanding-airbyte/airbyte-protocol.md @airbytehq/protocol-reviewers 
diff --git a/.github/actions/build-branch/action.yml b/.github/actions/build-branch/action.yml index d2fe11646adb..fff73d4b55c4 100644 --- a/.github/actions/build-branch/action.yml +++ b/.github/actions/build-branch/action.yml @@ -24,9 +24,9 @@ runs: with: java-version: "17" - - uses: actions/setup-node@v1 + - uses: actions/setup-node@v2 with: - node-version: "16.13.0" + node-version: "lts/gallium" - name: Set up CI Gradle Properties run: | diff --git a/.github/actions/start-aws-runner/action.yml b/.github/actions/start-aws-runner/action.yml index 7f268783fc60..c3b94df610b6 100644 --- a/.github/actions/start-aws-runner/action.yml +++ b/.github/actions/start-aws-runner/action.yml @@ -41,7 +41,7 @@ runs: aws-region: us-east-2 - name: Start EC2 runner id: start-ec2-runner - uses: supertopher/ec2-github-runner@base64v1.0.10 + uses: airbytehq/ec2-github-runner@base64v1.1.0 with: mode: start github-token: ${{ inputs.github-token }} @@ -49,6 +49,9 @@ runs: ec2-instance-type: ${{ inputs.ec2-instance-type }} subnet-id: ${{ inputs.subnet-id }} security-group-id: ${{ inputs.security-group-id }} + # this adds a label to group any EC2 runners spun up within the same action run + # this enables creating a pool of runners to run multiple/matrix jobs on in parallel + label: runner-pool-${{ github.run_id }} aws-resource-tags: > [ {"Key": "BuildType", "Value": "oss"}, diff --git a/.github/workflows/create-release.yml b/.github/workflows/create-release.yml index dfc267a57834..2cb8dfea3d49 100644 --- a/.github/workflows/create-release.yml +++ b/.github/workflows/create-release.yml @@ -27,9 +27,10 @@ jobs: -H "Accept: application/vnd.github.v3+json" \ -H "Authorization: Bearer ${{ secrets.GITHUB_TOKEN }}" \ https://api.github.com/repos/${{ github.repository }}/commits/$COMMIT_ID/pulls) - # the printf helps escape characters so that jq can parse the output. - # the sed removes carriage returns so that the body is easier to parse later. 
- PR_BODY=$(printf '%s' "$PR" | jq '.[0].body' | sed 's/\\r//g') + # The printf helps escape characters so that jq can parse the output. + # The sed removes carriage returns so that the body is easier to parse later, and + # escapes backticks so that they are not executed as commands. + PR_BODY=$(printf '%s' "$PR" | jq '.[0].body' | sed 's/\\r//g' | sed 's/`/\\`/g') echo ::set-output name=pr_body::${PR_BODY} - name: Extract Changelog id: extract_changelog diff --git a/.github/workflows/fe-validate-links.yml b/.github/workflows/fe-validate-links.yml index d16227da7e90..5ade0e19cee2 100644 --- a/.github/workflows/fe-validate-links.yml +++ b/.github/workflows/fe-validate-links.yml @@ -18,9 +18,9 @@ jobs: with: java-version: "17" - - uses: actions/setup-node@v1 + - uses: actions/setup-node@v2 with: - node-version: "16.13.0" + node-version: "lts/gallium" - name: Set up CI Gradle Properties run: | diff --git a/.github/workflows/gke-kube-test-command.yml b/.github/workflows/gke-kube-test-command.yml index 72c5054fdfe6..9c317d052e05 100644 --- a/.github/workflows/gke-kube-test-command.yml +++ b/.github/workflows/gke-kube-test-command.yml @@ -78,9 +78,9 @@ jobs: with: java-version: "17" - - uses: actions/setup-node@v1 + - uses: actions/setup-node@v2 with: - node-version: "16.13.0" + node-version: "lts/gallium" - name: Fix EC-2 Runner run: | diff --git a/.github/workflows/gradle.yml b/.github/workflows/gradle.yml index 3566372ccbfc..e85eb8665410 100644 --- a/.github/workflows/gradle.yml +++ b/.github/workflows/gradle.yml @@ -193,9 +193,9 @@ jobs: with: java-version: "17" - - uses: actions/setup-node@v1 + - uses: actions/setup-node@v2 with: - node-version: "16.13.0" + node-version: "lts/gallium" - uses: actions/setup-python@v2 with: @@ -304,9 +304,9 @@ jobs: with: java-version: "17" - - uses: actions/setup-node@v1 + - uses: actions/setup-node@v2 with: - node-version: "16.13.0" + node-version: "lts/gallium" - name: Set up CI Gradle Properties run: | @@ -343,9 +343,9 @@ jobs: 
with: java-version: "17" - - uses: actions/setup-node@v1 + - uses: actions/setup-node@v2 with: - node-version: "16.13.0" + node-version: "lts/gallium" - name: Set up CI Gradle Properties run: | @@ -441,9 +441,9 @@ jobs: with: java-version: "17" - - uses: actions/setup-node@v1 + - uses: actions/setup-node@v2 with: - node-version: "16.13.0" + node-version: "lts/gallium" - name: Set up CI Gradle Properties run: | @@ -569,9 +569,9 @@ jobs: with: java-version: "17" - - uses: actions/setup-node@v1 + - uses: actions/setup-node@v2 with: - node-version: "16.13.0" + node-version: "lts/gallium" - name: Fix EC-2 Runner run: | diff --git a/.github/workflows/publish-command.yml b/.github/workflows/publish-command.yml index 22890633a869..afc297e67df8 100644 --- a/.github/workflows/publish-command.yml +++ b/.github/workflows/publish-command.yml @@ -13,10 +13,6 @@ on: connector: description: "Airbyte Connector" required: true - run-tests: - description: "Should run tests when publishing" - required: true - default: "true" comment-id: description: "The comment-id of the slash command. Used to update the comment with the status." required: false @@ -24,6 +20,10 @@ on: description: "after publishing, the workflow will automatically bump the connector version in definitions and generate seed spec" required: true default: "true" + parallel: + description: "Switching this to true will spin up 5 build agents instead of 1 and allow multi connector publishes to run in parallel" + required: true + default: "false" jobs: find_valid_pat: @@ -45,8 +45,8 @@ jobs: ${{ secrets.DAVINCHIA_PAT }} ## Gradle Build # In case of self-hosted EC2 errors, remove this block. 
- start-publish-image-runner: - name: Start Build EC2 Runner + start-publish-image-runner-0: + name: Start Build EC2 Runner 0 runs-on: ubuntu-latest needs: find_valid_pat outputs: @@ -65,19 +65,154 @@ jobs: aws-access-key-id: ${{ secrets.SELF_RUNNER_AWS_ACCESS_KEY_ID }} aws-secret-access-key: ${{ secrets.SELF_RUNNER_AWS_SECRET_ACCESS_KEY }} github-token: ${{ needs.find_valid_pat.outputs.pat }} - publish-image: - timeout-minutes: 240 - needs: start-publish-image-runner - runs-on: ${{ needs.start-publish-image-runner.outputs.label }} - environment: more-secrets + label: ${{ github.run_id }}-publisher + start-publish-image-runner-1: + if: github.event.inputs.parallel == 'true' && success() + name: Start Build EC2 Runner 1 + runs-on: ubuntu-latest + needs: find_valid_pat + outputs: + label: ${{ steps.start-ec2-runner.outputs.label }} + ec2-instance-id: ${{ steps.start-ec2-runner.outputs.ec2-instance-id }} + steps: + - name: Checkout Airbyte + uses: actions/checkout@v2 + with: + repository: ${{ github.event.inputs.repo }} + ref: ${{ github.event.inputs.gitref }} + - name: Start AWS Runner + id: start-ec2-runner + uses: ./.github/actions/start-aws-runner + with: + aws-access-key-id: ${{ secrets.SELF_RUNNER_AWS_ACCESS_KEY_ID }} + aws-secret-access-key: ${{ secrets.SELF_RUNNER_AWS_SECRET_ACCESS_KEY }} + github-token: ${{ needs.find_valid_pat.outputs.pat }} + label: ${{ github.run_id }}-publisher + start-publish-image-runner-2: + if: github.event.inputs.parallel == 'true' && success() + name: Start Build EC2 Runner 2 + runs-on: ubuntu-latest + needs: find_valid_pat + outputs: + label: ${{ steps.start-ec2-runner.outputs.label }} + ec2-instance-id: ${{ steps.start-ec2-runner.outputs.ec2-instance-id }} + steps: + - name: Checkout Airbyte + uses: actions/checkout@v2 + with: + repository: ${{ github.event.inputs.repo }} + ref: ${{ github.event.inputs.gitref }} + - name: Start AWS Runner + id: start-ec2-runner + uses: ./.github/actions/start-aws-runner + with: + 
aws-access-key-id: ${{ secrets.SELF_RUNNER_AWS_ACCESS_KEY_ID }} + aws-secret-access-key: ${{ secrets.SELF_RUNNER_AWS_SECRET_ACCESS_KEY }} + github-token: ${{ needs.find_valid_pat.outputs.pat }} + label: ${{ github.run_id }}-publisher + start-publish-image-runner-3: + if: github.event.inputs.parallel == 'true' && success() + name: Start Build EC2 Runner 3 + runs-on: ubuntu-latest + needs: find_valid_pat + outputs: + label: ${{ steps.start-ec2-runner.outputs.label }} + ec2-instance-id: ${{ steps.start-ec2-runner.outputs.ec2-instance-id }} + steps: + - name: Checkout Airbyte + uses: actions/checkout@v2 + with: + repository: ${{ github.event.inputs.repo }} + ref: ${{ github.event.inputs.gitref }} + - name: Start AWS Runner + id: start-ec2-runner + uses: ./.github/actions/start-aws-runner + with: + aws-access-key-id: ${{ secrets.SELF_RUNNER_AWS_ACCESS_KEY_ID }} + aws-secret-access-key: ${{ secrets.SELF_RUNNER_AWS_SECRET_ACCESS_KEY }} + github-token: ${{ needs.find_valid_pat.outputs.pat }} + label: ${{ github.run_id }}-publisher + start-publish-image-runner-4: + if: github.event.inputs.parallel == 'true' && success() + name: Start Build EC2 Runner 4 + runs-on: ubuntu-latest + needs: find_valid_pat + outputs: + label: ${{ steps.start-ec2-runner.outputs.label }} + ec2-instance-id: ${{ steps.start-ec2-runner.outputs.ec2-instance-id }} + steps: + - name: Checkout Airbyte + uses: actions/checkout@v2 + with: + repository: ${{ github.event.inputs.repo }} + ref: ${{ github.event.inputs.gitref }} + - name: Start AWS Runner + id: start-ec2-runner + uses: ./.github/actions/start-aws-runner + with: + aws-access-key-id: ${{ secrets.SELF_RUNNER_AWS_ACCESS_KEY_ID }} + aws-secret-access-key: ${{ secrets.SELF_RUNNER_AWS_SECRET_ACCESS_KEY }} + github-token: ${{ needs.find_valid_pat.outputs.pat }} + label: ${{ github.run_id }}-publisher + preprocess-matrix: + needs: start-publish-image-runner-0 + runs-on: ${{ needs.start-publish-image-runner-0.outputs.label }} + outputs: + connectorjson: 
${{ steps.preprocess.outputs.connectorjson }} + steps: + # given a string input of a single connector or comma separated list of connectors e.g. connector1, connector2 + # this step builds an array, by removing whitespace, add in quotation marks around connectors and braces [ ] at the start and end + # finally, it sets it as output from this job so we can use this array of connectors as our matrix strategy for publishing + - id: preprocess + run: | + start="[\"" + replace="\",\"" + end="\"]" + stripped_connector="$(echo "${{ github.event.inputs.connector }}" | tr -d ' ')" + middle=${stripped_connector//,/$replace} + full="$start$middle$end" + echo "::set-output name=connectorjson::$full" + write-initial-output-to-comment: + name: Set up git comment + if: github.event.inputs.comment-id + needs: start-publish-image-runner-0 + runs-on: ${{ needs.start-publish-image-runner-0.outputs.label }} steps: - - name: Link comment to workflow run - if: github.event.inputs.comment-id + - name: Print start message + if: github.event.inputs.comment-id && success() + uses: peter-evans/create-or-update-comment@v1 + with: + comment-id: ${{ github.event.inputs.comment-id }} + body: | + > :clock2: Publishing the following connectors:
${{ github.event.inputs.connector }}
https://github.com/${{github.repository}}/actions/runs/${{github.run_id}} + - name: Create table header + uses: peter-evans/create-or-update-comment@v1 + with: + comment-id: ${{ github.event.inputs.comment-id }} + body: | +
+ + | Connector | Did it publish? | Were definitions generated? | + - name: Create table separator uses: peter-evans/create-or-update-comment@v1 with: comment-id: ${{ github.event.inputs.comment-id }} body: | - > :clock2: ${{github.event.inputs.connector}} https://github.com/${{github.repository}}/actions/runs/${{github.run_id}} + | --- | --- | --- | + publish-image: + timeout-minutes: 240 + needs: + - start-publish-image-runner-0 + - preprocess-matrix + - write-initial-output-to-comment + strategy: + max-parallel: 5 + fail-fast: false + matrix: + connector: ${{ fromJSON(needs.preprocess-matrix.outputs.connectorjson) }} + runs-on: runner-pool-${{ github.run_id }} + environment: more-secrets + steps: - name: Set up Cloud SDK uses: google-github-actions/setup-gcloud@v0 with: @@ -89,9 +224,9 @@ jobs: with: regex_pattern: "^(connectors|bases)/[a-zA-Z0-9-_]+$" regex_flags: "i" # required to be set for this plugin - search_string: ${{ github.event.inputs.connector }} + search_string: ${{ matrix.connector }} - name: Validate input workflow format - if: steps.regex.outputs.first_match != github.event.inputs.connector + if: steps.regex.outputs.first_match != matrix.connector run: echo "The connector provided has an invalid format!" 
&& exit 1 - name: Checkout Airbyte uses: actions/checkout@v2 @@ -110,6 +245,7 @@ jobs: - name: Install Pyenv and Tox run: | python3 -m pip install --quiet virtualenv==16.7.9 --user + rm -r venv || echo "no pre-existing venv" python3 -m virtualenv venv source venv/bin/activate pip install --quiet tox==3.24.4 @@ -126,73 +262,43 @@ jobs: source venv/bin/activate tox -r -c ./tools/tox_ci.ini pip install --quiet -e ./tools/ci_* - - name: Write Integration Test Credentials for ${{ github.event.inputs.connector }} + - name: Write Integration Test Credentials for ${{ matrix.connector }} run: | source venv/bin/activate - ci_credentials ${{ github.event.inputs.connector }} + ci_credentials ${{ matrix.connector }} env: GCP_GSM_CREDENTIALS: ${{ secrets.GCP_GSM_CREDENTIALS }} - name: Set Name and Version Environment Vars - if: startsWith(github.event.inputs.connector, 'connectors') + if: startsWith(matrix.connector, 'connectors') run: | source tools/lib/lib.sh - DOCKERFILE=airbyte-integrations/${{ github.event.inputs.connector }}/Dockerfile - echo "IMAGE_NAME=$(echo ${{ github.event.inputs.connector }} | cut -d"/" -f2)" >> $GITHUB_ENV + DOCKERFILE=airbyte-integrations/${{ matrix.connector }}/Dockerfile + echo "IMAGE_NAME=$(echo ${{ matrix.connector }} | cut -d"/" -f2)" >> $GITHUB_ENV echo "IMAGE_VERSION=$(_get_docker_image_version ${DOCKERFILE})" >> $GITHUB_ENV - name: Prepare Sentry - if: startsWith(github.event.inputs.connector, 'connectors') - run: | - curl -sL https://sentry.io/get-cli/ | bash - - name: Create Sentry Release - if: startsWith(github.event.inputs.connector, 'connectors') + if: startsWith(matrix.connector, 'connectors') run: | - sentry-cli releases set-commits "${{ env.IMAGE_NAME }}@${{ env.IMAGE_VERSION }}" --auto --ignore-missing - env: - SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_CONNECTOR_RELEASE_AUTH_TOKEN }} - SENTRY_ORG: airbyte-5j - SENTRY_PROJECT: airbyte-connectors - - name: Publish ${{ github.event.inputs.connector }} + curl -sL 
https://sentry.io/get-cli/ | bash || echo "sentry cli already installed" + - name: Publish ${{ matrix.connector }} run: | echo "$SPEC_CACHE_SERVICE_ACCOUNT_KEY" > spec_cache_key_file.json && docker login -u ${DOCKER_HUB_USERNAME} -p ${DOCKER_HUB_PASSWORD} - ./tools/integrations/manage.sh publish airbyte-integrations/${{ github.event.inputs.connector }} ${{ github.event.inputs.run-tests }} --publish_spec_to_cache + ./tools/integrations/manage.sh publish airbyte-integrations/${{ matrix.connector }} true --publish_spec_to_cache id: publish env: DOCKER_HUB_USERNAME: ${{ secrets.DOCKER_HUB_USERNAME }} DOCKER_HUB_PASSWORD: ${{ secrets.DOCKER_HUB_PASSWORD }} # Oracle expects this variable to be set. Although usually present, this is not set by default on Github virtual runners. TZ: UTC - - name: Finalize Sentry release - if: startsWith(github.event.inputs.connector, 'connectors') + - name: Create Sentry Release + if: startsWith(matrix.connector, 'connectors') && success() run: | - sentry-cli releases finalize "${{ env.IMAGE_NAME }}@${{ env.IMAGE_VERSION }}" + SENTRY_RELEASE_NAME="airbyte-${{ env.IMAGE_NAME }}@${{ env.IMAGE_VERSION }}" + sentry-cli releases set-commits "$SENTRY_RELEASE_NAME" --auto --ignore-missing && + sentry-cli releases finalize "$SENTRY_RELEASE_NAME" env: SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_CONNECTOR_RELEASE_AUTH_TOKEN }} - SENTRY_ORG: airbyte-5j - SENTRY_PROJECT: airbyte-connectors - - name: Add Published Success Comment - if: github.event.inputs.comment-id && success() - uses: peter-evans/create-or-update-comment@v1 - with: - comment-id: ${{ github.event.inputs.comment-id }} - body: | - > :rocket: Successfully published ${{github.event.inputs.connector}} - - name: Add Published Failure Comment - if: github.event.inputs.comment-id && !success() - uses: peter-evans/create-or-update-comment@v1 - with: - comment-id: ${{ github.event.inputs.comment-id }} - body: | - > :x: Failed to publish ${{github.event.inputs.connector}} - - name: Slack Notification 
- Failure - if: failure() - uses: rtCamp/action-slack-notify@master - env: - SLACK_WEBHOOK: ${{ secrets.BUILD_SLACK_WEBHOOK }} - SLACK_USERNAME: Buildozer - SLACK_ICON: https://avatars.slack-edge.com/temp/2020-09-01/1342729352468_209b10acd6ff13a649a1.jpg - SLACK_COLOR: DC143C - SLACK_TITLE: "Failed to publish connector ${{ github.event.inputs.connector }} from branch ${{ github.ref }}" - SLACK_FOOTER: "" + SENTRY_ORG: airbytehq + SENTRY_PROJECT: connector-incident-management - name: Check if connector in definitions yaml if: github.event.inputs.auto-bump-version == 'true' && success() run: | @@ -230,36 +336,103 @@ jobs: git commit -m "auto-bump connector version" git pull origin ${{ github.event.inputs.gitref }} git push origin ${{ github.event.inputs.gitref }} - - name: Add Version Bump Success Comment - if: github.event.inputs.comment-id && github.event.inputs.auto-bump-version == 'true' && success() - uses: peter-evans/create-or-update-comment@v1 - with: - comment-id: ${{ github.event.inputs.comment-id }} - body: | - > :rocket: Auto-bumped version for ${{github.event.inputs.connector}} - - name: Add Version Bump Failure Comment - if: github.event.inputs.comment-id && github.event.inputs.auto-bump-version == 'true' && !success() + id: auto-bump + - name: Process outcomes into emojis + if: ${{ always() && github.event.inputs.comment-id }} + run: | + if [[ ${{ steps.publish.outcome }} = "success" ]]; then + echo "PUBLISH_OUTCOME=:white_check_mark:" >> $GITHUB_ENV + else + echo "PUBLISH_OUTCOME=:x:" >> $GITHUB_ENV + fi + if [[ ${{ steps.auto-bump.outcome }} = "success" ]]; then + echo "AUTO_BUMP_OUTCOME=:white_check_mark:" >> $GITHUB_ENV + else + echo "AUTO_BUMP_OUTCOME=:x:" >> $GITHUB_ENV + fi + - name: Add connector outcome line to table + if: ${{ always() && github.event.inputs.comment-id }} uses: peter-evans/create-or-update-comment@v1 with: comment-id: ${{ github.event.inputs.comment-id }} body: | - > :x: Couldn't auto-bump version for 
${{github.event.inputs.connector}} - - name: Add Final Success Comment - if: github.event.inputs.comment-id && success() + | ${{ matrix.connector }} | ${{ env.PUBLISH_OUTCOME }} | ${{ env.AUTO_BUMP_OUTCOME }} | + add-helpful-info-to-git-comment: + if: ${{ always() && github.event.inputs.comment-id }} + name: Add extra info to git comment + needs: + - start-publish-image-runner-0 # required to get output from the start-runner job + - publish-image # required to wait when the main job is done + runs-on: ubuntu-latest + steps: + - name: Add hint for manual seed definition update uses: peter-evans/create-or-update-comment@v1 with: comment-id: ${{ github.event.inputs.comment-id }} body: | - > :white_check_mark: ${{github.event.inputs.connector}} https://github.com/${{github.repository}}/actions/runs/${{github.run_id}} +
+ + if you have connectors that successfully published but failed definition generation, follow [step 4 here ā–¶ļø](https://docs.airbyte.com/connector-development/#publishing-a-connector) # In case of self-hosted EC2 errors, remove this block. - stop-publish-image-runner: + stop-publish-image-runner-0: + if: ${{ always() }} # required to stop the runner even if the error happened in the previous jobs name: Stop Build EC2 Runner needs: - - start-publish-image-runner # required to get output from the start-runner job + - start-publish-image-runner-0 # required to get output from the start-runner job + - preprocess-matrix - publish-image # required to wait when the main job is done - find_valid_pat + - add-helpful-info-to-git-comment + runs-on: ubuntu-latest + steps: + - name: Configure AWS credentials + uses: aws-actions/configure-aws-credentials@v1 + with: + aws-access-key-id: ${{ secrets.SELF_RUNNER_AWS_ACCESS_KEY_ID }} + aws-secret-access-key: ${{ secrets.SELF_RUNNER_AWS_SECRET_ACCESS_KEY }} + aws-region: us-east-2 + - name: Stop EC2 runner + uses: airbytehq/ec2-github-runner@base64v1.1.0 + with: + mode: stop + github-token: ${{ needs.find_valid_pat.outputs.pat }} + label: ${{ needs.start-publish-image-runner-0.outputs.label }} + ec2-instance-id: ${{ needs.start-publish-image-runner-0.outputs.ec2-instance-id }} + stop-publish-image-runner-multi: + if: ${{ always() && github.event.inputs.parallel == 'true' }} + name: Stop Build EC2 Runner + needs: + - start-publish-image-runner-0 + - start-publish-image-runner-1 + - start-publish-image-runner-2 + - start-publish-image-runner-3 + - start-publish-image-runner-4 + - preprocess-matrix + - publish-image # required to wait when the main job is done + - find_valid_pat + strategy: + fail-fast: false + matrix: + ec2-instance: + [ + { + "label": "${{ needs.start-publish-image-runner-1.outputs.label }}", + "id": "${{ needs.start-publish-image-runner-1.outputs.ec2-instance-id }}", + }, + { + "label": "${{ 
needs.start-publish-image-runner-2.outputs.label }}", + "id": "${{ needs.start-publish-image-runner-2.outputs.ec2-instance-id }}", + }, + { + "label": "${{ needs.start-publish-image-runner-3.outputs.label }}", + "id": "${{ needs.start-publish-image-runner-3.outputs.ec2-instance-id }}", + }, + { + "label": "${{ needs.start-publish-image-runner-4.outputs.label }}", + "id": "${{ needs.start-publish-image-runner-4.outputs.ec2-instance-id }}", + }, + ] runs-on: ubuntu-latest - if: ${{ always() }} # required to stop the runner even if the error happened in the previous jobs steps: - name: Configure AWS credentials uses: aws-actions/configure-aws-credentials@v1 @@ -268,9 +441,9 @@ jobs: aws-secret-access-key: ${{ secrets.SELF_RUNNER_AWS_SECRET_ACCESS_KEY }} aws-region: us-east-2 - name: Stop EC2 runner - uses: supertopher/ec2-github-runner@base64v1.0.10 + uses: airbytehq/ec2-github-runner@base64v1.1.0 with: mode: stop github-token: ${{ needs.find_valid_pat.outputs.pat }} - label: ${{ needs.start-publish-image-runner.outputs.label }} - ec2-instance-id: ${{ needs.start-publish-image-runner.outputs.ec2-instance-id }} + label: ${{ matrix.ec2-instance.label }} + ec2-instance-id: ${{ matrix.ec2-instance.id }} diff --git a/.github/workflows/publish-oss-for-cloud.yml b/.github/workflows/publish-oss-for-cloud.yml new file mode 100644 index 000000000000..e7acb72f4312 --- /dev/null +++ b/.github/workflows/publish-oss-for-cloud.yml @@ -0,0 +1,145 @@ +name: Publish OSS Artifacts for Cloud +concurrency: + group: ${{ github.workflow }}-${{ inputs.oss_ref || github.sha }} + +on: + workflow_dispatch: + inputs: + oss_ref: + description: "Publish artifacts for the following git ref (if unspecified, uses the latest commit for the current branch):" + required: false +jobs: + find_valid_pat: + name: "Find a PAT with room for actions" + timeout-minutes: 10 + runs-on: ubuntu-latest + outputs: + pat: ${{ steps.variables.outputs.pat }} + steps: + - name: Checkout Airbyte + uses: 
actions/checkout@v2 + - name: Check PAT rate limits + id: variables + run: | + ./tools/bin/find_non_rate_limited_PAT \ + ${{ secrets.AIRBYTEIO_PAT }} \ + ${{ secrets.OSS_BUILD_RUNNER_GITHUB_PAT }} \ + ${{ secrets.SUPERTOPHER_PAT }} \ + ${{ secrets.DAVINCHIA_PAT }} + start-runner: + name: "Start Runner on AWS" + needs: find_valid_pat + timeout-minutes: 10 + runs-on: ubuntu-latest + outputs: + label: ${{ steps.start-ec2-runner.outputs.label }} + ec2-instance-id: ${{ steps.start-ec2-runner.outputs.ec2-instance-id }} + steps: + - name: Checkout Airbyte + uses: actions/checkout@v2 + - name: Start AWS Runner + id: start-ec2-runner + uses: ./.github/actions/start-aws-runner + with: + aws-access-key-id: ${{ secrets.SELF_RUNNER_AWS_ACCESS_KEY_ID }} + aws-secret-access-key: ${{ secrets.SELF_RUNNER_AWS_SECRET_ACCESS_KEY }} + github-token: ${{ needs.find_valid_pat.outputs.pat }} + + generate-tags: + name: "Generate Dev and Master Tags" + runs-on: ubuntu-latest + outputs: + dev_tag: ${{ steps.set-outputs.outputs.dev_tag }} + master_tag: ${{ steps.set-outputs.outputs.master_tag }} + steps: + - name: Checkout Airbyte + uses: actions/checkout@v2 + with: + ref: ${{ github.event.inputs.oss_ref || github.ref }} + - name: Generate Outputs + id: set-outputs + shell: bash + run: |- + set -x + + commit_sha=$(git rev-parse --short HEAD) + + # set dev_tag + # AirbyteVersion.java allows versions that have a prefix of 'dev' + echo "::set-output name=dev_tag::dev-${commit_sha}" + + # If this commit is on the master branch, also set master_tag + if $(git merge-base --is-ancestor "${commit_sha}" master); then + echo "::set-output name=master_tag::${commit_sha}" + fi + + oss-branch-build: + name: "Gradle Build and Publish" + needs: + - start-runner + - generate-tags + runs-on: ${{ needs.start-runner.outputs.label }} + environment: more-secrets + steps: + - name: Checkout Airbyte + uses: actions/checkout@v2 + with: + ref: ${{ github.event.inputs.oss_ref || github.ref }} + + - name: Build Branch + 
uses: ./.github/actions/build-branch + with: + branch_version_tag: ${{ needs.generate-tags.outputs.dev_tag }} + + - name: Publish Dev Jars + env: + CLOUDREPO_USER: ${{ secrets.CLOUDREPO_USER }} + CLOUDREPO_PASSWORD: ${{ secrets.CLOUDREPO_PASSWORD }} + run: VERSION=${{ needs.generate-tags.outputs.dev_tag }} SUB_BUILD=PLATFORM ./gradlew publish + shell: bash + + - name: Publish Master Jars + if: needs.generate-tags.outputs.master_tag != '' + env: + CLOUDREPO_USER: ${{ secrets.CLOUDREPO_USER }} + CLOUDREPO_PASSWORD: ${{ secrets.CLOUDREPO_PASSWORD }} + run: VERSION=${{ needs.generate-tags.outputs.master_tag }} SUB_BUILD=PLATFORM ./gradlew publish + shell: bash + + docker-push: + name: "Push Docker Images" + needs: + - start-runner + - generate-tags + - oss-branch-build + runs-on: ${{ needs.start-runner.outputs.label }} + steps: + - name: Login to Docker (on Master) + uses: docker/login-action@v1 + with: + username: ${{ secrets.DOCKER_HUB_USERNAME }} + password: ${{ secrets.DOCKER_HUB_PASSWORD }} + + - name: Prepare Docker buildx + run: | + docker run --rm --privileged multiarch/qemu-user-static --reset -p yes + docker buildx create --name oss-buildx --driver docker-container --use + shell: bash + + - name: Set Git Revision + run: | + GIT_REVISION=$(git rev-parse HEAD) + [[ -z "$GIT_REVISION" ]] && echo "Couldn't get the git revision..." 
&& exit 1 + echo "GIT_REVISION=${GIT_REVISION}" >> $GITHUB_ENV + shell: bash + + - name: Push Docker Images + env: + VERSION: ${{ needs.generate-tags.outputs.dev_tag }} + ALT_TAG: ${{ needs.generate-tags.outputs.master_tag }} + run: GIT_REVISION=$GIT_REVISION docker buildx bake -f docker-compose-cloud.buildx.yaml --push + shell: bash + + - name: Cleanup Docker buildx + run: docker buildx rm oss-buildx + shell: bash diff --git a/.github/workflows/release-airbyte-os.yml b/.github/workflows/release-airbyte-os.yml index 439f03d975d3..14a284cbfe52 100644 --- a/.github/workflows/release-airbyte-os.yml +++ b/.github/workflows/release-airbyte-os.yml @@ -60,9 +60,9 @@ jobs: with: java-version: "17" - - uses: actions/setup-node@v1 + - uses: actions/setup-node@v2 with: - node-version: "16.13.0" + node-version: "lts/gallium" # necessary to install pip - uses: actions/setup-python@v2 with: @@ -92,9 +92,9 @@ jobs: with: java-version: "17" - - uses: actions/setup-node@v1 + - uses: actions/setup-node@v2 with: - node-version: "16.13.0" + node-version: "lts/gallium" - uses: actions/setup-python@v2 with: python-version: "3.9" diff --git a/.github/workflows/run-specific-test-command.yml b/.github/workflows/run-specific-test-command.yml index c215b845e70c..6d43e57a7cd5 100644 --- a/.github/workflows/run-specific-test-command.yml +++ b/.github/workflows/run-specific-test-command.yml @@ -50,9 +50,9 @@ jobs: with: java-version: '14' - - uses: actions/setup-node@v1 + - uses: actions/setup-node@v2 with: - node-version: '16.13.0' + node-version: 'lts/gallium' - name: Build id: run-specific-test diff --git a/.github/workflows/terminate-zombie-build-instances.yml b/.github/workflows/terminate-zombie-build-instances.yml index 53e214727a54..f45112a55cb6 100644 --- a/.github/workflows/terminate-zombie-build-instances.yml +++ b/.github/workflows/terminate-zombie-build-instances.yml @@ -1,6 +1,7 @@ # Required since we cannot guarantee instances are always terminated. 
# Also a failsafe against a dev writing a workflow that does not terminate build instances. -# The average runtime as of this commit is ~20 mins. Set this to an hour for some buffer. +# Though the average Airbyte build runtime as of this commit is ~20 mins, connector builds +# can take up to 3 hours. Set this to 3 hours to include these longer runs. name: Terminate Zombie Build Instances on: @@ -12,7 +13,7 @@ jobs: terminate: runs-on: ubuntu-latest steps: - - name: List and Terminate Instances Older Than an Hour + - name: List and Terminate Instances Older Than 3 Hours env: AWS_ACCESS_KEY_ID: ${{ secrets.SELF_RUNNER_AWS_ACCESS_KEY_ID }} AWS_SECRET_ACCESS_KEY: ${{ secrets.SELF_RUNNER_AWS_SECRET_ACCESS_KEY }} @@ -21,6 +22,8 @@ jobs: run: | set -euxo pipefail + TIME_LIMIT=10800 # 3 hours + aws configure set default.region us-east-2 # See https://awscli.amazonaws.com/v2/documentation/api/latest/reference/ec2/describe-instances.html for describe command. @@ -28,9 +31,18 @@ export to_terminate=$(aws ec2 describe-instances --no-paginate --filters Name=instance-type,Values=c5.2xlarge Name=instance-state-name,Values=running \ --query 'Reservations[*].Instances[*].{Instance:InstanceId,LaunchTime:LaunchTime}' --output json \ | jq 'def toZ(str): str | (split("+")[0] + "Z") | fromdate ; - flatten | map( { InstanceId: .Instance, LaunchTime: toZ(.LaunchTime) } ) | map( select ( .LaunchTime < (now - 3600) ) )') + flatten | map( { InstanceId: .Instance, LaunchTime: toZ(.LaunchTime) } ) | map( select ( .LaunchTime < (now - '"$TIME_LIMIT"') ) )') echo "MARKED FOR TERMINATION: ${to_terminate}" # See https://docs.aws.amazon.com/cli/latest/reference/ec2/terminate-instances.html for terminate command. 
echo $to_terminate | jq '.[] | .InstanceId' | xargs --no-run-if-empty --max-args=1 aws ec2 terminate-instances --instance-ids + terminate-github-instances: + runs-on: ubuntu-latest + steps: + - name: Checkout Airbyte + uses: actions/checkout@v2 + - name: List and Terminate GH actions in status 'offline' + env: + GITHUB_PAT: ${{ secrets.OCTAVIA_PAT }} + run: ./tools/bin/gh_action_zombie_killer diff --git a/.gitignore b/.gitignore index af6535c6435f..4027224bf7c8 100644 --- a/.gitignore +++ b/.gitignore @@ -64,3 +64,6 @@ resources/examples/airflow/logs/* # Cloud Demo !airbyte-webapp/src/packages/cloud/data + +# Summary.md keeps getting added and we just don't like it +docs/SUMMARY.md diff --git a/.prettierignore b/.prettierignore new file mode 100644 index 000000000000..8193c5583a6f --- /dev/null +++ b/.prettierignore @@ -0,0 +1 @@ +airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output diff --git a/airbyte-api/build.gradle b/airbyte-api/build.gradle index 3b8b6e79dd49..f9314d1b0c64 100644 --- a/airbyte-api/build.gradle +++ b/airbyte-api/build.gradle @@ -28,7 +28,8 @@ task generateApiServer(type: GenerateTask) { 'DestinationDefinitionSpecification': 'com.fasterxml.jackson.databind.JsonNode', 'DestinationConfiguration' : 'com.fasterxml.jackson.databind.JsonNode', 'StreamJsonSchema' : 'com.fasterxml.jackson.databind.JsonNode', - 'ConnectionStateObject' : 'com.fasterxml.jackson.databind.JsonNode', + 'StateBlob' : 'com.fasterxml.jackson.databind.JsonNode', + 'FieldSchema' : 'com.fasterxml.jackson.databind.JsonNode', ] generateApiDocumentation = false @@ -70,7 +71,8 @@ task generateApiClient(type: GenerateTask) { 'DestinationDefinitionSpecification': 'com.fasterxml.jackson.databind.JsonNode', 'DestinationConfiguration' : 'com.fasterxml.jackson.databind.JsonNode', 'StreamJsonSchema' : 'com.fasterxml.jackson.databind.JsonNode', - 'ConnectionStateObject' : 'com.fasterxml.jackson.databind.JsonNode', + 'StateBlob' : 
'com.fasterxml.jackson.databind.JsonNode', + 'FieldSchema' : 'com.fasterxml.jackson.databind.JsonNode', ] library = "native" @@ -103,7 +105,8 @@ task generateApiDocs(type: GenerateTask) { 'DestinationDefinitionSpecification': 'com.fasterxml.jackson.databind.JsonNode', 'DestinationConfiguration' : 'com.fasterxml.jackson.databind.JsonNode', 'StreamJsonSchema' : 'com.fasterxml.jackson.databind.JsonNode', - 'ConnectionStateObject' : 'com.fasterxml.jackson.databind.JsonNode', + 'StateBlob' : 'com.fasterxml.jackson.databind.JsonNode', + 'FieldSchema' : 'com.fasterxml.jackson.databind.JsonNode', ] generateApiDocumentation = false diff --git a/airbyte-api/src/main/openapi/config.yaml b/airbyte-api/src/main/openapi/config.yaml index 426e93e63054..9e1438edc081 100644 --- a/airbyte-api/src/main/openapi/config.yaml +++ b/airbyte-api/src/main/openapi/config.yaml @@ -1996,6 +1996,29 @@ paths: $ref: "#/components/schemas/WebBackendConnectionReadList" "422": $ref: "#/components/responses/InvalidInputResponse" + /v1/web_backend/state/get_type: + post: + tags: + - connection + summary: Fetch the current state type for a connection. + operationId: getStateType + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/ConnectionIdRequestBody" + required: true + responses: + "200": + description: Successful operation + content: + application/json: + schema: + $ref: "#/components/schemas/ConnectionStateType" + "404": + $ref: "#/components/responses/NotFoundResponse" + "422": + $ref: "#/components/responses/InvalidInputResponse" /v1/web_backend/workspace/state: post: tags: @@ -3585,7 +3608,6 @@ components: type: string description: Stream's name. jsonSchema: - description: Stream schema using Json Schema specs. $ref: "#/components/schemas/StreamJsonSchema" supportedSyncModes: type: array @@ -3610,6 +3632,7 @@ components: type: string description: Optional Source-defined namespace. Airbyte streams from the same sources should have the same namespace. 
Currently only used by JDBC destinations to determine what schema to write to. StreamJsonSchema: + description: Stream schema using Json Schema specs. type: object AirbyteStreamConfiguration: description: the mutable part of the stream to configure the destination @@ -3674,7 +3697,6 @@ components: configId: type: string pagination: - type: object $ref: "#/components/schemas/Pagination" JobIdRequestBody: type: object @@ -3707,6 +3729,25 @@ components: format: int64 status: $ref: "#/components/schemas/JobStatus" + resetConfig: + $ref: "#/components/schemas/ResetConfig" + ResetConfig: + type: object + description: contains information about how a reset was configured. only populated if the job was a reset. + properties: + streamsToReset: + type: array + items: + $ref: "#/components/schemas/StreamDescriptor" + StreamDescriptor: + type: object + required: + - name + properties: + name: + type: string + namespace: + type: string JobDebugRead: type: object required: @@ -3980,14 +4021,130 @@ components: $ref: "#/components/schemas/SynchronousJobRead" ConnectionState: type: object + description: Contains the state for a connection. The stateType field identifies what type of state it is. Only the field corresponding to that type will be set, the rest will be null. If stateType=not_set, then none of the fields will be set. 
required: - connectionId + - stateType properties: + stateType: + $ref: "#/components/schemas/ConnectionStateType" connectionId: $ref: "#/components/schemas/ConnectionId" - state: - $ref: "#/components/schemas/ConnectionStateObject" - ConnectionStateObject: + state: # legacy state object + $ref: "#/components/schemas/StateBlob" + streamState: + type: array + items: + $ref: "#/components/schemas/StreamState" + globalState: + $ref: "#/components/schemas/GlobalState" + StateBlob: + type: object + StreamState: + type: object + required: + - streamDescriptor + properties: + streamDescriptor: + $ref: "#/components/schemas/StreamDescriptor" + streamState: + $ref: "#/components/schemas/StateBlob" + GlobalState: + type: object + required: + - streamStates + properties: + shared_state: + $ref: "#/components/schemas/StateBlob" + streamStates: + type: array + items: + $ref: "#/components/schemas/StreamState" + ConnectionStateType: + type: string + enum: + - global + - stream + - legacy + - not_set + CatalogDiff: + type: object + description: Describes the difference between two Airbyte catalogs. + required: + - transforms + properties: + transforms: + description: list of stream transformations. order does not matter. + type: array + items: + $ref: "#/components/schemas/StreamTransform" + StreamTransform: + type: object + required: + - transformType + - streamDescriptor + properties: + transformType: + type: string + enum: + - add_stream + - remove_stream + - update_stream + streamDescriptor: + $ref: "#/components/schemas/StreamDescriptor" + updateStream: + type: array + description: list of field transformations. order does not matter. + items: + $ref: "#/components/schemas/FieldTransform" + FieldTransform: + type: object + description: "Describes the difference between two Streams." 
+ required: + - transformType + - fieldName + properties: + transformType: + type: string + enum: + - add_field + - remove_field + - update_field_schema + fieldName: + $ref: "#/components/schemas/FieldName" + addField: + $ref: "#/components/schemas/FieldAdd" + removeField: + $ref: "#/components/schemas/FieldRemove" + updateFieldSchema: + $ref: "#/components/schemas/FieldSchemaUpdate" + FieldAdd: + type: object + properties: + schema: + $ref: "#/components/schemas/FieldSchema" + FieldRemove: + type: object + properties: + schema: + $ref: "#/components/schemas/FieldSchema" + FieldSchemaUpdate: + type: object + required: + - oldSchema + - newSchema + properties: + oldSchema: + $ref: "#/components/schemas/FieldSchema" + newSchema: + $ref: "#/components/schemas/FieldSchema" + FieldName: + description: A field name is a list of strings that form the path to the field. + type: array + items: + type: string + FieldSchema: + description: JSONSchema representation of the field type: object ActorDefinitionResourceRequirements: description: actor definition specific resource requirements. if default is set, these are the requirements that should be set for ALL jobs run for this actor definition. it is overriden by the job type specific configurations. if not set, the platform will use defaults. these values will be overriden by configuration at the connection level. @@ -4099,10 +4256,8 @@ components: $ref: "#/components/schemas/DbMigrationRead" # OAuth OAuthConfiguration: - description: OAuth specific blob. + description: The values required to configure OAuth flows. The schema for this must match the `OAuthConfigSpecification.oauthUserInputFromConnectorConfigSpecification` schema. OAuthInputConfiguration: - description: The values required to configure OAuth flows. - The schema for this must match the `OAuthConfigSpecification.oauthUserInputFromConnectorConfigSpecification` schema. 
$ref: "#/components/schemas/OAuthConfiguration" AdvancedAuth: type: object @@ -4372,6 +4527,8 @@ components: catalogId: type: string format: uuid + catalogDiff: + $ref: "#/components/schemas/CatalogDiff" WebBackendConnectionReadList: type: object required: diff --git a/airbyte-bootloader/Dockerfile b/airbyte-bootloader/Dockerfile index 06c8b23ec137..24b0ae64e97c 100644 --- a/airbyte-bootloader/Dockerfile +++ b/airbyte-bootloader/Dockerfile @@ -2,7 +2,7 @@ ARG JDK_VERSION=17.0.1 ARG JDK_IMAGE=openjdk:${JDK_VERSION}-slim FROM ${JDK_IMAGE} -ARG VERSION=0.39.17-alpha +ARG VERSION=0.39.28-alpha ENV APPLICATION airbyte-bootloader ENV VERSION ${VERSION} diff --git a/airbyte-bootloader/build.gradle b/airbyte-bootloader/build.gradle index 033de2a273b7..211465e583ef 100644 --- a/airbyte-bootloader/build.gradle +++ b/airbyte-bootloader/build.gradle @@ -14,7 +14,7 @@ dependencies { implementation 'io.temporal:temporal-sdk:1.8.1' implementation libs.flyway.core - testImplementation libs.testcontainers.postgresql + testImplementation libs.platform.testcontainers.postgresql testImplementation 'uk.org.webcompere:system-stubs-jupiter:1.2.0' } diff --git a/airbyte-bootloader/src/test/java/io/airbyte/bootloader/BootloaderAppTest.java b/airbyte-bootloader/src/test/java/io/airbyte/bootloader/BootloaderAppTest.java index b1b9dc0af361..38366c889537 100644 --- a/airbyte-bootloader/src/test/java/io/airbyte/bootloader/BootloaderAppTest.java +++ b/airbyte-bootloader/src/test/java/io/airbyte/bootloader/BootloaderAppTest.java @@ -129,7 +129,7 @@ void testBootloaderAppBlankDb() throws Exception { val configsMigrator = new ConfigsDatabaseMigrator(configDatabase, configsFlyway); // this line should change with every new migration // to show that you meant to make a new migration to the prod database - assertEquals("0.39.1.001", configsMigrator.getLatestMigration().getVersion().getVersion()); + assertEquals("0.39.17.001", configsMigrator.getLatestMigration().getVersion().getVersion()); val 
jobsPersistence = new DefaultJobPersistence(jobDatabase); assertEquals(version, jobsPersistence.getVersion().get()); diff --git a/airbyte-cdk/python/CHANGELOG.md b/airbyte-cdk/python/CHANGELOG.md index 17118852e5ff..b689032f7a10 100644 --- a/airbyte-cdk/python/CHANGELOG.md +++ b/airbyte-cdk/python/CHANGELOG.md @@ -1,5 +1,11 @@ # Changelog +## 0.1.62 +Bugfix: Correctly obfuscate nested secrets and secrets specified inside oneOf blocks inside the connector's spec. + +## 0.1.61 +- Remove legacy sentry code + ## 0.1.60 - Add `requests.exceptions.ChunkedEncodingError` to transient errors so it could be retried diff --git a/airbyte-cdk/python/README.md b/airbyte-cdk/python/README.md index 061d759f6341..5c5700141a91 100644 --- a/airbyte-cdk/python/README.md +++ b/airbyte-cdk/python/README.md @@ -8,7 +8,7 @@ The Airbyte Python CDK is a framework for rapidly developing production-grade Ai The CDK provides an improved developer experience by providing basic implementation structure and abstracting away low-level glue boilerplate. -This document is a general introduction to the CDK. Readers should have basic familiarity with the [Airbyte Specification](https://docs.airbyte.io/architecture/airbyte-specification) before proceeding. +This document is a general introduction to the CDK. Readers should have basic familiarity with the [Airbyte Specification](https://docs.airbyte.io/architecture/airbyte-protocol) before proceeding. 
## Getting Started diff --git a/airbyte-cdk/python/airbyte_cdk/entrypoint.py b/airbyte-cdk/python/airbyte_cdk/entrypoint.py index 3409f9293f0a..d996c7798e8d 100644 --- a/airbyte-cdk/python/airbyte_cdk/entrypoint.py +++ b/airbyte-cdk/python/airbyte_cdk/entrypoint.py @@ -9,15 +9,14 @@ import os.path import sys import tempfile -from typing import Any, Dict, Iterable, List +from typing import Iterable, List from airbyte_cdk.exception_handler import init_uncaught_exception_handler from airbyte_cdk.logger import init_logger from airbyte_cdk.models import AirbyteMessage, Status, Type from airbyte_cdk.models.airbyte_protocol import ConnectorSpecification from airbyte_cdk.sources import Source -from airbyte_cdk.sources.utils.schema_helpers import check_config_against_spec_or_exit, get_secret_values, split_config -from airbyte_cdk.sources.utils.sentry import AirbyteSentry +from airbyte_cdk.sources.utils.schema_helpers import check_config_against_spec_or_exit, split_config from airbyte_cdk.utils.airbyte_secrets_utils import get_secrets, update_secrets logger = init_logger("airbyte") @@ -63,15 +62,6 @@ def parse_args(args: List[str]) -> argparse.Namespace: return main_parser.parse_args(args) - def configure_sentry(self, spec_schema: Dict[str, Any], parsed_args: argparse.Namespace): - secret_values = [] - if "config" in parsed_args: - config = self.source.read_config(parsed_args.config) - secret_values = get_secret_values(spec_schema, config) - source_name = self.source.__module__.split(".")[0] - source_name = source_name.split("_", 1)[-1] - AirbyteSentry.init(source_tag=source_name, secret_values=secret_values) - def run(self, parsed_args: argparse.Namespace) -> Iterable[str]: cmd = parsed_args.command if not cmd: @@ -79,7 +69,6 @@ def run(self, parsed_args: argparse.Namespace) -> Iterable[str]: # todo: add try catch for exceptions with different exit codes source_spec: ConnectorSpecification = self.source.spec(self.logger) - 
self.configure_sentry(source_spec.connectionSpecification, parsed_args) with tempfile.TemporaryDirectory() as temp_dir: if cmd == "spec": message = AirbyteMessage(type=Type.SPEC, spec=source_spec) @@ -90,7 +79,7 @@ def run(self, parsed_args: argparse.Namespace) -> Iterable[str]: # Now that we have the config, we can use it to get a list of ai airbyte_secrets # that we should filter in logging to avoid leaking secrets - config_secrets = get_secrets(self.source, config, self.logger) + config_secrets = get_secrets(source_spec.connectionSpecification, config) update_secrets(config_secrets) # Remove internal flags from config before validating so diff --git a/airbyte-cdk/python/airbyte_cdk/models/airbyte_protocol.py b/airbyte-cdk/python/airbyte_cdk/models/airbyte_protocol.py index 6b8a67c8fd93..0b1394711782 100644 --- a/airbyte-cdk/python/airbyte_cdk/models/airbyte_protocol.py +++ b/airbyte-cdk/python/airbyte_cdk/models/airbyte_protocol.py @@ -27,25 +27,34 @@ class AirbyteRecordMessage(BaseModel): class Config: extra = Extra.allow - stream: str = Field(..., description="the name of this record's stream") - data: Dict[str, Any] = Field(..., description="the record data") + namespace: Optional[str] = Field(None, description="namespace the data is associated with") + stream: str = Field(..., description="stream the data is associated with") + data: Dict[str, Any] = Field(..., description="record data") emitted_at: int = Field( ..., description="when the data was emitted from the source. 
epoch in millisecond.", ) - namespace: Optional[str] = Field(None, description="the namespace of this record's stream") class AirbyteStateType(Enum): GLOBAL = "GLOBAL" - PER_STREAM = "PER_STREAM" + STREAM = "STREAM" + LEGACY = "LEGACY" + + +class StreamDescriptor(BaseModel): + class Config: + extra = Extra.allow + + name: str + namespace: Optional[str] = None class AirbyteStateBlob(BaseModel): pass class Config: - extra = Extra.forbid + extra = Extra.allow class Level(Enum): @@ -61,8 +70,8 @@ class AirbyteLogMessage(BaseModel): class Config: extra = Extra.allow - level: Level = Field(..., description="the type of logging") - message: str = Field(..., description="the log message") + level: Level = Field(..., description="log level") + message: str = Field(..., description="log message") class TraceType(Enum): @@ -164,11 +173,18 @@ class OAuthConfigSpecification(BaseModel): class AirbyteStreamState(BaseModel): class Config: - extra = Extra.forbid + extra = Extra.allow + + stream_descriptor: StreamDescriptor + stream_state: Optional[AirbyteStateBlob] = None + + +class AirbyteGlobalState(BaseModel): + class Config: + extra = Extra.allow - name: str = Field(..., description="Stream name") - state: AirbyteStateBlob - namespace: Optional[str] = Field(None, description="Optional Source-defined namespace.") + shared_state: Optional[AirbyteStateBlob] = None + stream_states: List[AirbyteStreamState] class AirbyteTraceMessage(BaseModel): @@ -245,7 +261,10 @@ class Config: ..., description="ConnectorDefinition specific blob. 
Must be a valid JSON string.", ) - supportsIncremental: Optional[bool] = Field(None, description="If the connector supports incremental mode or not.") + supportsIncremental: Optional[bool] = Field( + None, + description="(deprecated) If the connector supports incremental mode or not.", + ) supportsNormalization: Optional[bool] = Field(False, description="If the connector supports normalization or not.") supportsDBT: Optional[bool] = Field(False, description="If the connector supports DBT or not.") supported_destination_sync_modes: Optional[List[DestinationSyncMode]] = Field( @@ -262,10 +281,10 @@ class AirbyteStateMessage(BaseModel): class Config: extra = Extra.allow - state_type: Optional[AirbyteStateType] = None + type: Optional[AirbyteStateType] = None + stream: Optional[AirbyteStreamState] = None + global_: Optional[AirbyteGlobalState] = Field(None, alias="global") data: Optional[Dict[str, Any]] = Field(None, description="(Deprecated) the state data") - global_: Optional[AirbyteStateBlob] = Field(None, alias="global") - streams: Optional[List[AirbyteStreamState]] = None class AirbyteCatalog(BaseModel): diff --git a/airbyte-cdk/python/airbyte_cdk/sources/abstract_source.py b/airbyte-cdk/python/airbyte_cdk/sources/abstract_source.py index d8c8acc16387..7d2eaa528df0 100644 --- a/airbyte-cdk/python/airbyte_cdk/sources/abstract_source.py +++ b/airbyte-cdk/python/airbyte_cdk/sources/abstract_source.py @@ -68,14 +68,14 @@ def name(self) -> str: def discover(self, logger: logging.Logger, config: Mapping[str, Any]) -> AirbyteCatalog: """Implements the Discover operation from the Airbyte Specification. - See https://docs.airbyte.io/architecture/airbyte-specification. + See https://docs.airbyte.io/architecture/airbyte-protocol. 
""" streams = [stream.as_airbyte_stream() for stream in self.streams(config=config)] return AirbyteCatalog(streams=streams) def check(self, logger: logging.Logger, config: Mapping[str, Any]) -> AirbyteConnectionStatus: """Implements the Check Connection operation from the Airbyte Specification. - See https://docs.airbyte.io/architecture/airbyte-specification. + See https://docs.airbyte.io/architecture/airbyte-protocol. """ try: check_succeeded, error = self.check_connection(logger, config) @@ -93,7 +93,7 @@ def read( catalog: ConfiguredAirbyteCatalog, state: MutableMapping[str, Any] = None, ) -> Iterator[AirbyteMessage]: - """Implements the Read operation from the Airbyte Specification. See https://docs.airbyte.io/architecture/airbyte-specification.""" + """Implements the Read operation from the Airbyte Specification. See https://docs.airbyte.io/architecture/airbyte-protocol.""" connector_state = copy.deepcopy(state or {}) logger.info(f"Starting syncing {self.name}") config, internal_config = split_config(config) diff --git a/airbyte-cdk/python/airbyte_cdk/sources/declarative/declarative_stream.py b/airbyte-cdk/python/airbyte_cdk/sources/declarative/declarative_stream.py index 0fd28c2b0caf..49fa6db4b379 100644 --- a/airbyte-cdk/python/airbyte_cdk/sources/declarative/declarative_stream.py +++ b/airbyte-cdk/python/airbyte_cdk/sources/declarative/declarative_stream.py @@ -16,12 +16,12 @@ class DeclarativeStream(Stream): DeclarativeStream is a Stream that delegates most of its logic to its schema_load and retriever """ - def __init__(self, name, primary_key, cursor_field, schema_loader: SchemaLoader, retriever): + def __init__(self, name, primary_key, schema_loader: SchemaLoader, retriever: Retriever, cursor_field: Optional[List[str]] = None): self._name = name self._primary_key = primary_key - self._cursor_field = cursor_field + self._cursor_field = cursor_field or [] self._schema_loader = schema_loader - self._retriever: Retriever = retriever + self._retriever = 
retriever @property def primary_key(self) -> Optional[Union[str, List[str], List[List[str]]]]: diff --git a/airbyte-cdk/python/airbyte_cdk/sources/declarative/extractors/http_extractor.py b/airbyte-cdk/python/airbyte_cdk/sources/declarative/extractors/http_extractor.py deleted file mode 100644 index 73e28ecf8204..000000000000 --- a/airbyte-cdk/python/airbyte_cdk/sources/declarative/extractors/http_extractor.py +++ /dev/null @@ -1,15 +0,0 @@ -# -# Copyright (c) 2022 Airbyte, Inc., all rights reserved. -# - -from abc import ABC, abstractmethod -from typing import List - -import requests -from airbyte_cdk.sources.declarative.types import Record - - -class HttpExtractor(ABC): - @abstractmethod - def extract_records(self, response: requests.Response) -> List[Record]: - pass diff --git a/airbyte-cdk/python/airbyte_cdk/sources/declarative/extractors/http_selector.py b/airbyte-cdk/python/airbyte_cdk/sources/declarative/extractors/http_selector.py new file mode 100644 index 000000000000..a57fccba316e --- /dev/null +++ b/airbyte-cdk/python/airbyte_cdk/sources/declarative/extractors/http_selector.py @@ -0,0 +1,21 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+# + +from abc import ABC, abstractmethod +from typing import Any, List, Mapping + +import requests +from airbyte_cdk.sources.declarative.types import Record + + +class HttpSelector(ABC): + @abstractmethod + def select_records( + self, + response: requests.Response, + stream_state: Mapping[str, Any], + stream_slice: Mapping[str, Any] = None, + next_page_token: Mapping[str, Any] = None, + ) -> List[Record]: + pass diff --git a/airbyte-cdk/python/airbyte_cdk/sources/declarative/extractors/jello.py b/airbyte-cdk/python/airbyte_cdk/sources/declarative/extractors/jello.py index bac23222da67..9dd1dc452107 100644 --- a/airbyte-cdk/python/airbyte_cdk/sources/declarative/extractors/jello.py +++ b/airbyte-cdk/python/airbyte_cdk/sources/declarative/extractors/jello.py @@ -2,27 +2,25 @@ # Copyright (c) 2022 Airbyte, Inc., all rights reserved. # -from typing import List +from typing import List, Optional import requests from airbyte_cdk.sources.declarative.decoders.decoder import Decoder -from airbyte_cdk.sources.declarative.extractors.http_extractor import HttpExtractor +from airbyte_cdk.sources.declarative.decoders.json_decoder import JsonDecoder from airbyte_cdk.sources.declarative.interpolation.jinja import JinjaInterpolation from airbyte_cdk.sources.declarative.types import Record from jello import lib as jello_lib -class JelloExtractor(HttpExtractor): +class JelloExtractor: default_transform = "." 
- def __init__(self, transform: str, decoder: Decoder, config, kwargs=None): - if kwargs is None: - kwargs = dict() + def __init__(self, transform: str, decoder: Optional[Decoder] = None, config=None, kwargs=None): self._interpolator = JinjaInterpolation() self._transform = transform + self._decoder = decoder or JsonDecoder() self._config = config - self._kwargs = kwargs - self._decoder = decoder + self._kwargs = kwargs or dict() def extract_records(self, response: requests.Response) -> List[Record]: response_body = self._decoder.decode(response) diff --git a/airbyte-cdk/python/airbyte_cdk/sources/declarative/extractors/record_filter.py b/airbyte-cdk/python/airbyte_cdk/sources/declarative/extractors/record_filter.py new file mode 100644 index 000000000000..8351bd2c03ef --- /dev/null +++ b/airbyte-cdk/python/airbyte_cdk/sources/declarative/extractors/record_filter.py @@ -0,0 +1,24 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + +from typing import Any, List, Mapping + +from airbyte_cdk.sources.declarative.interpolation.interpolated_boolean import InterpolatedBoolean +from airbyte_cdk.sources.declarative.types import Record + + +class RecordFilter: + def __init__(self, config, condition: str = None): + self._config = config + self._filter_interpolator = InterpolatedBoolean(condition) + + def filter_records( + self, + records: List[Record], + stream_state: Mapping[str, Any], + stream_slice: Mapping[str, Any] = None, + next_page_token: Mapping[str, Any] = None, + ) -> List[Record]: + kwargs = {"stream_state": stream_state, "stream_slice": stream_slice, "next_page_token": next_page_token} + return [record for record in records if self._filter_interpolator.eval(self._config, record=record, **kwargs)] diff --git a/airbyte-cdk/python/airbyte_cdk/sources/declarative/extractors/record_selector.py b/airbyte-cdk/python/airbyte_cdk/sources/declarative/extractors/record_selector.py new file mode 100644 index 000000000000..4af93121dbc9 --- /dev/null +++ 
b/airbyte-cdk/python/airbyte_cdk/sources/declarative/extractors/record_selector.py @@ -0,0 +1,36 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + +from typing import Any, List, Mapping + +import requests +from airbyte_cdk.sources.declarative.extractors.http_selector import HttpSelector +from airbyte_cdk.sources.declarative.extractors.jello import JelloExtractor +from airbyte_cdk.sources.declarative.extractors.record_filter import RecordFilter +from airbyte_cdk.sources.declarative.types import Record + + +class RecordSelector(HttpSelector): + """ + Responsible for translating an HTTP response into a list of records by extracting records from the response and optionally filtering + records based on a heuristic. + """ + + def __init__(self, extractor: JelloExtractor, record_filter: RecordFilter = None): + self._extractor = extractor + self._record_filter = record_filter + + def select_records( + self, + response: requests.Response, + stream_state: Mapping[str, Any], + stream_slice: Mapping[str, Any] = None, + next_page_token: Mapping[str, Any] = None, + ) -> List[Record]: + all_records = self._extractor.extract_records(response) + if self._record_filter: + return self._record_filter.filter_records( + all_records, stream_state=stream_state, stream_slice=stream_slice, next_page_token=next_page_token + ) + return all_records diff --git a/airbyte-cdk/python/airbyte_cdk/sources/declarative/parsers/class_types_registry.py b/airbyte-cdk/python/airbyte_cdk/sources/declarative/parsers/class_types_registry.py new file mode 100644 index 000000000000..bc01d03e880f --- /dev/null +++ b/airbyte-cdk/python/airbyte_cdk/sources/declarative/parsers/class_types_registry.py @@ -0,0 +1,17 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+# + +from typing import Mapping, Type + +from airbyte_cdk.sources.declarative.requesters.paginators.interpolated_paginator import InterpolatedPaginator +from airbyte_cdk.sources.declarative.requesters.paginators.next_page_url_paginator import NextPageUrlPaginator +from airbyte_cdk.sources.declarative.requesters.paginators.offset_paginator import OffsetPaginator +from airbyte_cdk.sources.streams.http.requests_native_auth.token import TokenAuthenticator + +CLASS_TYPES_REGISTRY: Mapping[str, Type] = { + "NextPageUrlPaginator": NextPageUrlPaginator, + "InterpolatedPaginator": InterpolatedPaginator, + "OffsetPaginator": OffsetPaginator, + "TokenAuthenticator": TokenAuthenticator, +} diff --git a/airbyte-cdk/python/airbyte_cdk/sources/declarative/parsers/default_implementation_registry.py b/airbyte-cdk/python/airbyte_cdk/sources/declarative/parsers/default_implementation_registry.py new file mode 100644 index 000000000000..9721868f22c1 --- /dev/null +++ b/airbyte-cdk/python/airbyte_cdk/sources/declarative/parsers/default_implementation_registry.py @@ -0,0 +1,32 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+# + +from typing import Mapping, Type + +from airbyte_cdk.sources.declarative.checks.check_stream import CheckStream +from airbyte_cdk.sources.declarative.checks.connection_checker import ConnectionChecker +from airbyte_cdk.sources.declarative.decoders.decoder import Decoder +from airbyte_cdk.sources.declarative.decoders.json_decoder import JsonDecoder +from airbyte_cdk.sources.declarative.extractors.http_selector import HttpSelector +from airbyte_cdk.sources.declarative.extractors.jello import JelloExtractor +from airbyte_cdk.sources.declarative.extractors.record_selector import RecordSelector +from airbyte_cdk.sources.declarative.requesters.http_requester import HttpRequester +from airbyte_cdk.sources.declarative.requesters.requester import Requester +from airbyte_cdk.sources.declarative.requesters.retriers.default_retrier import DefaultRetrier +from airbyte_cdk.sources.declarative.requesters.retriers.retrier import Retrier +from airbyte_cdk.sources.declarative.retrievers.retriever import Retriever +from airbyte_cdk.sources.declarative.retrievers.simple_retriever import SimpleRetriever +from airbyte_cdk.sources.declarative.schema.json_schema import JsonSchema +from airbyte_cdk.sources.declarative.schema.schema_loader import SchemaLoader + +DEFAULT_IMPLEMENTATIONS_REGISTRY: Mapping[Type, Type] = { + Requester: HttpRequester, + Retriever: SimpleRetriever, + SchemaLoader: JsonSchema, + HttpSelector: RecordSelector, + ConnectionChecker: CheckStream, + Retrier: DefaultRetrier, + Decoder: JsonDecoder, + JelloExtractor: JelloExtractor, +} diff --git a/airbyte-cdk/python/airbyte_cdk/sources/declarative/parsers/factory.py b/airbyte-cdk/python/airbyte_cdk/sources/declarative/parsers/factory.py index 011a26bc5ea3..4b0e4776579e 100644 --- a/airbyte-cdk/python/airbyte_cdk/sources/declarative/parsers/factory.py +++ b/airbyte-cdk/python/airbyte_cdk/sources/declarative/parsers/factory.py @@ -6,10 +6,12 @@ import copy import importlib -from typing import Any, Mapping +from 
typing import Any, Mapping, Type, Union, get_type_hints from airbyte_cdk.sources.declarative.create_partial import create from airbyte_cdk.sources.declarative.interpolation.jinja import JinjaInterpolation +from airbyte_cdk.sources.declarative.parsers.class_types_registry import CLASS_TYPES_REGISTRY +from airbyte_cdk.sources.declarative.parsers.default_implementation_registry import DEFAULT_IMPLEMENTATIONS_REGISTRY from airbyte_cdk.sources.declarative.types import Config @@ -28,41 +30,88 @@ def create_component(self, component_definition: Mapping[str, Any], config: Conf class_name = kwargs.pop("class_name") return self.build(class_name, config, **kwargs) - def build(self, class_name: str, config, **kwargs): - fqcn = class_name - split = fqcn.split(".") - module = ".".join(split[:-1]) - class_name = split[-1] + def build(self, class_or_class_name: Union[str, Type], config, **kwargs): + if isinstance(class_or_class_name, str): + class_ = self._get_class_from_fully_qualified_class_name(class_or_class_name) + else: + class_ = class_or_class_name # create components in options before propagating them if "options" in kwargs: - kwargs["options"] = {k: self._create_subcomponent(v, kwargs, config) for k, v in kwargs["options"].items()} + kwargs["options"] = {k: self._create_subcomponent(k, v, kwargs, config, class_) for k, v in kwargs["options"].items()} - updated_kwargs = {k: self._create_subcomponent(v, kwargs, config) for k, v in kwargs.items()} + updated_kwargs = {k: self._create_subcomponent(k, v, kwargs, config, class_) for k, v in kwargs.items()} - class_ = getattr(importlib.import_module(module), class_name) return create(class_, config=config, **updated_kwargs) - def _merge_dicts(self, d1, d2): + @staticmethod + def _get_class_from_fully_qualified_class_name(class_name: str): + split = class_name.split(".") + module = ".".join(split[:-1]) + class_name = split[-1] + return getattr(importlib.import_module(module), class_name) + + @staticmethod + def _merge_dicts(d1, 
d2): return {**d1, **d2} - def _create_subcomponent(self, v, kwargs, config): - if isinstance(v, dict) and "class_name" in v: + def _create_subcomponent(self, key, definition, kwargs, config, parent_class): + """ + There are 5 ways to define a component. + 1. dict with "class_name" field -> create an object of type "class_name" + 2. dict with "type" field -> lookup the `CLASS_TYPES_REGISTRY` to find the type of object and create an object of that type + 3. a dict with a type that can be inferred. If the parent class's constructor has type hints, we can infer the type of the object to create by looking up the `DEFAULT_IMPLEMENTATIONS_REGISTRY` map + 4. list: loop over the list and create objects for its items + 5. anything else -> return as is + """ + if self.is_object_definition_with_class_name(definition): # propagate kwargs to inner objects - v["options"] = self._merge_dicts(kwargs.get("options", dict()), v.get("options", dict())) + definition["options"] = self._merge_dicts(kwargs.get("options", dict()), definition.get("options", dict())) - return self.create_component(v, config)() - elif isinstance(v, list): + return self.create_component(definition, config)() + elif self.is_object_definition_with_type(definition): + # If type is set instead of class_name, get the class_name from the CLASS_TYPES_REGISTRY + definition["options"] = self._merge_dicts(kwargs.get("options", dict()), definition.get("options", dict())) + object_type = definition.pop("type") + class_name = CLASS_TYPES_REGISTRY[object_type] + definition["class_name"] = class_name + return self.create_component(definition, config)() + elif isinstance(definition, dict): + # Try to infer object type + expected_type = self.get_default_type(key, parent_class) + if expected_type: + definition["class_name"] = expected_type + definition["options"] = self._merge_dicts(kwargs.get("options", dict()), definition.get("options", dict())) + return self.create_component(definition, config)() + else: + return definition 
+ elif isinstance(definition, list): return [ self._create_subcomponent( - sub, self._merge_dicts(kwargs.get("options", dict()), self._get_subcomponent_options(sub)), config + key, sub, self._merge_dicts(kwargs.get("options", dict()), self._get_subcomponent_options(sub)), config, parent_class ) - for sub in v + for sub in definition ] else: - return v + return definition + + @staticmethod + def is_object_definition_with_class_name(definition): + return isinstance(definition, dict) and "class_name" in definition + + @staticmethod + def is_object_definition_with_type(definition): + return isinstance(definition, dict) and "type" in definition + + @staticmethod + def get_default_type(parameter_name, parent_class): + type_hints = get_type_hints(parent_class.__init__) + interface = type_hints.get(parameter_name) + expected_type = DEFAULT_IMPLEMENTATIONS_REGISTRY.get(interface) + return expected_type - def _get_subcomponent_options(self, sub: Any): + @staticmethod + def _get_subcomponent_options(sub: Any): if isinstance(sub, dict): return sub.get("options", {}) else: diff --git a/airbyte-cdk/python/airbyte_cdk/sources/declarative/requesters/http_requester.py b/airbyte-cdk/python/airbyte_cdk/sources/declarative/requesters/http_requester.py index 92e29eec4307..b83fa78057bd 100644 --- a/airbyte-cdk/python/airbyte_cdk/sources/declarative/requesters/http_requester.py +++ b/airbyte-cdk/python/airbyte_cdk/sources/declarative/requesters/http_requester.py @@ -6,15 +6,12 @@ import requests from airbyte_cdk.sources.declarative.interpolation.interpolated_string import InterpolatedString -from airbyte_cdk.sources.declarative.requesters.request_headers.interpolated_request_header_provider import ( - InterpolatedRequestHeaderProvider, +from airbyte_cdk.sources.declarative.requesters.request_options.interpolated_request_options_provider import ( + InterpolatedRequestOptionsProvider, ) -from airbyte_cdk.sources.declarative.requesters.request_headers.request_header_provider import 
RequestHeaderProvider -from airbyte_cdk.sources.declarative.requesters.request_params.interpolated_request_parameter_provider import ( - InterpolatedRequestParameterProvider, -) -from airbyte_cdk.sources.declarative.requesters.request_params.request_parameters_provider import RequestParameterProvider +from airbyte_cdk.sources.declarative.requesters.request_options.request_options_provider import RequestOptionsProvider from airbyte_cdk.sources.declarative.requesters.requester import HttpMethod, Requester +from airbyte_cdk.sources.declarative.requesters.retriers.default_retrier import DefaultRetrier from airbyte_cdk.sources.declarative.requesters.retriers.retrier import Retrier from airbyte_cdk.sources.declarative.types import Config from airbyte_cdk.sources.streams.http.auth import HttpAuthenticator @@ -27,17 +24,16 @@ def __init__( name: str, url_base: [str, InterpolatedString], path: [str, InterpolatedString], - http_method: Union[str, HttpMethod], - request_parameters_provider: RequestParameterProvider = None, - request_headers_provider: RequestHeaderProvider = None, + http_method: Union[str, HttpMethod] = HttpMethod.GET, + request_options_provider: Optional[RequestOptionsProvider] = None, authenticator: HttpAuthenticator, - retrier: Retrier, + retrier: Optional[Retrier] = None, config: Config, ): - if request_parameters_provider is None: - request_parameters_provider = InterpolatedRequestParameterProvider(config=config, request_headers={}) - if request_headers_provider is None: - request_headers_provider = InterpolatedRequestHeaderProvider(config=config, request_headers={}) + if request_options_provider is None: + request_options_provider = InterpolatedRequestOptionsProvider(config=config) + elif isinstance(request_options_provider, dict): + request_options_provider = InterpolatedRequestOptionsProvider(config=config, **request_options_provider) self._name = name self._authenticator = authenticator if type(url_base) == str: @@ -49,16 +45,10 @@ def __init__( if 
type(http_method) == str: http_method = HttpMethod[http_method] self._method = http_method - self._request_parameters_provider = request_parameters_provider - self._request_headers_provider = request_headers_provider - self._retrier = retrier + self._request_options_provider = request_options_provider + self._retrier = retrier or DefaultRetrier() self._config = config - def request_params( - self, stream_state: Mapping[str, Any], stream_slice: Mapping[str, Any] = None, next_page_token: Mapping[str, Any] = None - ) -> MutableMapping[str, Any]: - return self._request_parameters_provider.request_params(stream_state, stream_slice, next_page_token) - def get_authenticator(self): return self._authenticator @@ -92,28 +82,30 @@ def should_retry(self, response: requests.Response) -> bool: def backoff_time(self, response: requests.Response) -> Optional[float]: return self._retrier.backoff_time(response) + def request_params( + self, stream_state: Mapping[str, Any], stream_slice: Mapping[str, Any] = None, next_page_token: Mapping[str, Any] = None + ) -> MutableMapping[str, Any]: + return self._request_options_provider.request_params(stream_state, stream_slice, next_page_token) + def request_headers( self, stream_state: Mapping[str, Any], stream_slice: Mapping[str, Any] = None, next_page_token: Mapping[str, Any] = None ) -> Mapping[str, Any]: - return self._request_headers_provider.request_headers(stream_state, stream_slice, next_page_token) + return self._request_options_provider.request_headers(stream_state, stream_slice, next_page_token) def request_body_data( self, stream_state: Mapping[str, Any], stream_slice: Mapping[str, Any] = None, next_page_token: Mapping[str, Any] = None ) -> Optional[Union[Mapping, str]]: - # FIXME: this should be declarative - return dict() + return self._request_options_provider.request_body_data(stream_state, stream_slice, next_page_token) def request_body_json( self, stream_state: Mapping[str, Any], stream_slice: Mapping[str, Any] = None, 
next_page_token: Mapping[str, Any] = None ) -> Optional[Mapping]: - # FIXME: this should be declarative - return dict() + return self._request_options_provider.request_body_json(stream_state, stream_slice, next_page_token) def request_kwargs( self, stream_state: Mapping[str, Any], stream_slice: Mapping[str, Any] = None, next_page_token: Mapping[str, Any] = None ) -> Mapping[str, Any]: - # FIXME: this should be declarative - return dict() + return self._request_options_provider.request_kwargs(stream_state, stream_slice, next_page_token) @property def cache_filename(self) -> str: diff --git a/airbyte-cdk/python/airbyte_cdk/sources/declarative/requesters/interpolated_request_input_provider.py b/airbyte-cdk/python/airbyte_cdk/sources/declarative/requesters/interpolated_request_input_provider.py index 43dbbc8aeda0..cf8063fba5c4 100644 --- a/airbyte-cdk/python/airbyte_cdk/sources/declarative/requesters/interpolated_request_input_provider.py +++ b/airbyte-cdk/python/airbyte_cdk/sources/declarative/requesters/interpolated_request_input_provider.py @@ -2,27 +2,35 @@ # Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
# -from typing import Any, Mapping, MutableMapping +from typing import Any, Mapping, Union from airbyte_cdk.sources.declarative.interpolation.interpolated_mapping import InterpolatedMapping +from airbyte_cdk.sources.declarative.interpolation.interpolated_string import InterpolatedString from airbyte_cdk.sources.declarative.interpolation.jinja import JinjaInterpolation class InterpolatedRequestInputProvider: """ - Helper class that generically performs string interpolation on the provided dictionary input + Helper class that generically performs string interpolation on the provided dictionary or string input """ def __init__(self, *, config, request_inputs=None): + self._config = config + if request_inputs is None: request_inputs = {} - self._interpolator = InterpolatedMapping(request_inputs, JinjaInterpolation()) - self._config = config + if isinstance(request_inputs, str): + self._interpolator = InterpolatedString(request_inputs, "") + else: + self._interpolator = InterpolatedMapping(request_inputs, JinjaInterpolation()) def request_inputs( self, stream_state: Mapping[str, Any], stream_slice: Mapping[str, Any] = None, next_page_token: Mapping[str, Any] = None - ) -> MutableMapping[str, Any]: + ) -> Union[Mapping, str]: kwargs = {"stream_state": stream_state, "stream_slice": stream_slice, "next_page_token": next_page_token} - interpolated_values = self._interpolator.eval(self._config, **kwargs) # dig into this function a little more - non_null_tokens = {k: v for k, v in interpolated_values.items() if v} - return non_null_tokens + interpolated_value = self._interpolator.eval(self._config, **kwargs) + + if isinstance(interpolated_value, dict): + non_null_tokens = {k: v for k, v in interpolated_value.items() if v} + return non_null_tokens + return interpolated_value diff --git a/airbyte-cdk/python/airbyte_cdk/sources/declarative/requesters/paginators/conditional_paginator.py 
b/airbyte-cdk/python/airbyte_cdk/sources/declarative/requesters/paginators/conditional_paginator.py index ce938d53b9e5..e7c6254be15d 100644 --- a/airbyte-cdk/python/airbyte_cdk/sources/declarative/requesters/paginators/conditional_paginator.py +++ b/airbyte-cdk/python/airbyte_cdk/sources/declarative/requesters/paginators/conditional_paginator.py @@ -15,8 +15,8 @@ class ConditionalPaginator: A paginator that performs pagination by incrementing a page number and stops based on a provided stop condition. """ - def __init__(self, stop_condition_template: str, state: DictState, decoder: Decoder, config): - self._stop_condition_template = InterpolatedBoolean(stop_condition_template) + def __init__(self, stop_condition: str, state: DictState, decoder: Decoder, config): + self._stop_condition_interpolator = InterpolatedBoolean(stop_condition) self._state: DictState = state self._decoder = decoder self._config = config @@ -24,7 +24,7 @@ def __init__(self, stop_condition_template: str, state: DictState, decoder: Deco def next_page_token(self, response: requests.Response, last_records: List[Mapping[str, Any]]) -> Optional[Mapping[str, Any]]: decoded_response = self._decoder.decode(response) headers = response.headers - should_stop = self._stop_condition_template.eval( + should_stop = self._stop_condition_interpolator.eval( self._config, decoded_response=decoded_response, headers=headers, last_records=last_records ) diff --git a/airbyte-cdk/python/airbyte_cdk/sources/declarative/requesters/paginators/interpolated_paginator.py b/airbyte-cdk/python/airbyte_cdk/sources/declarative/requesters/paginators/interpolated_paginator.py index 8031209ac4ef..5b786d24452b 100644 --- a/airbyte-cdk/python/airbyte_cdk/sources/declarative/requesters/paginators/interpolated_paginator.py +++ b/airbyte-cdk/python/airbyte_cdk/sources/declarative/requesters/paginators/interpolated_paginator.py @@ -6,15 +6,17 @@ import requests from airbyte_cdk.sources.declarative.decoders.decoder import Decoder +from 
airbyte_cdk.sources.declarative.decoders.json_decoder import JsonDecoder from airbyte_cdk.sources.declarative.interpolation.interpolated_mapping import InterpolatedMapping from airbyte_cdk.sources.declarative.interpolation.jinja import JinjaInterpolation from airbyte_cdk.sources.declarative.requesters.paginators.paginator import Paginator +from airbyte_cdk.sources.declarative.types import Config class InterpolatedPaginator(Paginator): - def __init__(self, next_page_token_template: Mapping[str, str], decoder: Decoder, config): + def __init__(self, *, next_page_token_template: Mapping[str, str], config: Config, decoder: Optional[Decoder] = None): self._next_page_token_template = InterpolatedMapping(next_page_token_template, JinjaInterpolation()) - self._decoder = decoder + self._decoder = decoder or JsonDecoder() self._config = config def next_page_token(self, response: requests.Response, last_records: List[Mapping[str, Any]]) -> Optional[Mapping[str, Any]]: diff --git a/airbyte-cdk/python/airbyte_cdk/sources/declarative/requesters/paginators/next_page_url_paginator.py b/airbyte-cdk/python/airbyte_cdk/sources/declarative/requesters/paginators/next_page_url_paginator.py index ea7a33fb66b6..89f76eb34e04 100644 --- a/airbyte-cdk/python/airbyte_cdk/sources/declarative/requesters/paginators/next_page_url_paginator.py +++ b/airbyte-cdk/python/airbyte_cdk/sources/declarative/requesters/paginators/next_page_url_paginator.py @@ -7,14 +7,29 @@ import requests from airbyte_cdk.sources.declarative.requesters.paginators.interpolated_paginator import InterpolatedPaginator from airbyte_cdk.sources.declarative.requesters.paginators.paginator import Paginator +from airbyte_cdk.sources.declarative.types import Config class NextPageUrlPaginator(Paginator): - def __init__(self, url_base: str = None, interpolated_paginator: InterpolatedPaginator = None, kwargs=None): - if kwargs is None: - kwargs = dict() - self._url_base = url_base or kwargs.get("url_base") - 
self._interpolated_paginator = interpolated_paginator or kwargs.get("interpolated_paginator") + """ + A paginator wrapper that delegates to an inner paginator and removes the base url from the next_page_token to only return the path to the next page + """ + + def __init__( + self, + url_base: str = None, + next_page_token_template: Optional[Mapping[str, str]] = None, + config: Optional[Config] = None, + ): + """ + :param url_base: url base to remove from the token + :param interpolated_paginator: optional paginator to delegate to + :param next_page_token_template: optional mapping to delegate to if interpolated_paginator is None + :param config: connection config + """ + + self._url_base = url_base + self._interpolated_paginator = InterpolatedPaginator(next_page_token_template=next_page_token_template, config=config) def next_page_token(self, response: requests.Response, last_records: List[Mapping[str, Any]]) -> Optional[Mapping[str, Any]]: next_page_token = self._interpolated_paginator.next_page_token(response, last_records) diff --git a/airbyte-cdk/python/airbyte_cdk/sources/declarative/requesters/request_options/__init__.py b/airbyte-cdk/python/airbyte_cdk/sources/declarative/requesters/request_options/__init__.py new file mode 100644 index 000000000000..1100c1c58cf5 --- /dev/null +++ b/airbyte-cdk/python/airbyte_cdk/sources/declarative/requesters/request_options/__init__.py @@ -0,0 +1,3 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+# diff --git a/airbyte-cdk/python/airbyte_cdk/sources/declarative/requesters/request_options/interpolated_request_options_provider.py b/airbyte-cdk/python/airbyte_cdk/sources/declarative/requesters/request_options/interpolated_request_options_provider.py new file mode 100644 index 000000000000..1fcb5fe58890 --- /dev/null +++ b/airbyte-cdk/python/airbyte_cdk/sources/declarative/requesters/request_options/interpolated_request_options_provider.py @@ -0,0 +1,58 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + +from typing import Any, Mapping, MutableMapping, Optional, Union + +from airbyte_cdk.sources.declarative.requesters.interpolated_request_input_provider import InterpolatedRequestInputProvider +from airbyte_cdk.sources.declarative.requesters.request_options.request_options_provider import RequestOptionsProvider + + +class InterpolatedRequestOptionsProvider(RequestOptionsProvider): + def __init__(self, *, config, request_parameters=None, request_headers=None, request_body_data=None, request_body_json=None): + if request_parameters is None: + request_parameters = {} + if request_headers is None: + request_headers = {} + if request_body_data is None: + request_body_data = "" + if request_body_json is None: + request_body_json = {} + + if request_body_json and request_body_data: + raise ValueError("RequestOptionsProvider should only contain either 'request_body_data' or 'request_body_json' not both") + + self._parameter_interpolator = InterpolatedRequestInputProvider(config=config, request_inputs=request_parameters) + self._headers_interpolator = InterpolatedRequestInputProvider(config=config, request_inputs=request_headers) + self._body_data_interpolator = InterpolatedRequestInputProvider(config=config, request_inputs=request_body_data) + self._body_json_interpolator = InterpolatedRequestInputProvider(config=config, request_inputs=request_body_json) + + def request_params( + self, stream_state: Mapping[str, Any], stream_slice: Mapping[str, Any] = 
None, next_page_token: Mapping[str, Any] = None + ) -> MutableMapping[str, Any]: + interpolated_value = self._parameter_interpolator.request_inputs(stream_state, stream_slice, next_page_token) + if isinstance(interpolated_value, dict): + return interpolated_value + return {} + + def request_headers( + self, stream_state: Mapping[str, Any], stream_slice: Mapping[str, Any] = None, next_page_token: Mapping[str, Any] = None + ) -> Mapping[str, Any]: + return self._headers_interpolator.request_inputs(stream_state, stream_slice, next_page_token) + + def request_body_data( + self, stream_state: Mapping[str, Any], stream_slice: Mapping[str, Any] = None, next_page_token: Mapping[str, Any] = None + ) -> Optional[Union[Mapping, str]]: + return self._body_data_interpolator.request_inputs(stream_state, stream_slice, next_page_token) + + def request_body_json( + self, stream_state: Mapping[str, Any], stream_slice: Mapping[str, Any] = None, next_page_token: Mapping[str, Any] = None + ) -> Optional[Mapping]: + return self._body_json_interpolator.request_inputs(stream_state, stream_slice, next_page_token) + + def request_kwargs( + self, stream_state: Mapping[str, Any], stream_slice: Mapping[str, Any] = None, next_page_token: Mapping[str, Any] = None + ) -> Mapping[str, Any]: + # todo: there are a few integrations that override the request_kwargs() method, but the use case for why kwargs over existing + # constructs is a little unclear. 
We may revisit this, but for now lets leave it out of the DSL + return {} diff --git a/airbyte-cdk/python/airbyte_cdk/sources/declarative/requesters/request_options/request_options_provider.py b/airbyte-cdk/python/airbyte_cdk/sources/declarative/requesters/request_options/request_options_provider.py new file mode 100644 index 000000000000..3c211df24127 --- /dev/null +++ b/airbyte-cdk/python/airbyte_cdk/sources/declarative/requesters/request_options/request_options_provider.py @@ -0,0 +1,38 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + +from abc import ABC, abstractmethod +from typing import Any, Mapping, MutableMapping, Optional, Union + + +class RequestOptionsProvider(ABC): + @abstractmethod + def request_params( + self, stream_state: Mapping[str, Any], stream_slice: Mapping[str, Any] = None, next_page_token: Mapping[str, Any] = None + ) -> MutableMapping[str, Any]: + pass + + @abstractmethod + def request_body_data( + self, stream_state: Mapping[str, Any], stream_slice: Mapping[str, Any] = None, next_page_token: Mapping[str, Any] = None + ) -> Optional[Union[Mapping, str]]: + pass + + @abstractmethod + def request_body_json( + self, stream_state: Mapping[str, Any], stream_slice: Mapping[str, Any] = None, next_page_token: Mapping[str, Any] = None + ) -> Optional[Mapping]: + pass + + @abstractmethod + def request_kwargs( + self, stream_state: Mapping[str, Any], stream_slice: Mapping[str, Any] = None, next_page_token: Mapping[str, Any] = None + ) -> Mapping[str, Any]: + pass + + @abstractmethod + def request_headers( + self, stream_state: Mapping[str, Any], stream_slice: Mapping[str, Any] = None, next_page_token: Mapping[str, Any] = None + ) -> Mapping[str, Any]: + pass diff --git a/airbyte-cdk/python/airbyte_cdk/sources/declarative/requesters/request_params/interpolated_request_parameter_provider.py b/airbyte-cdk/python/airbyte_cdk/sources/declarative/requesters/request_params/interpolated_request_parameter_provider.py deleted file mode 
100644 index 17afe7d9feca..000000000000 --- a/airbyte-cdk/python/airbyte_cdk/sources/declarative/requesters/request_params/interpolated_request_parameter_provider.py +++ /dev/null @@ -1,20 +0,0 @@ -# -# Copyright (c) 2022 Airbyte, Inc., all rights reserved. -# - -from typing import Any, Mapping, MutableMapping - -from airbyte_cdk.sources.declarative.requesters.interpolated_request_input_provider import InterpolatedRequestInputProvider -from airbyte_cdk.sources.declarative.requesters.request_params.request_parameters_provider import RequestParameterProvider - - -class InterpolatedRequestParameterProvider(RequestParameterProvider): - def __init__(self, *, config, request_parameters=None): - if request_parameters is None: - request_parameters = {} - self._interpolator = InterpolatedRequestInputProvider(config=config, request_inputs=request_parameters) - - def request_params( - self, stream_state: Mapping[str, Any], stream_slice: Mapping[str, Any] = None, next_page_token: Mapping[str, Any] = None - ) -> MutableMapping[str, Any]: - return self._interpolator.request_inputs(stream_state, stream_slice, next_page_token) diff --git a/airbyte-cdk/python/airbyte_cdk/sources/declarative/requesters/request_params/request_parameters_provider.py b/airbyte-cdk/python/airbyte_cdk/sources/declarative/requesters/request_params/request_parameters_provider.py deleted file mode 100644 index 30f1431695eb..000000000000 --- a/airbyte-cdk/python/airbyte_cdk/sources/declarative/requesters/request_params/request_parameters_provider.py +++ /dev/null @@ -1,14 +0,0 @@ -# -# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
-# - -from abc import ABC, abstractmethod -from typing import Any, Mapping, MutableMapping - - -class RequestParameterProvider(ABC): - @abstractmethod - def request_params( - self, stream_state: Mapping[str, Any], stream_slice: Mapping[str, Any] = None, next_page_token: Mapping[str, Any] = None - ) -> MutableMapping[str, Any]: - pass diff --git a/airbyte-cdk/python/airbyte_cdk/sources/declarative/requesters/requester.py b/airbyte-cdk/python/airbyte_cdk/sources/declarative/requesters/requester.py index 2dba6311415a..7ce5f3aeeb81 100644 --- a/airbyte-cdk/python/airbyte_cdk/sources/declarative/requesters/requester.py +++ b/airbyte-cdk/python/airbyte_cdk/sources/declarative/requesters/requester.py @@ -12,6 +12,7 @@ class HttpMethod(Enum): GET = "GET" + POST = "POST" class Requester(ABC): diff --git a/airbyte-cdk/python/airbyte_cdk/sources/declarative/retrievers/simple_retriever.py b/airbyte-cdk/python/airbyte_cdk/sources/declarative/retrievers/simple_retriever.py index 1ffe2546f85f..cd22ec622862 100644 --- a/airbyte-cdk/python/airbyte_cdk/sources/declarative/retrievers/simple_retriever.py +++ b/airbyte-cdk/python/airbyte_cdk/sources/declarative/retrievers/simple_retriever.py @@ -6,11 +6,13 @@ import requests from airbyte_cdk.models import SyncMode -from airbyte_cdk.sources.declarative.extractors.http_extractor import HttpExtractor +from airbyte_cdk.sources.declarative.extractors.http_selector import HttpSelector from airbyte_cdk.sources.declarative.requesters.paginators.paginator import Paginator from airbyte_cdk.sources.declarative.requesters.requester import Requester from airbyte_cdk.sources.declarative.retrievers.retriever import Retriever +from airbyte_cdk.sources.declarative.states.dict_state import DictState from airbyte_cdk.sources.declarative.states.state import State +from airbyte_cdk.sources.declarative.stream_slicers.single_slice import SingleSlice from airbyte_cdk.sources.declarative.stream_slicers.stream_slicer import StreamSlicer from 
airbyte_cdk.sources.streams.http import HttpStream @@ -22,18 +24,18 @@ def __init__( primary_key, requester: Requester, paginator: Paginator, - extractor: HttpExtractor, - stream_slicer: StreamSlicer, - state: State, + record_selector: HttpSelector, + stream_slicer: Optional[StreamSlicer] = SingleSlice, + state: Optional[State] = None, ): self._name = name self._primary_key = primary_key self._paginator = paginator self._requester = requester - self._extractor = extractor + self._record_selector = record_selector super().__init__(self._requester.get_authenticator()) self._iterator: StreamSlicer = stream_slicer - self._state: State = state.deep_copy() + self._state: State = (state or DictState()).deep_copy() self._last_response = None self._last_records = None @@ -190,7 +192,9 @@ def parse_response( next_page_token: Mapping[str, Any] = None, ) -> Iterable[Mapping]: self._last_response = response - records = self._extractor.extract_records(response) + records = self._record_selector.select_records( + response=response, stream_state=stream_state, stream_slice=stream_slice, next_page_token=next_page_token + ) self._last_records = records return records diff --git a/airbyte-cdk/python/airbyte_cdk/sources/declarative/stream_slicers/cartesian_product_stream_slicer.py b/airbyte-cdk/python/airbyte_cdk/sources/declarative/stream_slicers/cartesian_product_stream_slicer.py new file mode 100644 index 000000000000..a627dd353df5 --- /dev/null +++ b/airbyte-cdk/python/airbyte_cdk/sources/declarative/stream_slicers/cartesian_product_stream_slicer.py @@ -0,0 +1,35 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+# + +import itertools +from collections import ChainMap +from typing import Any, Iterable, List, Mapping + +from airbyte_cdk.models import SyncMode +from airbyte_cdk.sources.declarative.stream_slicers.stream_slicer import StreamSlicer + + +class CartesianProductStreamSlicer(StreamSlicer): + """ + Stream slicers that iterates over the cartesian product of input stream slicers + Given 2 stream slicers with the following slices: + A: [{"i": 0}, {"i": 1}, {"i": 2}] + B: [{"s": "hello"}, {"s": "world"}] + the resulting stream slices are + [ + {"i": 0, "s": "hello"}, + {"i": 0, "s": "world"}, + {"i": 1, "s": "hello"}, + {"i": 1, "s": "world"}, + {"i": 2, "s": "hello"}, + {"i": 2, "s": "world"}, + ] + """ + + def __init__(self, stream_slicers: List[StreamSlicer]): + self._stream_slicers = stream_slicers + + def stream_slices(self, sync_mode: SyncMode, stream_state: Mapping[str, Any]) -> Iterable[Mapping[str, Any]]: + sub_slices = (s.stream_slices(sync_mode, stream_state) for s in self._stream_slicers) + return (ChainMap(*a) for a in itertools.product(*sub_slices)) diff --git a/airbyte-cdk/python/airbyte_cdk/sources/declarative/stream_slicers/list_stream_slicer.py b/airbyte-cdk/python/airbyte_cdk/sources/declarative/stream_slicers/list_stream_slicer.py new file mode 100644 index 000000000000..c68e9fa77ef0 --- /dev/null +++ b/airbyte-cdk/python/airbyte_cdk/sources/declarative/stream_slicers/list_stream_slicer.py @@ -0,0 +1,30 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+# + +import ast +from typing import Any, Iterable, List, Mapping, Union + +from airbyte_cdk.models import SyncMode +from airbyte_cdk.sources.declarative.interpolation.interpolated_mapping import InterpolatedMapping +from airbyte_cdk.sources.declarative.interpolation.jinja import JinjaInterpolation +from airbyte_cdk.sources.declarative.stream_slicers.stream_slicer import StreamSlicer +from airbyte_cdk.sources.declarative.types import Config + + +class ListStreamSlicer(StreamSlicer): + """ + Stream slicer that iterates over the values of a list + If slice_values is a string, then evaluate it as literal and assert the resulting literal is a list + """ + + def __init__(self, slice_values: Union[str, List[str]], slice_definition: Mapping[str, Any], config: Config): + if isinstance(slice_values, str): + slice_values = ast.literal_eval(slice_values) + assert isinstance(slice_values, list) + self._interpolation = InterpolatedMapping(slice_definition, JinjaInterpolation()) + self._slice_values = slice_values + self._config = config + + def stream_slices(self, sync_mode: SyncMode, stream_state: Mapping[str, Any]) -> Iterable[Mapping[str, Any]]: + return [self._interpolation.eval(self._config, slice_value=slice_value) for slice_value in self._slice_values] diff --git a/airbyte-cdk/python/airbyte_cdk/sources/declarative/yaml_declarative_source.py b/airbyte-cdk/python/airbyte_cdk/sources/declarative/yaml_declarative_source.py index 3184e9e942f8..ea713b2e8c64 100644 --- a/airbyte-cdk/python/airbyte_cdk/sources/declarative/yaml_declarative_source.py +++ b/airbyte-cdk/python/airbyte_cdk/sources/declarative/yaml_declarative_source.py @@ -17,9 +17,16 @@ def __init__(self, path_to_yaml): @property def connection_checker(self): - return self._factory.create_component(self._source_config["check"], dict())(source=self) + check = self._source_config["check"] + if "class_name" not in check: + check["class_name"] = "airbyte_cdk.sources.declarative.checks.check_stream.CheckStream" + return 
self._factory.create_component(check, dict())(source=self) def streams(self, config: Mapping[str, Any]) -> List[Stream]: + stream_configs = self._source_config["streams"] + for s in stream_configs: + if "class_name" not in s: + s["class_name"] = "airbyte_cdk.sources.declarative.declarative_stream.DeclarativeStream" return [self._factory.create_component(stream_config, config)() for stream_config in self._source_config["streams"]] def _read_and_parse_yaml_file(self, path_to_yaml_file): diff --git a/airbyte-cdk/python/airbyte_cdk/sources/streams/http/auth/__init__.py b/airbyte-cdk/python/airbyte_cdk/sources/streams/http/auth/__init__.py index 32a5245229e9..494c395d3ad3 100644 --- a/airbyte-cdk/python/airbyte_cdk/sources/streams/http/auth/__init__.py +++ b/airbyte-cdk/python/airbyte_cdk/sources/streams/http/auth/__init__.py @@ -1,13 +1,14 @@ # -# Copyright (c) 2021 Airbyte, Inc., all rights reserved. +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. # # Initialize Auth Package from .core import HttpAuthenticator, NoAuth from .oauth import Oauth2Authenticator -from .token import MultipleTokenAuthenticator, TokenAuthenticator +from .token import BasicHttpAuthenticator, MultipleTokenAuthenticator, TokenAuthenticator __all__ = [ + "BasicHttpAuthenticator", "HttpAuthenticator", "NoAuth", "Oauth2Authenticator", diff --git a/airbyte-cdk/python/airbyte_cdk/sources/streams/http/auth/oauth.py b/airbyte-cdk/python/airbyte_cdk/sources/streams/http/auth/oauth.py index e66e0be8bded..2ec43ed5a425 100644 --- a/airbyte-cdk/python/airbyte_cdk/sources/streams/http/auth/oauth.py +++ b/airbyte-cdk/python/airbyte_cdk/sources/streams/http/auth/oauth.py @@ -3,7 +3,7 @@ # -from typing import Any, List, Mapping, MutableMapping, Tuple +from typing import Any, List, Mapping, MutableMapping, Optional, Tuple import pendulum import requests @@ -26,7 +26,8 @@ def __init__( client_secret: str, refresh_token: str, scopes: List[str] = None, - refresh_access_token_headers: Mapping[str, Any] = 
None, + refresh_access_token_headers: Optional[Mapping[str, Any]] = None, + refresh_access_token_authenticator: Optional[HttpAuthenticator] = None, ): self.token_refresh_endpoint = token_refresh_endpoint self.client_secret = client_secret @@ -34,6 +35,7 @@ def __init__( self.refresh_token = refresh_token self.scopes = scopes self.refresh_access_token_headers = refresh_access_token_headers + self.refresh_access_token_authenticator = refresh_access_token_authenticator self._token_expiry_date = pendulum.now().subtract(days=1) self._access_token = None @@ -76,10 +78,19 @@ def refresh_access_token(self) -> Tuple[str, int]: method="POST", url=self.token_refresh_endpoint, data=self.get_refresh_request_body(), - headers=self.refresh_access_token_headers, + headers=self.get_refresh_access_token_headers(), ) response.raise_for_status() response_json = response.json() return response_json["access_token"], response_json["expires_in"] except Exception as e: raise Exception(f"Error while refreshing access token: {e}") from e + + def get_refresh_access_token_headers(self): + headers = {} + if self.refresh_access_token_headers: + headers = self.refresh_access_token_headers + if self.refresh_access_token_authenticator: + refresh_auth_headers = self.refresh_access_token_authenticator.get_auth_header() + headers.update(refresh_auth_headers) + return headers diff --git a/airbyte-cdk/python/airbyte_cdk/sources/streams/http/auth/token.py b/airbyte-cdk/python/airbyte_cdk/sources/streams/http/auth/token.py index f9a2d4454a3a..938ec27f87f5 100644 --- a/airbyte-cdk/python/airbyte_cdk/sources/streams/http/auth/token.py +++ b/airbyte-cdk/python/airbyte_cdk/sources/streams/http/auth/token.py @@ -3,6 +3,7 @@ # +import base64 from itertools import cycle from typing import Any, List, Mapping @@ -32,3 +33,15 @@ def __init__(self, tokens: List[str], auth_method: str = "Bearer", auth_header: def get_auth_header(self) -> Mapping[str, Any]: return {self.auth_header: f"{self.auth_method} 
{next(self._tokens_iter)}"} + + +class BasicHttpAuthenticator(TokenAuthenticator): + """ + Builds auth based off the basic authentication scheme as defined by RFC 7617, which transmits credentials as USER ID/password pairs, encoded using bas64 + https://developer.mozilla.org/en-US/docs/Web/HTTP/Authentication#basic_authentication_scheme + """ + + def __init__(self, username: str, password: str, auth_method: str = "Basic", auth_header: str = "Authorization"): + auth_string = f"{username}:{password}".encode("utf8") + b64_encoded = base64.b64encode(auth_string).decode("utf8") + super().__init__(b64_encoded, auth_method, auth_header) diff --git a/airbyte-cdk/python/airbyte_cdk/sources/streams/http/http.py b/airbyte-cdk/python/airbyte_cdk/sources/streams/http/http.py index 528711ea6bd1..e89d590026fe 100644 --- a/airbyte-cdk/python/airbyte_cdk/sources/streams/http/http.py +++ b/airbyte-cdk/python/airbyte_cdk/sources/streams/http/http.py @@ -14,7 +14,6 @@ import vcr.cassette as Cassette from airbyte_cdk.models import SyncMode from airbyte_cdk.sources.streams.core import Stream -from airbyte_cdk.sources.utils.sentry import AirbyteSentry from requests.auth import AuthBase from .auth.core import HttpAuthenticator, NoAuth @@ -22,7 +21,7 @@ from .rate_limiting import default_backoff_handler, user_defined_backoff_handler # list of all possible HTTP methods which can be used for sending of request bodies -BODY_REQUEST_METHODS = ("POST", "PUT", "PATCH") +BODY_REQUEST_METHODS = ("GET", "POST", "PUT", "PATCH") logging.getLogger("vcr").setLevel(logging.ERROR) @@ -248,7 +247,12 @@ def backoff_time(self, response: requests.Response) -> Optional[float]: return None def _create_prepared_request( - self, path: str, headers: Mapping = None, params: Mapping = None, json: Any = None, data: Any = None + self, + path: str, + headers: Mapping = None, + params: Mapping = None, + json: Any = None, + data: Any = None, ) -> requests.PreparedRequest: args = {"method": self.http_method, "url": 
urljoin(self.url_base, path), "headers": headers, "params": params} if self.http_method.upper() in BODY_REQUEST_METHODS: @@ -282,9 +286,7 @@ def _send(self, request: requests.PreparedRequest, request_kwargs: Mapping[str, Unexpected transient exceptions use the default backoff parameters. Unexpected persistent exceptions are not handled and will cause the sync to fail. """ - AirbyteSentry.add_breadcrumb(message=f"Issue {request.url}", data=request_kwargs) - with AirbyteSentry.start_transaction_span(op="_send", description=request.url): - response: requests.Response = self._session.send(request, **request_kwargs) + response: requests.Response = self._session.send(request, **request_kwargs) if self.should_retry(response): custom_backoff_time = self.backoff_time(response) @@ -328,12 +330,10 @@ def _send_request(self, request: requests.PreparedRequest, request_kwargs: Mappi """ if max_tries is not None: max_tries = max(0, max_tries) + 1 - AirbyteSentry.set_context("request", {"url": request.url, "headers": request.headers, "args": request_kwargs}) - with AirbyteSentry.start_transaction_span(op="_send_request"): - user_backoff_handler = user_defined_backoff_handler(max_tries=max_tries)(self._send) - backoff_handler = default_backoff_handler(max_tries=max_tries, factor=self.retry_factor) - return backoff_handler(user_backoff_handler)(request, request_kwargs) + user_backoff_handler = user_defined_backoff_handler(max_tries=max_tries)(self._send) + backoff_handler = default_backoff_handler(max_tries=max_tries, factor=self.retry_factor) + return backoff_handler(user_backoff_handler)(request, request_kwargs) def parse_response_error_message(self, response: requests.Response) -> Optional[str]: """ @@ -394,38 +394,35 @@ def read_records( pagination_complete = False next_page_token = None - with AirbyteSentry.start_transaction("read_records", self.name), AirbyteSentry.start_transaction_span("read_records"): - while not pagination_complete: - request_headers = 
self.request_headers( - stream_state=stream_state, stream_slice=stream_slice, next_page_token=next_page_token - ) - request = self._create_prepared_request( - path=self.path(stream_state=stream_state, stream_slice=stream_slice, next_page_token=next_page_token), - headers=dict(request_headers, **self.authenticator.get_auth_header()), - params=self.request_params(stream_state=stream_state, stream_slice=stream_slice, next_page_token=next_page_token), - json=self.request_body_json(stream_state=stream_state, stream_slice=stream_slice, next_page_token=next_page_token), - data=self.request_body_data(stream_state=stream_state, stream_slice=stream_slice, next_page_token=next_page_token), - ) - request_kwargs = self.request_kwargs(stream_state=stream_state, stream_slice=stream_slice, next_page_token=next_page_token) - - if self.use_cache: - # use context manager to handle and store cassette metadata - with self.cache_file as cass: - self.cassete = cass - # vcr tries to find records based on the request, if such records exist, return from cache file - # else make a request and save record in cache file - response = self._send_request(request, request_kwargs) - - else: + while not pagination_complete: + request_headers = self.request_headers(stream_state=stream_state, stream_slice=stream_slice, next_page_token=next_page_token) + request = self._create_prepared_request( + path=self.path(stream_state=stream_state, stream_slice=stream_slice, next_page_token=next_page_token), + headers=dict(request_headers, **self.authenticator.get_auth_header()), + params=self.request_params(stream_state=stream_state, stream_slice=stream_slice, next_page_token=next_page_token), + json=self.request_body_json(stream_state=stream_state, stream_slice=stream_slice, next_page_token=next_page_token), + data=self.request_body_data(stream_state=stream_state, stream_slice=stream_slice, next_page_token=next_page_token), + ) + request_kwargs = self.request_kwargs(stream_state=stream_state, 
stream_slice=stream_slice, next_page_token=next_page_token) + + if self.use_cache: + # use context manager to handle and store cassette metadata + with self.cache_file as cass: + self.cassete = cass + # vcr tries to find records based on the request, if such records exist, return from cache file + # else make a request and save record in cache file response = self._send_request(request, request_kwargs) - yield from self.parse_response(response, stream_state=stream_state, stream_slice=stream_slice) - next_page_token = self.next_page_token(response) - if not next_page_token: - pagination_complete = True + else: + response = self._send_request(request, request_kwargs) + yield from self.parse_response(response, stream_state=stream_state, stream_slice=stream_slice) + + next_page_token = self.next_page_token(response) + if not next_page_token: + pagination_complete = True - # Always return an empty generator just in case no records were ever yielded - yield from [] + # Always return an empty generator just in case no records were ever yielded + yield from [] class HttpSubStream(HttpStream, ABC): diff --git a/airbyte-cdk/python/airbyte_cdk/sources/streams/http/requests_native_auth/token.py b/airbyte-cdk/python/airbyte_cdk/sources/streams/http/requests_native_auth/token.py index 4436ec316a0d..d117c24a44bb 100644 --- a/airbyte-cdk/python/airbyte_cdk/sources/streams/http/requests_native_auth/token.py +++ b/airbyte-cdk/python/airbyte_cdk/sources/streams/http/requests_native_auth/token.py @@ -2,6 +2,7 @@ # Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
# +import base64 from itertools import cycle from typing import Any, List, Mapping @@ -37,3 +38,15 @@ class TokenAuthenticator(MultipleTokenAuthenticator): def __init__(self, token: str, auth_method: str = "Bearer", auth_header: str = "Authorization"): super().__init__([token], auth_method, auth_header) + + +class BasicHttpAuthenticator(TokenAuthenticator): + """ + Builds auth based off the basic authentication scheme as defined by RFC 7617, which transmits credentials as USER ID/password pairs, encoded using bas64 + https://developer.mozilla.org/en-US/docs/Web/HTTP/Authentication#basic_authentication_scheme + """ + + def __init__(self, username: str, password: str, auth_method: str = "Basic", auth_header: str = "Authorization"): + auth_string = f"{username}:{password}".encode("utf8") + b64_encoded = base64.b64encode(auth_string).decode("utf8") + super().__init__(b64_encoded, auth_method, auth_header) diff --git a/airbyte-cdk/python/airbyte_cdk/sources/utils/schema_helpers.py b/airbyte-cdk/python/airbyte_cdk/sources/utils/schema_helpers.py index c65a0d537c2e..be8e257d600a 100644 --- a/airbyte-cdk/python/airbyte_cdk/sources/utils/schema_helpers.py +++ b/airbyte-cdk/python/airbyte_cdk/sources/utils/schema_helpers.py @@ -7,9 +7,8 @@ import json import os import pkgutil -from typing import Any, ClassVar, Dict, List, Mapping, MutableMapping, Optional, Set, Tuple, Union +from typing import Any, ClassVar, Dict, List, Mapping, MutableMapping, Optional, Tuple, Union -import dpath.util import jsonref from airbyte_cdk.models import ConnectorSpecification from jsonschema import RefResolver, validate @@ -192,32 +191,3 @@ def split_config(config: Mapping[str, Any]) -> Tuple[dict, InternalConfig]: else: main_config[k] = v return main_config, InternalConfig.parse_obj(internal_config) - - -def get_secret_values(schema: Mapping[str, Any], config: Mapping[str, Any]) -> List[str]: - def get_secret_pathes(schema: Mapping[str, Any]) -> Set[str]: - pathes = set() - - def 
traverse_schema(schema: Any, path: List[str]): - if isinstance(schema, dict): - for k, v in schema.items(): - traverse_schema(v, [*path, k]) - elif isinstance(schema, list): - for i in schema: - traverse_schema(i, path) - else: - if path[-1] == "airbyte_secret" and schema is True: - path_str = "/".join([p for p in path[:-1] if p not in ["properties", "oneOf"]]) - pathes.add(path_str) - - traverse_schema(schema, []) - return pathes - - secret_pathes = get_secret_pathes(schema) - result = [] - for path in secret_pathes: - try: - result.append(dpath.util.get(config, path)) - except KeyError: - pass - return result diff --git a/airbyte-cdk/python/airbyte_cdk/sources/utils/sentry.py b/airbyte-cdk/python/airbyte_cdk/sources/utils/sentry.py deleted file mode 100644 index 395c2958afa7..000000000000 --- a/airbyte-cdk/python/airbyte_cdk/sources/utils/sentry.py +++ /dev/null @@ -1,240 +0,0 @@ -# -# Copyright (c) 2022 Airbyte, Inc., all rights reserved. -# - -import contextlib -import os -import re -from typing import Any, Callable, List, Optional, Type, Union -from uuid import uuid4 - -import sentry_sdk -from sentry_sdk.integrations.atexit import AtexitIntegration -from sentry_sdk.integrations.excepthook import ExcepthookIntegration -from sentry_sdk.integrations.logging import LoggingIntegration - - -class AirbyteSentry: - """ - Class for working with sentry sdk. It provides methods to: - - init sentry sdk based on env variable - - add breadcrumbs and set context - - work with transactions and transaction spans - - set tag and capture message and capture exception - Also it implements client side sensitive data scrubbing. - """ - - DSN_ENV_NAME = "SENTRY_DSN" - SECRET_MASK = "***" - # Maximum number of breadcrumbs to send on fail. Breadcrumbs is trail of - # events that occured before the fail and being sent to server only - # if handled or unhandled exception occured. - MAX_BREADCRUMBS = 30 - # Event sending rate. 
could be from 0 (0%) to 1.0 (100 % events being sent - # to sentry server) - TRACES_SAMPLE_RATE = 1.0 - SECRET_REGEXP = [ - re.compile("(api_key=)[a-zA-Z0-9_]+"), - re.compile("(access_token=)[a-zA-Z0-9_]+"), - re.compile("(refresh_token=)[a-zA-Z0-9_]+"), - re.compile("(token )[a-zA-Z0-9_]+"), - re.compile("(Bearer )[a-zA-Z0-9_]+"), - ] - SENSITIVE_KEYS = ["Authorization", "client_secret", "access_token"] - - sentry_enabled = False - source_tag = "" - run_id = str(uuid4()) - secret_values: List[str] = [] - - @classmethod - def process_value(cls, key: str, value: str): - """ - Process single value. Used by recursive replace_value method or - standalone for single value. - """ - for secret in cls.secret_values: - value = value.replace(secret, cls.SECRET_MASK) - if key in cls.SENSITIVE_KEYS: - return cls.SECRET_MASK - for regexp in cls.SECRET_REGEXP: - value = regexp.sub(f"\\1{cls.SECRET_MASK}", value) - return value - - @classmethod - def replace_value(cls, key, value): - """ - Recursively scan event and replace all sensitive data with SECRET_MASK. - Perform inplace data replace i.e. its not creating new object. - """ - if isinstance(value, dict): - for k, v in value.items(): - value[k] = cls.replace_value(k, v) - elif isinstance(value, list): - for index, v in enumerate(value): - value[index] = cls.replace_value(index, v) - elif isinstance(value, str): - return cls.process_value(key, value) - return value - - @classmethod - def filter_event(cls, event, hint): - """ - Callback for before_send sentry hook. - """ - if "message" in event: - event["message"] = cls.process_value(None, event["message"]) - cls.replace_value(None, event.get("exception")) - cls.replace_value(None, event.get("contexts")) - return event - - @classmethod - def filter_breadcrumb(cls, event, hint): - """ - Callback for before_breadcrumb sentry hook. 
- """ - cls.replace_value(None, event) - return event - - @classmethod - def init( - cls, - source_tag: str = None, - transport: Optional[Union[Type[sentry_sdk.transport.Transport], Callable[[Any], None]]] = None, - secret_values: List[str] = [], - ): - """ - Read sentry data source name (DSN) from env variable and initialize sentry cdk. - Args: - source_tag: str - Source name to be used in "source" tag for events organazing. - transport: Transport or Callable - transport object for transfering - sentry event to remote server. Usually used for testing, by default - HTTP transport used - secret_values: List[str] - list of string that have to be filtered - out before sending event to sentry server. - - """ - sentry_dsn = os.environ.get(cls.DSN_ENV_NAME) - if sentry_dsn: - cls.sentry_enabled = True - cls.secret_values = secret_values - sentry_sdk.init( - sentry_dsn, - max_breadcrumbs=cls.MAX_BREADCRUMBS, - traces_sample_rate=cls.TRACES_SAMPLE_RATE, - before_send=AirbyteSentry.filter_event, - before_breadcrumb=AirbyteSentry.filter_breadcrumb, - transport=transport, - # Use only limited list of integration cause sentry may send - # transaction events e.g. it could send httplib request with - # url and authorization info over StdlibIntegration and it - # would bypass before_send hook. 
- integrations=[ - ExcepthookIntegration(always_run=True), - AtexitIntegration(), - LoggingIntegration(), - ], - # Disable default integrations cause sentry does not allow to - # filter transactions event that could transfer sensitive data - default_integrations=False, - ) - if source_tag: - sentry_sdk.set_tag("source", source_tag) - sentry_sdk.set_tag("run_id", cls.run_id) - cls.source_tag = source_tag - - def if_enabled(f): - def wrapper(cls, *args, **kvargs): - if cls.sentry_enabled: - return f(cls, *args, **kvargs) - - return wrapper - - def if_enabled_else(return_value): - def if_enabled(f): - def wrapper(cls, *args, **kvargs): - if cls.sentry_enabled: - return f(cls, *args, **kvargs) - else: - return return_value - - return wrapper - - return if_enabled - - # according to issue CDK: typing errors #9500, mypy raises error on this line - # 'Argument 1 to "if_enabled" has incompatible type "Callable[[Type[AirbyteSentry], str, Any], Any]"; expected "AirbyteSentry"' - # there are a few similar opened issues - # https://github.com/python/mypy/issues/12110 - # https://github.com/python/mypy/issues/11619 - # ignored for now - @classmethod # type: ignore - @if_enabled - def set_tag(cls, tag_name: str, value: Any): - """ - Set tag that is handy for events organazing and filtering by sentry UI. - """ - sentry_sdk.set_tag(tag_name, value) - - # same ignored as for line 171 - @classmethod # type: ignore - @if_enabled - def add_breadcrumb(cls, message, data=None): - """ - Add sentry breadcrumb. - """ - sentry_sdk.add_breadcrumb(message=message, data=data) - - # same ignored as for line 171 - @classmethod # type: ignore - @if_enabled - def set_context(cls, name, data): - # Global context being used by transaction event as well. Since we cant - # filter senstitve data coming from transaction event using sentry - # before_event hook, apply filter to context here. 
- cls.replace_value(None, data) - sentry_sdk.set_context(name, data) - - # same ignored as for line 171 - @classmethod # type: ignore - @if_enabled - def capture_message(cls, message): - """ - Send message event to sentry. - """ - sentry_sdk.capture_message(message) - - # same ignored as for line 171 - @classmethod # type: ignore - @if_enabled - def capture_exception( - cls, - error: Optional[BaseException] = None, - scope: Optional[Any] = None, - **scope_args, - ): - """ - Report handled execption to sentry. - """ - sentry_sdk.capture_exception(error, scope=scope, **scope_args) - - # same ignored as for line 171 - @classmethod - @if_enabled_else(contextlib.nullcontext()) # type: ignore - def start_transaction(cls, op, name=None): - """ - Return context manager for starting sentry transaction for performance monitoring. - """ - return sentry_sdk.start_transaction(op=op, name=f"{cls.source_tag}.{name}") - - # same ignored as for line 171 - @classmethod - @if_enabled_else(contextlib.nullcontext()) # type: ignore - def start_transaction_span(cls, op, description=None): - """ - Return context manager for starting sentry transaction span inside existing sentry transaction. - """ - # Apply filter to description since we cannot use before_send sentry - # hook for transaction event. - description = cls.replace_value(None, description) - return sentry_sdk.start_span(op=op, description=description) diff --git a/airbyte-cdk/python/airbyte_cdk/utils/__init__.py b/airbyte-cdk/python/airbyte_cdk/utils/__init__.py index e69de29bb2d1..29556737b88b 100644 --- a/airbyte-cdk/python/airbyte_cdk/utils/__init__.py +++ b/airbyte-cdk/python/airbyte_cdk/utils/__init__.py @@ -0,0 +1,6 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+# +from .traced_exception import AirbyteTracedException + +__all__ = ["AirbyteTracedException"] diff --git a/airbyte-cdk/python/airbyte_cdk/utils/airbyte_secrets_utils.py b/airbyte-cdk/python/airbyte_cdk/utils/airbyte_secrets_utils.py index 2ad00d6bfd40..41e615d628db 100644 --- a/airbyte-cdk/python/airbyte_cdk/utils/airbyte_secrets_utils.py +++ b/airbyte-cdk/python/airbyte_cdk/utils/airbyte_secrets_utils.py @@ -2,24 +2,55 @@ # Copyright (c) 2022 Airbyte, Inc., all rights reserved. # -import logging -from typing import TYPE_CHECKING, Any, List, Mapping +from typing import Any, List, Mapping -from airbyte_cdk.utils.mapping_utils import all_key_pairs_dot_notation, get_value_by_dot_notation +import dpath.util -if TYPE_CHECKING: - from airbyte_cdk.sources import Source +def get_secret_paths(spec: Mapping[str, Any]) -> List[List[str]]: + paths = [] -def get_secrets(source: "Source", config: Mapping[str, Any], logger: logging.Logger) -> List[Any]: + def traverse_schema(schema_item: Any, path: List[str]): + """ + schema_item can be any property or value in the originally input jsonschema, depending on how far down the recursion stack we go + path is the path to that schema item in the original input + for example if we have the input {'password': {'type': 'string', 'airbyte_secret': True}} then the arguments will evolve + as follows: + schema_item=, path=[] + schema_item={'type': 'string', 'airbyte_secret': True}, path=['password'] + schema_item='string', path=['password', 'type'] + schema_item=True, path=['password', 'airbyte_secret'] + """ + if isinstance(schema_item, dict): + for k, v in schema_item.items(): + traverse_schema(v, [*path, k]) + elif isinstance(schema_item, list): + for i in schema_item: + traverse_schema(i, path) + else: + if path[-1] == "airbyte_secret" and schema_item is True: + filtered_path = [p for p in path[:-1] if p not in ["properties", "oneOf"]] + paths.append(filtered_path) + + traverse_schema(spec, []) + return paths + + +def 
get_secrets(connection_specification: Mapping[str, Any], config: Mapping[str, Any]) -> List[Any]: """ - Get a list of secrets from the source config based on the source specification + Get a list of secret values from the source config based on the source specification + :type connection_specification: the connection_specification field of an AirbyteSpecification i.e the JSONSchema definition """ - flattened_key_values = all_key_pairs_dot_notation(source.spec(logger).connectionSpecification.get("properties", {})) - secret_key_names = [ - ".".join(key.split(".")[:1]) for key, value in flattened_key_values.items() if value and key.endswith("airbyte_secret") - ] - return [str(get_value_by_dot_notation(config, key)) for key in secret_key_names if config.get(key)] + secret_paths = get_secret_paths(connection_specification.get("properties", {})) + result = [] + for path in secret_paths: + try: + result.append(dpath.util.get(config, path)) + except KeyError: + # Since we try to get paths to all known secrets in the spec, in the case of oneOfs, some secret fields may not be present + # In that case, a KeyError is thrown. This is expected behavior. + pass + return result __SECRETS_FROM_CONFIG: List[str] = [] @@ -33,6 +64,8 @@ def update_secrets(secrets: List[str]): def filter_secrets(string: str) -> str: """Filter secrets from a string by replacing them with ****""" + # TODO this should perform a maximal match for each secret. if "x" and "xk" are both secret values, and this method is called twice on + # the input "xk", then depending on call order it might only obfuscate "*k". This is a bug. 
for secret in __SECRETS_FROM_CONFIG: - string = string.replace(secret, "****") + string = string.replace(str(secret), "****") return string diff --git a/airbyte-cdk/python/airbyte_cdk/utils/mapping_utils.py b/airbyte-cdk/python/airbyte_cdk/utils/mapping_utils.py deleted file mode 100644 index 62f954861e2e..000000000000 --- a/airbyte-cdk/python/airbyte_cdk/utils/mapping_utils.py +++ /dev/null @@ -1,41 +0,0 @@ -# -# Copyright (c) 2022 Airbyte, Inc., all rights reserved. -# - -from functools import reduce -from typing import Any, Iterable, List, Mapping, Optional, Tuple - - -def all_key_pairs_dot_notation(dict_obj: Mapping) -> Mapping[str, Any]: - """ - Recursively iterate through a dictionary and return a dictionary of all key-value pairs in dot notation. - keys are prefixed with the list of keys passed in as prefix. - """ - - def _all_key_pairs_dot_notation(_dict_obj: Mapping, prefix: List[str] = []) -> Iterable[Tuple[str, Any]]: - for key, value in _dict_obj.items(): - if isinstance(value, dict): - prefix.append(str(key)) - yield from _all_key_pairs_dot_notation(value, prefix) - prefix.pop() - else: - prefix.append(str(key)) - yield ".".join(prefix), value - prefix.pop() - - return {k: v for k, v in _all_key_pairs_dot_notation(dict_obj)} - - -def get_value_by_dot_notation(dict_obj: Mapping, key: str, default: Optional[Any] = ...) -> Any: - """ - Return the value of a key in dot notation in a arbitrarily nested Mapping. - dict_obj: Mapping - key: str - default: Any - raises: KeyError if default is not provided and the key is not found - ex.: - dict_obj = {"nested": {"key": "value"}} - get_value_by_dot_notation(dict_obj, "nested.key") == "value" -> True - """ - - return reduce(lambda d, key_name: d[key_name] if default is ... 
else d.get(key_name, default), key.split("."), dict_obj) diff --git a/airbyte-cdk/python/docs/concepts/README.md b/airbyte-cdk/python/docs/concepts/README.md index cf5f9365232f..b7daf16cad4d 100644 --- a/airbyte-cdk/python/docs/concepts/README.md +++ b/airbyte-cdk/python/docs/concepts/README.md @@ -1,6 +1,6 @@ # Connector Development Kit Concepts -This concepts section serves as a general introduction to the Python CDK. Readers will certainly benefit from a deeper understanding of the [Airbyte Specification](https://docs.airbyte.io/architecture/airbyte-specification) before proceeding, but we do a quick overview of it in our basic concepts guide below. +This concepts section serves as a general introduction to the Python CDK. Readers will certainly benefit from a deeper understanding of the [Airbyte Specification](https://docs.airbyte.io/architecture/airbyte-protocol) before proceeding, but we do a quick overview of it in our basic concepts guide below. ### Basic Concepts If you want to learn more about the classes required to implement an Airbyte Source, head to our [basic concepts doc](basic-concepts.md). diff --git a/airbyte-cdk/python/docs/tutorials/cdk-tutorial-python-http/3-define-inputs.md b/airbyte-cdk/python/docs/tutorials/cdk-tutorial-python-http/3-define-inputs.md index 5f5ce8b76e11..8b4e9c54799b 100644 --- a/airbyte-cdk/python/docs/tutorials/cdk-tutorial-python-http/3-define-inputs.md +++ b/airbyte-cdk/python/docs/tutorials/cdk-tutorial-python-http/3-define-inputs.md @@ -4,7 +4,7 @@ Each connector declares the inputs it needs to read data from the underlying dat The simplest way to implement this is by creating a `.json` file in `source_/spec.json` which describes your connector's inputs according to the [ConnectorSpecification](https://github.com/airbytehq/airbyte/blob/master/airbyte-protocol/models/src/main/resources/airbyte_protocol/airbyte_protocol.yaml#L211) schema. This is a good place to start when developing your source. 
Using JsonSchema, define what the inputs are \(e.g. username and password\). Here's [an example](https://github.com/airbytehq/airbyte/blob/master/airbyte-integrations/connectors/source-freshdesk/source_freshdesk/spec.json) of what the `spec.json` looks like for the Freshdesk API source. -For more details on what the spec is, you can read about the Airbyte Protocol [here](https://docs.airbyte.io/understanding-airbyte/airbyte-specification). +For more details on what the spec is, you can read about the Airbyte Protocol [here](https://docs.airbyte.io/understanding-airbyte/airbyte-protocol). The generated code that Airbyte provides, handles implementing the `spec` method for you. It assumes that there will be a file called `spec.json` in the same directory as `source.py`. If you have declared the necessary JsonSchema in `spec.json` you should be done with this step. diff --git a/airbyte-cdk/python/docs/tutorials/http_api_source.md b/airbyte-cdk/python/docs/tutorials/http_api_source.md index 3d8327596f29..97fb2a88d4a2 100644 --- a/airbyte-cdk/python/docs/tutorials/http_api_source.md +++ b/airbyte-cdk/python/docs/tutorials/http_api_source.md @@ -119,7 +119,7 @@ Each connector declares the inputs it needs to read data from the underlying dat The simplest way to implement this is by creating a `.json` file in `source_/spec.json` which describes your connector's inputs according to the [ConnectorSpecification](https://github.com/airbytehq/airbyte/blob/master/airbyte-protocol/models/src/main/resources/airbyte_protocol/airbyte_protocol.yaml#L211) schema. This is a good place to start when developing your source. Using JsonSchema, define what the inputs are \(e.g. username and password\). Here's [an example](https://github.com/airbytehq/airbyte/blob/master/airbyte-integrations/connectors/source-freshdesk/source_freshdesk/spec.json) of what the `spec.json` looks like for the Freshdesk API source. 
-For more details on what the spec is, you can read about the Airbyte Protocol [here](https://docs.airbyte.io/understanding-airbyte/airbyte-specification#the-airbyte-protocol). +For more details on what the spec is, you can read about the Airbyte Protocol [here](https://docs.airbyte.io/understanding-airbyte/airbyte-protocol#the-airbyte-protocol). The generated code that Airbyte provides, handles implementing the `spec` method for you. It assumes that there will be a file called `spec.json` in the same directory as `source.py`. If you have declared the necessary JsonSchema in `spec.json` you should be done with this step. diff --git a/airbyte-cdk/python/setup.py b/airbyte-cdk/python/setup.py index 57683095c313..53ef5967d6d9 100644 --- a/airbyte-cdk/python/setup.py +++ b/airbyte-cdk/python/setup.py @@ -15,7 +15,7 @@ setup( name="airbyte-cdk", - version="0.1.60", + version="0.1.62", description="A framework for writing Airbyte Connectors.", long_description=README, long_description_content_type="text/markdown", @@ -51,7 +51,6 @@ "pydantic~=1.6", "PyYAML~=5.4", "requests", - "sentry-sdk~=1.5.1", "vcrpy", "Deprecated~=1.2", "Jinja2~=3.1.2", diff --git a/airbyte-cdk/python/unit_tests/sources/declarative/extractors/test_jello.py b/airbyte-cdk/python/unit_tests/sources/declarative/extractors/test_jello.py index c19c0fdeb725..dd1a83494190 100644 --- a/airbyte-cdk/python/unit_tests/sources/declarative/extractors/test_jello.py +++ b/airbyte-cdk/python/unit_tests/sources/declarative/extractors/test_jello.py @@ -4,63 +4,51 @@ import json +import pytest import requests from airbyte_cdk.sources.declarative.decoders.json_decoder import JsonDecoder from airbyte_cdk.sources.declarative.extractors.jello import JelloExtractor config = {"field": "record_array"} -decoder = JsonDecoder() - - -def test(): - transform = "_.data" - extractor = JelloExtractor(transform, decoder, config) - - records = [{"id": 1}, {"id": 2}] - body = {"data": records} - response = create_response(body) - 
actual_records = extractor.extract_records(response) - - assert actual_records == records - - -def test_field_in_config(): - transform = "_.{{ config['field'] }}" - extractor = JelloExtractor(transform, decoder, config) +kwargs = {"data_field": "records"} - records = [{"id": 1}, {"id": 2}] - body = {"record_array": records} - response = create_response(body) - actual_records = extractor.extract_records(response) - - assert actual_records == records +decoder = JsonDecoder() -def test_field_in_kwargs(): - transform = "_.{{ kwargs['data_field'] }}" - kwargs = {"data_field": "records"} +@pytest.mark.parametrize( + "test_name, transform, body, expected_records", + [ + ("test_extract_from_array", "_.data", {"data": [{"id": 1}, {"id": 2}]}, [{"id": 1}, {"id": 2}]), + ("test_field_in_config", "_.{{ config['field'] }}", {"record_array": [{"id": 1}, {"id": 2}]}, [{"id": 1}, {"id": 2}]), + ("test_field_in_kwargs", "_.{{ kwargs['data_field'] }}", {"records": [{"id": 1}, {"id": 2}]}, [{"id": 1}, {"id": 2}]), + ("test_default", "_{{kwargs['field']}}", [{"id": 1}, {"id": 2}], [{"id": 1}, {"id": 2}]), + ( + "test_remove_fields_from_records", + "[{k:v for k,v in d.items() if k != 'value_to_remove'} for d in _.data]", + {"data": [{"id": 1, "value": "HELLO", "value_to_remove": "fail"}, {"id": 2, "value": "WORLD", "value_to_remove": "fail"}]}, + [{"id": 1, "value": "HELLO"}, {"id": 2, "value": "WORLD"}], + ), + ( + "test_add_fields_from_records", + "[{**{k:v for k,v in d.items()}, **{'project_id': d['project']['id']}} for d in _.data]", + {"data": [{"id": 1, "value": "HELLO", "project": {"id": 8}}, {"id": 2, "value": "WORLD", "project": {"id": 9}}]}, + [ + {"id": 1, "value": "HELLO", "project_id": 8, "project": {"id": 8}}, + {"id": 2, "value": "WORLD", "project_id": 9, "project": {"id": 9}}, + ], + ), + ], +) +def test(test_name, transform, body, expected_records): extractor = JelloExtractor(transform, decoder, config, kwargs=kwargs) - records = [{"id": 1}, {"id": 2}] - body = 
{"records": records} response = create_response(body) actual_records = extractor.extract_records(response) - assert actual_records == records + assert actual_records == expected_records def create_response(body): response = requests.Response() response._content = json.dumps(body).encode("utf-8") return response - - -def test_default(): - transform = "_{{kwargs['field']}}" - extractor = JelloExtractor(transform, decoder, config) - - records = [{"id": 1}, {"id": 2}] - response = create_response(records) - actual_records = extractor.extract_records(response) - - assert actual_records == records diff --git a/airbyte-cdk/python/unit_tests/sources/declarative/extractors/test_record_filter.py b/airbyte-cdk/python/unit_tests/sources/declarative/extractors/test_record_filter.py new file mode 100644 index 000000000000..2b180ee5d935 --- /dev/null +++ b/airbyte-cdk/python/unit_tests/sources/declarative/extractors/test_record_filter.py @@ -0,0 +1,48 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+# + +import pytest +from airbyte_cdk.sources.declarative.extractors.record_filter import RecordFilter + + +@pytest.mark.parametrize( + "test_name, filter_template, records, expected_records", + [ + ( + "test_using_state_filter", + "{{ record['created_at'] > stream_state['created_at'] }}", + [{"id": 1, "created_at": "06-06-21"}, {"id": 2, "created_at": "06-07-21"}, {"id": 3, "created_at": "06-08-21"}], + [{"id": 2, "created_at": "06-07-21"}, {"id": 3, "created_at": "06-08-21"}], + ), + ( + "test_with_slice_filter", + "{{ record['last_seen'] >= stream_slice['last_seen'] }}", + [{"id": 1, "last_seen": "06-06-21"}, {"id": 2, "last_seen": "06-07-21"}, {"id": 3, "last_seen": "06-10-21"}], + [{"id": 3, "last_seen": "06-10-21"}], + ), + ( + "test_with_next_page_token_filter", + "{{ record['id'] >= next_page_token['last_seen_id'] }}", + [{"id": 11}, {"id": 12}, {"id": 13}, {"id": 14}, {"id": 15}], + [{"id": 14}, {"id": 15}], + ), + ( + "test_missing_filter_fields_return_no_results", + "{{ record['id'] >= next_page_token['path_to_nowhere'] }}", + [{"id": 11}, {"id": 12}, {"id": 13}, {"id": 14}, {"id": 15}], + [], + ), + ], +) +def test_record_filter(test_name, filter_template, records, expected_records): + config = {"response_override": "stop_if_you_see_me"} + stream_state = {"created_at": "06-06-21"} + stream_slice = {"last_seen": "06-10-21"} + next_page_token = {"last_seen_id": 14} + record_filter = RecordFilter(config=config, condition=filter_template) + + actual_records = record_filter.filter_records( + records, stream_state=stream_state, stream_slice=stream_slice, next_page_token=next_page_token + ) + assert actual_records == expected_records diff --git a/airbyte-cdk/python/unit_tests/sources/declarative/extractors/test_record_selector.py b/airbyte-cdk/python/unit_tests/sources/declarative/extractors/test_record_selector.py new file mode 100644 index 000000000000..9bdcd0711e5e --- /dev/null +++ 
b/airbyte-cdk/python/unit_tests/sources/declarative/extractors/test_record_selector.py @@ -0,0 +1,58 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + +import json + +import pytest +import requests +from airbyte_cdk.sources.declarative.decoders.json_decoder import JsonDecoder +from airbyte_cdk.sources.declarative.extractors.jello import JelloExtractor +from airbyte_cdk.sources.declarative.extractors.record_filter import RecordFilter +from airbyte_cdk.sources.declarative.extractors.record_selector import RecordSelector + + +@pytest.mark.parametrize( + "test_name, transform_template, filter_template, body, expected_records", + [ + ( + "test_with_extractor_and_filter", + "_.data", + "{{ record['created_at'] > stream_state['created_at'] }}", + {"data": [{"id": 1, "created_at": "06-06-21"}, {"id": 2, "created_at": "06-07-21"}, {"id": 3, "created_at": "06-08-21"}]}, + [{"id": 2, "created_at": "06-07-21"}, {"id": 3, "created_at": "06-08-21"}], + ), + ( + "test_no_record_filter_returns_all_records", + "_.data", + None, + {"data": [{"id": 1, "created_at": "06-06-21"}, {"id": 2, "created_at": "06-07-21"}]}, + [{"id": 1, "created_at": "06-06-21"}, {"id": 2, "created_at": "06-07-21"}], + ), + ], +) +def test_record_filter(test_name, transform_template, filter_template, body, expected_records): + config = {"response_override": "stop_if_you_see_me"} + stream_state = {"created_at": "06-06-21"} + stream_slice = {"last_seen": "06-10-21"} + next_page_token = {"last_seen_id": 14} + + response = create_response(body) + decoder = JsonDecoder() + extractor = JelloExtractor(transform=transform_template, decoder=decoder, config=config, kwargs={}) + if filter_template is None: + record_filter = None + else: + record_filter = RecordFilter(config=config, condition=filter_template) + record_selector = RecordSelector(extractor=extractor, record_filter=record_filter) + + actual_records = record_selector.select_records( + response=response, stream_state=stream_state, 
stream_slice=stream_slice, next_page_token=next_page_token + ) + assert actual_records == expected_records + + +def create_response(body): + response = requests.Response() + response._content = json.dumps(body).encode("utf-8") + return response diff --git a/airbyte-cdk/python/unit_tests/sources/declarative/interpolation/test_interpolated_boolean.py b/airbyte-cdk/python/unit_tests/sources/declarative/interpolation/test_interpolated_boolean.py index 0fb5bfe64532..eb6b2397083d 100644 --- a/airbyte-cdk/python/unit_tests/sources/declarative/interpolation/test_interpolated_boolean.py +++ b/airbyte-cdk/python/unit_tests/sources/declarative/interpolation/test_interpolated_boolean.py @@ -10,6 +10,7 @@ "string_key": "compare_me", "zero_value": 0, "empty_array": [], + "non_empty_array": [1], "empty_dict": {}, "empty_tuple": (), } @@ -29,6 +30,8 @@ ("test_empty_dict_is_false", "{{ config['empty_dict'] }}", False), ("test_empty_tuple_is_false", "{{ config['empty_tuple'] }}", False), ("test_lowercase_false", '{{ "false" }}', False), + ("test_value_in_array", "{{ 1 in config['non_empty_array'] }}", True), + ("test_value_not_in_array", "{{ 2 in config['non_empty_array'] }}", False), ], ) def test_interpolated_boolean(test_name, template, expected_result): diff --git a/airbyte-cdk/python/unit_tests/sources/declarative/interpolation/test_interpolated_request_parameter_provider.py b/airbyte-cdk/python/unit_tests/sources/declarative/interpolation/test_interpolated_request_parameter_provider.py deleted file mode 100644 index 1699a62a9497..000000000000 --- a/airbyte-cdk/python/unit_tests/sources/declarative/interpolation/test_interpolated_request_parameter_provider.py +++ /dev/null @@ -1,78 +0,0 @@ -# -# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
-# - - -from airbyte_cdk.sources.declarative.requesters.request_params.interpolated_request_parameter_provider import ( - InterpolatedRequestParameterProvider, -) - -state = {"date": "2021-01-01"} -stream_slice = {"start_date": "2020-01-01"} -next_page_token = {"offset": "12345"} -config = {"option": "OPTION"} - - -def test(): - request_parameters = {"a_static_request_param": "a_static_value"} - provider = InterpolatedRequestParameterProvider(request_parameters=request_parameters, config=config) - - request_params = provider.request_params(state, stream_slice, next_page_token) - - assert request_parameters == request_params - - -def test_value_depends_on_state(): - request_parameters = {"a_static_request_param": "{{ stream_state['date'] }}"} - provider = InterpolatedRequestParameterProvider(request_parameters=request_parameters, config=config) - - request_params = provider.request_params(state, stream_slice, next_page_token) - - assert request_params["a_static_request_param"] == state["date"] - - -def test_value_depends_on_stream_slice(): - request_parameters = {"a_static_request_param": "{{ stream_slice['start_date'] }}"} - provider = InterpolatedRequestParameterProvider(request_parameters=request_parameters, config=config) - - request_params = provider.request_params(state, stream_slice, next_page_token) - - assert request_params["a_static_request_param"] == stream_slice["start_date"] - - -def test_value_depends_on_next_page_token(): - request_parameters = {"a_static_request_param": "{{ next_page_token['offset'] }}"} - provider = InterpolatedRequestParameterProvider(request_parameters=request_parameters, config=config) - - request_params = provider.request_params(state, stream_slice, next_page_token) - - assert request_params["a_static_request_param"] == next_page_token["offset"] - - -def test_value_depends_on_config(): - request_parameters = {"a_static_request_param": "{{ config['option'] }}"} - provider = 
InterpolatedRequestParameterProvider(request_parameters=request_parameters, config=config) - - request_params = provider.request_params(state, stream_slice, next_page_token) - - assert request_params["a_static_request_param"] == config["option"] - - -def test_parameter_is_interpolated(): - request_parameters = { - "{{ stream_state['date'] }} - {{stream_slice['start_date']}} - {{next_page_token['offset']}} - {{config['option']}}": "ABC" - } - provider = InterpolatedRequestParameterProvider(request_parameters=request_parameters, config=config) - - request_params = provider.request_params(state, stream_slice, next_page_token) - - assert request_params[f"{state['date']} - {stream_slice['start_date']} - {next_page_token['offset']} - {config['option']}"] == "ABC" - - -def test_none_value(): - request_parameters = {"a_static_request_param": "{{ stream_state['date'] }}"} - provider = InterpolatedRequestParameterProvider(request_parameters=request_parameters, config=config) - - request_params = provider.request_params({}, stream_slice, next_page_token) - - assert len(request_params) == 0 diff --git a/airbyte-cdk/python/unit_tests/sources/declarative/iterators/test_interpolated_request_parameter_provider.py b/airbyte-cdk/python/unit_tests/sources/declarative/iterators/test_interpolated_request_parameter_provider.py deleted file mode 100644 index eff1dd651d4f..000000000000 --- a/airbyte-cdk/python/unit_tests/sources/declarative/iterators/test_interpolated_request_parameter_provider.py +++ /dev/null @@ -1,77 +0,0 @@ -# -# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
-# - -from airbyte_cdk.sources.declarative.requesters.request_params.interpolated_request_parameter_provider import ( - InterpolatedRequestParameterProvider, -) - -state = {"date": "2021-01-01"} -stream_slice = {"start_date": "2020-01-01"} -next_page_token = {"offset": "12345"} -config = {"option": "OPTION"} - - -def test(): - request_parameters = {"a_static_request_param": "a_static_value"} - provider = InterpolatedRequestParameterProvider(request_parameters=request_parameters, config=config) - - request_params = provider.request_params(state, stream_slice, next_page_token) - - assert request_parameters == request_params - - -def test_value_depends_on_state(): - request_parameters = {"a_static_request_param": "{{ stream_state['date'] }}"} - provider = InterpolatedRequestParameterProvider(request_parameters=request_parameters, config=config) - - request_params = provider.request_params(state, stream_slice, next_page_token) - - assert request_params["a_static_request_param"] == state["date"] - - -def test_value_depends_on_stream_slice(): - request_parameters = {"a_static_request_param": "{{ stream_slice['start_date'] }}"} - provider = InterpolatedRequestParameterProvider(request_parameters=request_parameters, config=config) - - request_params = provider.request_params(state, stream_slice, next_page_token) - - assert request_params["a_static_request_param"] == stream_slice["start_date"] - - -def test_value_depends_on_next_page_token(): - request_parameters = {"a_static_request_param": "{{ next_page_token['offset'] }}"} - provider = InterpolatedRequestParameterProvider(request_parameters=request_parameters, config=config) - - request_params = provider.request_params(state, stream_slice, next_page_token) - - assert request_params["a_static_request_param"] == next_page_token["offset"] - - -def test_value_depends_on_config(): - request_parameters = {"a_static_request_param": "{{ config['option'] }}"} - provider = 
InterpolatedRequestParameterProvider(request_parameters=request_parameters, config=config) - - request_params = provider.request_params(state, stream_slice, next_page_token) - - assert request_params["a_static_request_param"] == config["option"] - - -def test_parameter_is_interpolated(): - request_parameters = { - "{{ stream_state['date'] }} - {{stream_slice['start_date']}} - {{next_page_token['offset']}} - {{config['option']}}": "ABC" - } - provider = InterpolatedRequestParameterProvider(request_parameters=request_parameters, config=config) - - request_params = provider.request_params(state, stream_slice, next_page_token) - - assert request_params[f"{state['date']} - {stream_slice['start_date']} - {next_page_token['offset']} - {config['option']}"] == "ABC" - - -def test_none_value(): - request_parameters = {"a_static_request_param": "{{ stream_state['date'] }}"} - provider = InterpolatedRequestParameterProvider(request_parameters=request_parameters, config=config) - - request_params = provider.request_params({}, stream_slice, next_page_token) - - assert len(request_params) == 0 diff --git a/airbyte-cdk/python/unit_tests/sources/declarative/requesters/paginators/test_interpolated_paginator.py b/airbyte-cdk/python/unit_tests/sources/declarative/requesters/paginators/test_interpolated_paginator.py index 984870db4a8e..360acb1484d1 100644 --- a/airbyte-cdk/python/unit_tests/sources/declarative/requesters/paginators/test_interpolated_paginator.py +++ b/airbyte-cdk/python/unit_tests/sources/declarative/requesters/paginators/test_interpolated_paginator.py @@ -4,6 +4,7 @@ import json +import pytest import requests from airbyte_cdk.sources.declarative.decoders.json_decoder import JsonDecoder from airbyte_cdk.sources.declarative.requesters.paginators.interpolated_paginator import InterpolatedPaginator @@ -18,58 +19,28 @@ decoder = JsonDecoder() -def test_value_is_static(): - next_page_tokens = {"cursor": "a_static_value"} - paginator = InterpolatedPaginator(next_page_tokens, 
decoder, config) - - next_page_token = paginator.next_page_token(response, last_responses) - - assert next_page_token == {"cursor": "a_static_value"} - - -def test_value_depends_response_body(): - next_page_tokens = {"cursor": "{{ decoded_response['next_page_cursor'] }}"} - paginator = InterpolatedPaginator(next_page_tokens, decoder, config) - - next_page_token = paginator.next_page_token(response, last_responses) - - assert next_page_token == {"cursor": response_body["next_page_cursor"]} - - -def test_value_depends_response_header(): - next_page_tokens = {"cursor": "{{ headers['A_HEADER'] }}"} - paginator = InterpolatedPaginator(next_page_tokens, decoder, config) - - next_page_token = paginator.next_page_token(response, last_responses) - - assert next_page_token["cursor"] == response.headers["A_HEADER"] - - -def test_value_depends_on_last_responses(): - next_page_tokens = {"cursor": "{{ last_records[-1]['id'] }}"} - paginator = InterpolatedPaginator(next_page_tokens, decoder, config) - - next_page_token = paginator.next_page_token(response, last_responses) - - assert next_page_token["cursor"] == "0" - - -def test_name_is_interpolated(): - next_page_tokens = {"{{ decoded_response['next_page_cursor'] }}": "a_static_value"} - paginator = InterpolatedPaginator(next_page_tokens, decoder, config) - - next_page_token = paginator.next_page_token(response, last_responses) - - assert next_page_token == {response_body["next_page_cursor"]: "a_static_value"} - - -def test_token_is_none_if_field_not_found(): - next_page_tokens = {"cursor": "{{ decoded_response['next_page_cursor'] }}"} - paginator = InterpolatedPaginator(next_page_tokens, decoder, config) - - r = requests.Response() - r._content = json.dumps({"not_next_page_cursor": "12345"}).encode("utf-8") - - next_page_token = paginator.next_page_token(r, last_responses) - - assert next_page_token is None +@pytest.mark.parametrize( + "test_name, next_page_token_template, expected_next_page_token", + [ + 
("test_value_is_static", {"cursor": "a_static_value"}, {"cursor": "a_static_value"}), + ( + "test_value_depends_response_body", + {"cursor": "{{ decoded_response['next_page_cursor'] }}"}, + {"cursor": response_body["next_page_cursor"]}, + ), + ("test_value_depends_response_header", {"cursor": "{{ headers['A_HEADER'] }}"}, {"cursor": response.headers["A_HEADER"]}), + ("test_value_depends_on_last_responses", {"cursor": "{{ last_records[-1]['id'] }}"}, {"cursor": "0"}), + ( + "test_name_is_interpolated", + {"{{ decoded_response['next_page_cursor'] }}": "a_static_value"}, + {response_body["next_page_cursor"]: "a_static_value"}, + ), + ("test_token_is_none_if_field_not_found", {"cursor": "{{ decoded_response['not_next_page_cursor'] }}"}, None), + ], +) +def test_interpolated_paginator(test_name, next_page_token_template, expected_next_page_token): + paginator = InterpolatedPaginator(next_page_token_template=next_page_token_template, decoder=decoder, config=config) + + actual_next_page_token = paginator.next_page_token(response, last_responses) + + assert expected_next_page_token == actual_next_page_token diff --git a/airbyte-cdk/python/unit_tests/sources/declarative/requesters/paginators/test_next_page_url_paginator.py b/airbyte-cdk/python/unit_tests/sources/declarative/requesters/paginators/test_next_page_url_paginator.py index 0af6dd0cf39f..aba00d9c8439 100644 --- a/airbyte-cdk/python/unit_tests/sources/declarative/requesters/paginators/test_next_page_url_paginator.py +++ b/airbyte-cdk/python/unit_tests/sources/declarative/requesters/paginators/test_next_page_url_paginator.py @@ -6,7 +6,6 @@ import requests from airbyte_cdk.sources.declarative.decoders.json_decoder import JsonDecoder -from airbyte_cdk.sources.declarative.requesters.paginators.interpolated_paginator import InterpolatedPaginator from airbyte_cdk.sources.declarative.requesters.paginators.next_page_url_paginator import NextPageUrlPaginator config = {"option": "OPTION"} @@ -39,4 +38,4 @@ def 
test_no_next_page_found(): def create_paginator(template): - return NextPageUrlPaginator("https://airbyte.io/", InterpolatedPaginator(template, decoder, config)) + return NextPageUrlPaginator("https://airbyte.io/", next_page_token_template=template, config=config) diff --git a/airbyte-cdk/python/unit_tests/sources/declarative/requesters/request_headers/test_interpolated_request_header_provider.py b/airbyte-cdk/python/unit_tests/sources/declarative/requesters/request_headers/test_interpolated_request_header_provider.py deleted file mode 100644 index 2126366686e8..000000000000 --- a/airbyte-cdk/python/unit_tests/sources/declarative/requesters/request_headers/test_interpolated_request_header_provider.py +++ /dev/null @@ -1,32 +0,0 @@ -# -# Copyright (c) 2022 Airbyte, Inc., all rights reserved. -# - -import pytest as pytest -from airbyte_cdk.sources.declarative.requesters.request_headers.interpolated_request_header_provider import ( - InterpolatedRequestHeaderProvider, -) - - -@pytest.mark.parametrize( - "test_name, request_headers, expected_evaluated_headers", - [ - ("test_static_string", {"static_key": "static_string"}, {"static_key": "static_string"}), - ("test_static_number", {"static_key": 408}, {"static_key": 408}), - ("test_from_config", {"get_from_config": "{{ config['config_key'] }}"}, {"get_from_config": "value_of_config"}), - ("test_from_stream_state", {"get_from_state": "{{ stream_state['state_key'] }}"}, {"get_from_state": "state_value"}), - ("test_from_stream_slice", {"get_from_slice": "{{ stream_slice['slice_key'] }}"}, {"get_from_slice": "slice_value"}), - ("test_from_next_page_token", {"get_from_token": "{{ next_page_token['token_key'] }}"}, {"get_from_token": "token_value"}), - ("test_from_stream_state_missing_key", {"get_from_state": "{{ stream_state['does_not_exist'] }}"}, {}), - ("test_none_headers", None, {}), - ], -) -def test_interpolated_request_header(test_name, request_headers, expected_evaluated_headers): - config = {"config_key": 
"value_of_config"} - stream_state = {"state_key": "state_value"} - stream_slice = {"slice_key": "slice_value"} - next_page_token = {"token_key": "token_value"} - provider = InterpolatedRequestHeaderProvider(config=config, request_headers=request_headers) - - actual_headers = provider.request_headers(stream_state, stream_slice, next_page_token) - assert actual_headers == expected_evaluated_headers diff --git a/airbyte-cdk/python/unit_tests/sources/declarative/requesters/request_options/__init__.py b/airbyte-cdk/python/unit_tests/sources/declarative/requesters/request_options/__init__.py new file mode 100644 index 000000000000..1100c1c58cf5 --- /dev/null +++ b/airbyte-cdk/python/unit_tests/sources/declarative/requesters/request_options/__init__.py @@ -0,0 +1,3 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# diff --git a/airbyte-cdk/python/unit_tests/sources/declarative/requesters/request_options/test_interpolated_request_options_provider.py b/airbyte-cdk/python/unit_tests/sources/declarative/requesters/request_options/test_interpolated_request_options_provider.py new file mode 100644 index 000000000000..0dc242b076a4 --- /dev/null +++ b/airbyte-cdk/python/unit_tests/sources/declarative/requesters/request_options/test_interpolated_request_options_provider.py @@ -0,0 +1,96 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+# + +import pytest +from airbyte_cdk.sources.declarative.requesters.request_options.interpolated_request_options_provider import ( + InterpolatedRequestOptionsProvider, +) + +state = {"date": "2021-01-01"} +stream_slice = {"start_date": "2020-01-01"} +next_page_token = {"offset": "12345", "page": "27"} +config = {"option": "OPTION"} + + +@pytest.mark.parametrize( + "test_name, input_request_params, expected_request_params", + [ + ("test_static_param", {"a_static_request_param": "a_static_value"}, {"a_static_request_param": "a_static_value"}), + ("test_value_depends_on_state", {"read_from_state": "{{ stream_state['date'] }}"}, {"read_from_state": "2021-01-01"}), + ("test_value_depends_on_stream_slice", {"read_from_slice": "{{ stream_slice['start_date'] }}"}, {"read_from_slice": "2020-01-01"}), + ("test_value_depends_on_next_page_token", {"read_from_token": "{{ next_page_token['offset'] }}"}, {"read_from_token": "12345"}), + ("test_value_depends_on_config", {"read_from_config": "{{ config['option'] }}"}, {"read_from_config": "OPTION"}), + ("test_none_value", {"missing_param": "{{ fake_path['date'] }}"}, {}), + ("test_return_empty_dict_for_string_templates", "Should return empty dict {{ stream_state['date'] }}", {}), + ( + "test_parameter_is_interpolated", + {"{{ stream_state['date'] }} - {{stream_slice['start_date']}} - {{next_page_token['offset']}} - {{config['option']}}": "ABC"}, + {"2021-01-01 - 2020-01-01 - 12345 - OPTION": "ABC"}, + ), + ], +) +def test_interpolated_request_params(test_name, input_request_params, expected_request_params): + provider = InterpolatedRequestOptionsProvider(config=config, request_parameters=input_request_params) + + actual_request_params = provider.request_params(state, stream_slice, next_page_token) + + assert actual_request_params == expected_request_params + + +@pytest.mark.parametrize( + "test_name, input_request_json, expected_request_json", + [ + ("test_static_json", {"a_static_request_param": "a_static_value"}, 
{"a_static_request_param": "a_static_value"}), + ("test_value_depends_on_state", {"read_from_state": "{{ stream_state['date'] }}"}, {"read_from_state": "2021-01-01"}), + ("test_value_depends_on_stream_slice", {"read_from_slice": "{{ stream_slice['start_date'] }}"}, {"read_from_slice": "2020-01-01"}), + ("test_value_depends_on_next_page_token", {"read_from_token": "{{ next_page_token['offset'] }}"}, {"read_from_token": "12345"}), + ("test_value_depends_on_config", {"read_from_config": "{{ config['option'] }}"}, {"read_from_config": "OPTION"}), + ("test_none_value", {"missing_json": "{{ fake_path['date'] }}"}, {}), + ( + "test_interpolated_keys", + {"{{ stream_state['date'] }}": 123, "{{ config['option'] }}": "ABC"}, + {"2021-01-01": 123, "OPTION": "ABC"}, + ), + ], +) +def test_interpolated_request_json(test_name, input_request_json, expected_request_json): + provider = InterpolatedRequestOptionsProvider(config=config, request_body_json=input_request_json) + + actual_request_json = provider.request_body_json(state, stream_slice, next_page_token) + + assert actual_request_json == expected_request_json + + +@pytest.mark.parametrize( + "test_name, input_request_data, expected_request_data", + [ + ("test_static_map_data", {"a_static_request_param": "a_static_value"}, {"a_static_request_param": "a_static_value"}), + ("test_static_string_data", "a_static_value", "a_static_value"), + ("test_string_depends_on_state", "key={{ stream_state['date'] }}", "key=2021-01-01"), + ("test_map_depends_on_stream_slice", {"read_from_slice": "{{ stream_slice['start_date'] }}"}, {"read_from_slice": "2020-01-01"}), + ("test_string_depends_on_next_page_token", "{{ next_page_token['page'] }} and {{ next_page_token['offset'] }}", "27 and 12345"), + ("test_map_depends_on_config", {"read_from_config": "{{ config['option'] }}"}, {"read_from_config": "OPTION"}), + ("test_defaults_to_empty_string", None, ""), + ("test_interpolated_keys", {"{{ stream_state['date'] }} - {{ next_page_token['offset'] 
}}": "ABC"}, {"2021-01-01 - 12345": "ABC"}), + ], +) +def test_interpolated_request_data(test_name, input_request_data, expected_request_data): + provider = InterpolatedRequestOptionsProvider(config=config, request_body_data=input_request_data) + + actual_request_data = provider.request_body_data(state, stream_slice, next_page_token) + + assert actual_request_data == expected_request_data + + +def test_error_on_create_for_both_request_json_and_data(): + request_json = {"body_key": "{{ stream_slice['start_date'] }}"} + request_data = "interpolate_me=5&invalid={{ config['option'] }}" + with pytest.raises(ValueError): + InterpolatedRequestOptionsProvider(config=config, request_body_json=request_json, request_body_data=request_data) + + +def test_interpolated_request_kwargs_is_empty(): + provider = InterpolatedRequestOptionsProvider(config=config) + actual_request_kwargs = provider.request_kwargs(state, stream_slice, next_page_token) + assert {} == actual_request_kwargs diff --git a/airbyte-cdk/python/unit_tests/sources/declarative/requesters/request_params/test_interpolated_request_parameter_provider.py b/airbyte-cdk/python/unit_tests/sources/declarative/requesters/request_params/test_interpolated_request_parameter_provider.py deleted file mode 100644 index 1699a62a9497..000000000000 --- a/airbyte-cdk/python/unit_tests/sources/declarative/requesters/request_params/test_interpolated_request_parameter_provider.py +++ /dev/null @@ -1,78 +0,0 @@ -# -# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
-# - - -from airbyte_cdk.sources.declarative.requesters.request_params.interpolated_request_parameter_provider import ( - InterpolatedRequestParameterProvider, -) - -state = {"date": "2021-01-01"} -stream_slice = {"start_date": "2020-01-01"} -next_page_token = {"offset": "12345"} -config = {"option": "OPTION"} - - -def test(): - request_parameters = {"a_static_request_param": "a_static_value"} - provider = InterpolatedRequestParameterProvider(request_parameters=request_parameters, config=config) - - request_params = provider.request_params(state, stream_slice, next_page_token) - - assert request_parameters == request_params - - -def test_value_depends_on_state(): - request_parameters = {"a_static_request_param": "{{ stream_state['date'] }}"} - provider = InterpolatedRequestParameterProvider(request_parameters=request_parameters, config=config) - - request_params = provider.request_params(state, stream_slice, next_page_token) - - assert request_params["a_static_request_param"] == state["date"] - - -def test_value_depends_on_stream_slice(): - request_parameters = {"a_static_request_param": "{{ stream_slice['start_date'] }}"} - provider = InterpolatedRequestParameterProvider(request_parameters=request_parameters, config=config) - - request_params = provider.request_params(state, stream_slice, next_page_token) - - assert request_params["a_static_request_param"] == stream_slice["start_date"] - - -def test_value_depends_on_next_page_token(): - request_parameters = {"a_static_request_param": "{{ next_page_token['offset'] }}"} - provider = InterpolatedRequestParameterProvider(request_parameters=request_parameters, config=config) - - request_params = provider.request_params(state, stream_slice, next_page_token) - - assert request_params["a_static_request_param"] == next_page_token["offset"] - - -def test_value_depends_on_config(): - request_parameters = {"a_static_request_param": "{{ config['option'] }}"} - provider = 
InterpolatedRequestParameterProvider(request_parameters=request_parameters, config=config) - - request_params = provider.request_params(state, stream_slice, next_page_token) - - assert request_params["a_static_request_param"] == config["option"] - - -def test_parameter_is_interpolated(): - request_parameters = { - "{{ stream_state['date'] }} - {{stream_slice['start_date']}} - {{next_page_token['offset']}} - {{config['option']}}": "ABC" - } - provider = InterpolatedRequestParameterProvider(request_parameters=request_parameters, config=config) - - request_params = provider.request_params(state, stream_slice, next_page_token) - - assert request_params[f"{state['date']} - {stream_slice['start_date']} - {next_page_token['offset']} - {config['option']}"] == "ABC" - - -def test_none_value(): - request_parameters = {"a_static_request_param": "{{ stream_state['date'] }}"} - provider = InterpolatedRequestParameterProvider(request_parameters=request_parameters, config=config) - - request_params = provider.request_params({}, stream_slice, next_page_token) - - assert len(request_params) == 0 diff --git a/airbyte-cdk/python/unit_tests/sources/declarative/requesters/test_http_requester.py b/airbyte-cdk/python/unit_tests/sources/declarative/requesters/test_http_requester.py index a9891445c8a7..baeeae2fe715 100644 --- a/airbyte-cdk/python/unit_tests/sources/declarative/requesters/test_http_requester.py +++ b/airbyte-cdk/python/unit_tests/sources/declarative/requesters/test_http_requester.py @@ -11,9 +11,13 @@ def test(): http_method = "GET" - request_parameters_provider = MagicMock() + request_options_provider = MagicMock() request_params = {"param": "value"} - request_parameters_provider.request_params.return_value = request_params + request_body_data = "body_key_1=value_1&body_key_2=value2" + request_body_json = {"body_field": "body_value"} + request_options_provider.request_params.return_value = request_params + request_options_provider.request_body_data.return_value = 
request_body_data + request_options_provider.request_body_json.return_value = request_body_json request_headers_provider = MagicMock() request_headers = {"header": "value"} @@ -39,18 +43,19 @@ def test(): url_base="{{ config['url'] }}", path="v1/{{ stream_slice['id'] }}", http_method=http_method, - request_parameters_provider=request_parameters_provider, - request_headers_provider=request_headers_provider, + request_options_provider=request_options_provider, authenticator=authenticator, retrier=retrier, config=config, ) assert requester.get_url_base() == "https://airbyte.io" - assert requester.get_path(stream_state=None, stream_slice=stream_slice, next_page_token=None) == "v1/1234" + assert requester.get_path(stream_state={}, stream_slice=stream_slice, next_page_token={}) == "v1/1234" assert requester.get_authenticator() == authenticator assert requester.get_method() == HttpMethod.GET - assert requester.request_params(stream_state=None, stream_slice=None, next_page_token=None) == request_params + assert requester.request_params(stream_state={}, stream_slice=None, next_page_token=None) == request_params + assert requester.request_body_data(stream_state={}, stream_slice=None, next_page_token=None) == request_body_data + assert requester.request_body_json(stream_state={}, stream_slice=None, next_page_token=None) == request_body_json assert requester.max_retries == max_retries assert requester.should_retry(requests.Response()) == should_retry assert requester.backoff_time(requests.Response()) == backoff_time diff --git a/airbyte-cdk/python/unit_tests/sources/declarative/requesters/test_interpolated_request_input_provider.py b/airbyte-cdk/python/unit_tests/sources/declarative/requesters/test_interpolated_request_input_provider.py new file mode 100644 index 000000000000..625f9c05cd4a --- /dev/null +++ b/airbyte-cdk/python/unit_tests/sources/declarative/requesters/test_interpolated_request_input_provider.py @@ -0,0 +1,48 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all 
rights reserved. +# + +import pytest as pytest +from airbyte_cdk.sources.declarative.interpolation.interpolated_mapping import InterpolatedMapping +from airbyte_cdk.sources.declarative.interpolation.interpolated_string import InterpolatedString +from airbyte_cdk.sources.declarative.requesters.interpolated_request_input_provider import InterpolatedRequestInputProvider + + +@pytest.mark.parametrize( + "test_name, input_request_data, expected_request_data", + [ + ("test_static_string_data", "a_static_value", "a_static_value"), + ("test_string_depends_on_state", "key={{ stream_state['state_key'] }}", "key=state_value"), + ("test_string_depends_on_next_page_token", "{{ next_page_token['token_key'] }} + ultra", "token_value + ultra"), + ], +) +def test_interpolated_string_request_input_provider(test_name, input_request_data, expected_request_data): + config = {"config_key": "value_of_config"} + stream_state = {"state_key": "state_value"} + next_page_token = {"token_key": "token_value"} + + provider = InterpolatedRequestInputProvider(config=config, request_inputs=input_request_data) + actual_request_data = provider.request_inputs(stream_state=stream_state, next_page_token=next_page_token) + + assert isinstance(provider._interpolator, InterpolatedString) + assert actual_request_data == expected_request_data + + +@pytest.mark.parametrize( + "test_name, input_request_data, expected_request_data", + [ + ("test_static_map_data", {"a_static_request_param": "a_static_value"}, {"a_static_request_param": "a_static_value"}), + ("test_map_depends_on_stream_slice", {"read_from_slice": "{{ stream_slice['slice_key'] }}"}, {"read_from_slice": "slice_value"}), + ("test_map_depends_on_config", {"read_from_config": "{{ config['config_key'] }}"}, {"read_from_config": "value_of_config"}), + ("test_defaults_to_empty_dictionary", None, {}), + ], +) +def test_initialize_interpolated_mapping_request_input_provider(test_name, input_request_data, expected_request_data): + config = {"config_key": 
"value_of_config"} + stream_slice = {"slice_key": "slice_value"} + + provider = InterpolatedRequestInputProvider(config=config, request_inputs=input_request_data) + actual_request_data = provider.request_inputs(stream_state={}, stream_slice=stream_slice) + + assert isinstance(provider._interpolator, InterpolatedMapping) + assert actual_request_data == expected_request_data diff --git a/airbyte-cdk/python/unit_tests/sources/declarative/requesters/test_interpolated_request_parameter_provider.py b/airbyte-cdk/python/unit_tests/sources/declarative/requesters/test_interpolated_request_parameter_provider.py deleted file mode 100644 index 1699a62a9497..000000000000 --- a/airbyte-cdk/python/unit_tests/sources/declarative/requesters/test_interpolated_request_parameter_provider.py +++ /dev/null @@ -1,78 +0,0 @@ -# -# Copyright (c) 2022 Airbyte, Inc., all rights reserved. -# - - -from airbyte_cdk.sources.declarative.requesters.request_params.interpolated_request_parameter_provider import ( - InterpolatedRequestParameterProvider, -) - -state = {"date": "2021-01-01"} -stream_slice = {"start_date": "2020-01-01"} -next_page_token = {"offset": "12345"} -config = {"option": "OPTION"} - - -def test(): - request_parameters = {"a_static_request_param": "a_static_value"} - provider = InterpolatedRequestParameterProvider(request_parameters=request_parameters, config=config) - - request_params = provider.request_params(state, stream_slice, next_page_token) - - assert request_parameters == request_params - - -def test_value_depends_on_state(): - request_parameters = {"a_static_request_param": "{{ stream_state['date'] }}"} - provider = InterpolatedRequestParameterProvider(request_parameters=request_parameters, config=config) - - request_params = provider.request_params(state, stream_slice, next_page_token) - - assert request_params["a_static_request_param"] == state["date"] - - -def test_value_depends_on_stream_slice(): - request_parameters = {"a_static_request_param": "{{ 
stream_slice['start_date'] }}"} - provider = InterpolatedRequestParameterProvider(request_parameters=request_parameters, config=config) - - request_params = provider.request_params(state, stream_slice, next_page_token) - - assert request_params["a_static_request_param"] == stream_slice["start_date"] - - -def test_value_depends_on_next_page_token(): - request_parameters = {"a_static_request_param": "{{ next_page_token['offset'] }}"} - provider = InterpolatedRequestParameterProvider(request_parameters=request_parameters, config=config) - - request_params = provider.request_params(state, stream_slice, next_page_token) - - assert request_params["a_static_request_param"] == next_page_token["offset"] - - -def test_value_depends_on_config(): - request_parameters = {"a_static_request_param": "{{ config['option'] }}"} - provider = InterpolatedRequestParameterProvider(request_parameters=request_parameters, config=config) - - request_params = provider.request_params(state, stream_slice, next_page_token) - - assert request_params["a_static_request_param"] == config["option"] - - -def test_parameter_is_interpolated(): - request_parameters = { - "{{ stream_state['date'] }} - {{stream_slice['start_date']}} - {{next_page_token['offset']}} - {{config['option']}}": "ABC" - } - provider = InterpolatedRequestParameterProvider(request_parameters=request_parameters, config=config) - - request_params = provider.request_params(state, stream_slice, next_page_token) - - assert request_params[f"{state['date']} - {stream_slice['start_date']} - {next_page_token['offset']} - {config['option']}"] == "ABC" - - -def test_none_value(): - request_parameters = {"a_static_request_param": "{{ stream_state['date'] }}"} - provider = InterpolatedRequestParameterProvider(request_parameters=request_parameters, config=config) - - request_params = provider.request_params({}, stream_slice, next_page_token) - - assert len(request_params) == 0 diff --git 
a/airbyte-cdk/python/unit_tests/sources/declarative/retrievers/test_simple_retriever.py b/airbyte-cdk/python/unit_tests/sources/declarative/retrievers/test_simple_retriever.py index 9a92bf9ce53c..bebf5c56b98e 100644 --- a/airbyte-cdk/python/unit_tests/sources/declarative/retrievers/test_simple_retriever.py +++ b/airbyte-cdk/python/unit_tests/sources/declarative/retrievers/test_simple_retriever.py @@ -22,8 +22,8 @@ def test(): next_page_token = {"cursor": "cursor_value"} paginator.next_page_token.return_value = next_page_token - extractor = MagicMock() - extractor.extract_records.return_value = records + record_selector = MagicMock() + record_selector.select_records.return_value = records iterator = MagicMock() stream_slices = [{"date": "2022-01-01"}, {"date": "2022-01-02"}] @@ -62,7 +62,7 @@ def test(): use_cache = True requester.use_cache = use_cache - retriever = SimpleRetriever("stream_name", primary_key, requester, paginator, extractor, iterator, state) + retriever = SimpleRetriever("stream_name", primary_key, requester, paginator, record_selector, iterator, state) # hack because we clone the state... retriever._state = state diff --git a/airbyte-cdk/python/unit_tests/sources/declarative/stream_slicers/test_cartesian_product_stream_slicer.py b/airbyte-cdk/python/unit_tests/sources/declarative/stream_slicers/test_cartesian_product_stream_slicer.py new file mode 100644 index 000000000000..29cc1d58eadb --- /dev/null +++ b/airbyte-cdk/python/unit_tests/sources/declarative/stream_slicers/test_cartesian_product_stream_slicer.py @@ -0,0 +1,61 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+# + +import pytest as pytest +from airbyte_cdk.models import SyncMode +from airbyte_cdk.sources.declarative.interpolation.interpolated_string import InterpolatedString +from airbyte_cdk.sources.declarative.stream_slicers.cartesian_product_stream_slicer import CartesianProductStreamSlicer +from airbyte_cdk.sources.declarative.stream_slicers.datetime_stream_slicer import DatetimeStreamSlicer +from airbyte_cdk.sources.declarative.stream_slicers.list_stream_slicer import ListStreamSlicer + + +@pytest.mark.parametrize( + "test_name, stream_slicers, expected_slices", + [ + ( + "test_single_stream_slicer", + [ListStreamSlicer(["customer", "store", "subscription"], {"owner_resource": "{{ slice_value }}"}, None)], + [{"owner_resource": "customer"}, {"owner_resource": "store"}, {"owner_resource": "subscription"}], + ), + ( + "test_two_stream_slicers", + [ + ListStreamSlicer(["customer", "store", "subscription"], {"owner_resource": "{{ slice_value }}"}, None), + ListStreamSlicer(["A", "B"], {"letter": "{{ slice_value }}"}, None), + ], + [ + {"owner_resource": "customer", "letter": "A"}, + {"owner_resource": "customer", "letter": "B"}, + {"owner_resource": "store", "letter": "A"}, + {"owner_resource": "store", "letter": "B"}, + {"owner_resource": "subscription", "letter": "A"}, + {"owner_resource": "subscription", "letter": "B"}, + ], + ), + ( + "test_list_and_datetime", + [ + ListStreamSlicer(["customer", "store", "subscription"], {"owner_resource": "{{ slice_value }}"}, None), + DatetimeStreamSlicer( + InterpolatedString("2021-01-01"), InterpolatedString("2021-01-03"), "1d", InterpolatedString(""), "%Y-%m-%d", None + ), + ], + [ + {"owner_resource": "customer", "start_date": "2021-01-01", "end_date": "2021-01-01"}, + {"owner_resource": "customer", "start_date": "2021-01-02", "end_date": "2021-01-02"}, + {"owner_resource": "customer", "start_date": "2021-01-03", "end_date": "2021-01-03"}, + {"owner_resource": "store", "start_date": "2021-01-01", "end_date": "2021-01-01"}, + 
{"owner_resource": "store", "start_date": "2021-01-02", "end_date": "2021-01-02"}, + {"owner_resource": "store", "start_date": "2021-01-03", "end_date": "2021-01-03"}, + {"owner_resource": "subscription", "start_date": "2021-01-01", "end_date": "2021-01-01"}, + {"owner_resource": "subscription", "start_date": "2021-01-02", "end_date": "2021-01-02"}, + {"owner_resource": "subscription", "start_date": "2021-01-03", "end_date": "2021-01-03"}, + ], + ), + ], +) +def test_substream_slicer(test_name, stream_slicers, expected_slices): + slicer = CartesianProductStreamSlicer(stream_slicers) + slices = [s for s in slicer.stream_slices(SyncMode.incremental, stream_state=None)] + assert slices == expected_slices diff --git a/airbyte-cdk/python/unit_tests/sources/declarative/stream_slicers/test_list_slicer.py b/airbyte-cdk/python/unit_tests/sources/declarative/stream_slicers/test_list_slicer.py new file mode 100644 index 000000000000..8f706687e958 --- /dev/null +++ b/airbyte-cdk/python/unit_tests/sources/declarative/stream_slicers/test_list_slicer.py @@ -0,0 +1,24 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+# + +import pytest as pytest +from airbyte_cdk.models import SyncMode +from airbyte_cdk.sources.declarative.stream_slicers.list_stream_slicer import ListStreamSlicer + + +@pytest.mark.parametrize( + "test_name, slice_values, slice_definition, expected_slices", + [ + ( + "test_single_element", + ["customer", "store", "subscription"], + {"owner_resource": "{{ slice_value }}"}, + [{"owner_resource": "customer"}, {"owner_resource": "store"}, {"owner_resource": "subscription"}], + ), + ], +) +def test_list_slicer(test_name, slice_values, slice_definition, expected_slices): + slicer = ListStreamSlicer(slice_values, slice_definition, config={}) + slices = [s for s in slicer.stream_slices(SyncMode.incremental, stream_state=None)] + assert slices == expected_slices diff --git a/airbyte-cdk/python/unit_tests/sources/declarative/test_factory.py b/airbyte-cdk/python/unit_tests/sources/declarative/test_factory.py index eac6a68731ee..7d4f74645476 100644 --- a/airbyte-cdk/python/unit_tests/sources/declarative/test_factory.py +++ b/airbyte-cdk/python/unit_tests/sources/declarative/test_factory.py @@ -4,20 +4,26 @@ from airbyte_cdk.sources.declarative.declarative_stream import DeclarativeStream from airbyte_cdk.sources.declarative.decoders.json_decoder import JsonDecoder +from airbyte_cdk.sources.declarative.extractors.record_filter import RecordFilter +from airbyte_cdk.sources.declarative.extractors.record_selector import RecordSelector from airbyte_cdk.sources.declarative.parsers.factory import DeclarativeComponentFactory from airbyte_cdk.sources.declarative.parsers.yaml_parser import YamlParser -from airbyte_cdk.sources.declarative.requesters.request_params.interpolated_request_parameter_provider import ( - InterpolatedRequestParameterProvider, +from airbyte_cdk.sources.declarative.requesters.http_requester import HttpRequester +from airbyte_cdk.sources.declarative.requesters.paginators.next_page_url_paginator import NextPageUrlPaginator +from 
airbyte_cdk.sources.declarative.requesters.request_options.interpolated_request_options_provider import ( + InterpolatedRequestOptionsProvider, ) from airbyte_cdk.sources.declarative.requesters.requester import HttpMethod +from airbyte_cdk.sources.declarative.requesters.retriers.default_retrier import DefaultRetrier from airbyte_cdk.sources.declarative.retrievers.simple_retriever import SimpleRetriever from airbyte_cdk.sources.declarative.schema.json_schema import JsonSchema +from airbyte_cdk.sources.streams.http.requests_native_auth.token import TokenAuthenticator factory = DeclarativeComponentFactory() parser = YamlParser() -input_config = {"apikey": "verysecrettoken"} +input_config = {"apikey": "verysecrettoken", "repos": ["airbyte", "airbyte-cloud"]} def test_factory(): @@ -26,15 +32,19 @@ def test_factory(): offset_request_parameters: offset: "{{ next_page_token['offset'] }}" limit: "*ref(limit)" - offset_pagination_request_parameters: - class_name: airbyte_cdk.sources.declarative.requesters.request_params.interpolated_request_parameter_provider.InterpolatedRequestParameterProvider + request_options: + class_name: airbyte_cdk.sources.declarative.requesters.request_options.interpolated_request_options_provider.InterpolatedRequestOptionsProvider request_parameters: "*ref(offset_request_parameters)" + request_body_json: + body_offset: "{{ next_page_token['offset'] }}" """ config = parser.parse(content) - offset_pagination_request_parameters = factory.create_component(config["offset_pagination_request_parameters"], input_config)() - assert type(offset_pagination_request_parameters) == InterpolatedRequestParameterProvider - assert offset_pagination_request_parameters._interpolator._config == input_config - assert offset_pagination_request_parameters._interpolator._interpolator._mapping["offset"] == "{{ next_page_token['offset'] }}" + request_options_provider = factory.create_component(config["request_options"], input_config)() + assert 
type(request_options_provider) == InterpolatedRequestOptionsProvider + assert request_options_provider._parameter_interpolator._config == input_config + assert request_options_provider._parameter_interpolator._interpolator._mapping["offset"] == "{{ next_page_token['offset'] }}" + assert request_options_provider._body_json_interpolator._config == input_config + assert request_options_provider._body_json_interpolator._interpolator._mapping["body_offset"] == "{{ next_page_token['offset'] }}" def test_interpolate_config(): @@ -48,6 +58,33 @@ def test_interpolate_config(): assert authenticator._tokens == ["verysecrettoken"] +def test_list_based_stream_slicer_with_values_refd(): + content = """ + repositories: ["airbyte", "airbyte-cloud"] + stream_slicer: + class_name: airbyte_cdk.sources.declarative.stream_slicers.list_stream_slicer.ListStreamSlicer + slice_values: "*ref(repositories)" + slice_definition: + repository: "{{ slice_value }}" + """ + config = parser.parse(content) + stream_slicer = factory.create_component(config["stream_slicer"], input_config)() + assert ["airbyte", "airbyte-cloud"] == stream_slicer._slice_values + + +def test_list_based_stream_slicer_with_values_defined_in_config(): + content = """ + stream_slicer: + class_name: airbyte_cdk.sources.declarative.stream_slicers.list_stream_slicer.ListStreamSlicer + slice_values: "{{config['repos']}}" + slice_definition: + repository: "{{ slice_value }}" + """ + config = parser.parse(content) + stream_slicer = factory.create_component(config["stream_slicer"], input_config)() + assert ["airbyte", "airbyte-cloud"] == stream_slicer._slice_values + + def test_full_config(): content = """ decoder: @@ -55,6 +92,11 @@ def test_full_config(): extractor: class_name: airbyte_cdk.sources.declarative.extractors.jello.JelloExtractor decoder: "*ref(decoder)" +selector: + class_name: airbyte_cdk.sources.declarative.extractors.record_selector.RecordSelector + record_filter: + class_name: 
airbyte_cdk.sources.declarative.extractors.record_filter.RecordFilter + condition: "{{ record['id'] > stream_state['id'] }}" metadata_paginator: class_name: "airbyte_cdk.sources.declarative.requesters.paginators.next_page_url_paginator.NextPageUrlPaginator" next_page_token_template: @@ -62,8 +104,8 @@ def test_full_config(): next_page_url_from_token_partial: class_name: "airbyte_cdk.sources.declarative.interpolation.interpolated_string.InterpolatedString" string: "{{ next_page_token['next_page_url'] }}" -request_parameters_provider: - class_name: airbyte_cdk.sources.declarative.requesters.request_params.interpolated_request_parameter_provider.InterpolatedRequestParameterProvider +request_options_provider: + class_name: airbyte_cdk.sources.declarative.requesters.request_options.interpolated_request_options_provider.InterpolatedRequestOptionsProvider requester: class_name: airbyte_cdk.sources.declarative.requesters.http_requester.HttpRequester name: "{{ options['name'] }}" @@ -72,7 +114,7 @@ def test_full_config(): authenticator: class_name: airbyte_cdk.sources.streams.http.requests_native_auth.token.TokenAuthenticator token: "{{ config['apikey'] }}" - request_parameters_provider: "*ref(request_parameters_provider)" + request_parameters_provider: "*ref(request_options_provider)" retrier: class_name: airbyte_cdk.sources.declarative.requesters.retriers.default_retrier.DefaultRetrier retriever: @@ -108,6 +150,8 @@ def test_full_config(): default: "marketing/lists" paginator: ref: "*ref(metadata_paginator)" + record_selector: + ref: "*ref(selector)" check: class_name: airbyte_cdk.sources.declarative.checks.check_stream.CheckStream stream_names: ["list_stream"] @@ -125,11 +169,93 @@ def test_full_config(): assert type(stream._retriever) == SimpleRetriever assert stream._retriever._requester._method == HttpMethod.GET assert stream._retriever._requester._authenticator._tokens == ["verysecrettoken"] - assert type(stream._retriever._extractor._decoder) == JsonDecoder - assert 
stream._retriever._extractor._transform == ".result[]" + assert type(stream._retriever._record_selector) == RecordSelector + assert type(stream._retriever._record_selector._extractor._decoder) == JsonDecoder + assert stream._retriever._record_selector._extractor._transform == ".result[]" + assert type(stream._retriever._record_selector._record_filter) == RecordFilter + assert stream._retriever._record_selector._record_filter._filter_interpolator._condition == "{{ record['id'] > stream_state['id'] }}" assert stream._schema_loader._file_path._string == "./source_sendgrid/schemas/lists.json" checker = factory.create_component(config["check"], input_config)() streams_to_check = checker._stream_names assert len(streams_to_check) == 1 assert list(streams_to_check)[0] == "list_stream" + + assert stream._retriever._requester._path._default == "marketing/lists" + + +def test_create_requester(): + content = """ + requester: + class_name: airbyte_cdk.sources.declarative.requesters.http_requester.HttpRequester + path: "/v3/marketing/lists" + name: lists + url_base: "https://api.sendgrid.com" + authenticator: + type: "TokenAuthenticator" + token: "{{ config.apikey }}" + request_options_provider: + request_parameters: + page_size: 10 + request_headers: + header: header_value + """ + config = parser.parse(content) + component = factory.create_component(config["requester"], input_config)() + assert isinstance(component, HttpRequester) + assert isinstance(component._retrier, DefaultRetrier) + assert component._path._string == "/v3/marketing/lists" + assert component._url_base._string == "https://api.sendgrid.com" + assert isinstance(component._authenticator, TokenAuthenticator) + assert component._method == HttpMethod.GET + assert component._request_options_provider._parameter_interpolator._interpolator._mapping["page_size"] == 10 + assert component._request_options_provider._headers_interpolator._interpolator._mapping["header"] == "header_value" + assert component._name == "lists" 
+ + +def test_full_config_with_defaults(): + content = """ + lists_stream: + class_name: "airbyte_cdk.sources.declarative.declarative_stream.DeclarativeStream" + options: + name: "lists" + primary_key: id + url_base: "https://api.sendgrid.com" + schema_loader: + file_path: "./source_sendgrid/schemas/{{options.name}}.yaml" + retriever: + paginator: + type: "NextPageUrlPaginator" + next_page_token_template: + next_page_token: "{{ decoded_response.metadata.next}}" + requester: + path: "/v3/marketing/lists" + authenticator: + type: "TokenAuthenticator" + token: "{{ config.apikey }}" + request_parameters: + page_size: 10 + record_selector: + extractor: + transform: ".result[]" + streams: + - "*ref(lists_stream)" + """ + config = parser.parse(content) + + stream_config = config["lists_stream"] + stream = factory.create_component(stream_config, input_config)() + assert type(stream) == DeclarativeStream + assert stream.primary_key == "id" + assert stream.name == "lists" + assert type(stream._schema_loader) == JsonSchema + assert type(stream._retriever) == SimpleRetriever + assert stream._retriever._requester._method == HttpMethod.GET + assert stream._retriever._requester._authenticator._tokens == ["verysecrettoken"] + assert stream._retriever._record_selector._extractor._transform == ".result[]" + assert stream._schema_loader._file_path._string == "./source_sendgrid/schemas/lists.yaml" + assert isinstance(stream._retriever._paginator, NextPageUrlPaginator) + assert stream._retriever._paginator._url_base == "https://api.sendgrid.com" + assert stream._retriever._paginator._interpolated_paginator._next_page_token_template._mapping == { + "next_page_token": "{{ decoded_response.metadata.next}}" + } diff --git a/airbyte-cdk/python/unit_tests/sources/streams/http/auth/test_auth.py b/airbyte-cdk/python/unit_tests/sources/streams/http/auth/test_auth.py index 2854c93f6953..3016113533eb 100644 --- a/airbyte-cdk/python/unit_tests/sources/streams/http/auth/test_auth.py +++ 
b/airbyte-cdk/python/unit_tests/sources/streams/http/auth/test_auth.py @@ -5,7 +5,13 @@ import logging -from airbyte_cdk.sources.streams.http.auth import MultipleTokenAuthenticator, NoAuth, Oauth2Authenticator, TokenAuthenticator +from airbyte_cdk.sources.streams.http.auth import ( + BasicHttpAuthenticator, + MultipleTokenAuthenticator, + NoAuth, + Oauth2Authenticator, + TokenAuthenticator, +) LOGGER = logging.getLogger(__name__) @@ -41,6 +47,12 @@ def test_no_auth(): assert {} == no_auth.get_auth_header() +def test_basic_authenticator(): + token = BasicHttpAuthenticator("client_id", "client_secret") + header = token.get_auth_header() + assert {"Authorization": "Basic Y2xpZW50X2lkOmNsaWVudF9zZWNyZXQ="} == header + + class TestOauth2Authenticator: """ Test class for OAuth2Authenticator. @@ -51,6 +63,7 @@ class TestOauth2Authenticator: client_secret = "client_secret" refresh_token = "refresh_token" refresh_access_token_headers = {"Header_1": "value 1", "Header_2": "value 2"} + refresh_access_token_authenticator = BasicHttpAuthenticator(client_id, client_secret) def test_get_auth_header_fresh(self, mocker): """ @@ -129,3 +142,14 @@ def test_refresh_access_token(self, requests_mock): assert header in mock_refresh_token_call.last_request.headers assert self.refresh_access_token_headers[header] == mock_refresh_token_call.last_request.headers[header] assert mock_refresh_token_call.called + + def test_refresh_access_authenticator(self): + oauth = Oauth2Authenticator( + TestOauth2Authenticator.refresh_endpoint, + TestOauth2Authenticator.client_id, + TestOauth2Authenticator.client_secret, + TestOauth2Authenticator.refresh_token, + refresh_access_token_authenticator=TestOauth2Authenticator.refresh_access_token_authenticator, + ) + expected_headers = {"Authorization": "Basic Y2xpZW50X2lkOmNsaWVudF9zZWNyZXQ="} + assert expected_headers == oauth.get_refresh_access_token_headers() diff --git a/airbyte-cdk/python/unit_tests/sources/streams/http/test_http.py 
b/airbyte-cdk/python/unit_tests/sources/streams/http/test_http.py index b57bf6dea949..fe87508e51a5 100644 --- a/airbyte-cdk/python/unit_tests/sources/streams/http/test_http.py +++ b/airbyte-cdk/python/unit_tests/sources/streams/http/test_http.py @@ -328,13 +328,13 @@ def test_text_json_body(self, mocker, requests_mock): list(stream.read_records(sync_mode=SyncMode.full_refresh)) def test_body_for_all_methods(self, mocker, requests_mock): - """Stream must send a body for POST/PATCH/PUT methods only""" + """Stream must send a body for GET/POST/PATCH/PUT methods only""" stream = PostHttpStream() methods = { "POST": True, "PUT": True, "PATCH": True, - "GET": False, + "GET": True, "DELETE": False, "OPTIONS": False, } @@ -476,10 +476,10 @@ def test_default_parse_response_error_message(api_response: dict, expected_messa assert message == expected_message -def test_default_parse_response_error_message_not_json(): +def test_default_parse_response_error_message_not_json(requests_mock): stream = StubBasicReadHttpStream() - response = MagicMock() - response.json.side_effect = requests.exceptions.JSONDecodeError() + requests_mock.register_uri("GET", "mock://test.com/not_json", text="this is not json") + response = requests.get("mock://test.com/not_json") message = stream.parse_response_error_message(response) assert message is None diff --git a/airbyte-cdk/python/unit_tests/sources/utils/test_schema_helpers.py b/airbyte-cdk/python/unit_tests/sources/utils/test_schema_helpers.py index 9268858e023b..55328fed0f2a 100644 --- a/airbyte-cdk/python/unit_tests/sources/utils/test_schema_helpers.py +++ b/airbyte-cdk/python/unit_tests/sources/utils/test_schema_helpers.py @@ -12,10 +12,9 @@ from pathlib import Path import jsonref -import pytest from airbyte_cdk.logger import AirbyteLogger from airbyte_cdk.models.airbyte_protocol import ConnectorSpecification -from airbyte_cdk.sources.utils.schema_helpers import ResourceSchemaLoader, check_config_against_spec_or_exit, get_secret_values +from 
airbyte_cdk.sources.utils.schema_helpers import ResourceSchemaLoader, check_config_against_spec_or_exit from pytest import fixture from pytest import raises as pytest_raises @@ -186,69 +185,3 @@ def test_shared_schemas_resolves_nested(): # Make sure generated schema is JSON serializable assert json.dumps(actual_schema) assert jsonref.JsonRef.replace_refs(actual_schema) - - -@pytest.mark.parametrize( - "schema,config,expected", - [ - ( - { - "type": "object", - "properties": { - "credentials": { - "type": "object", - "oneOf": [ - { - "type": "object", - "properties": { - "option_title": { - "type": "string", - "const": "OAuth Credentials", - } - }, - }, - { - "type": "object", - "properties": { - "option_title": {"type": "string"}, - "personal_access_token": { - "type": "string", - "airbyte_secret": True, - }, - }, - }, - ], - }, - "repository": {"type": "string"}, - "start_date": {"type": "string"}, - }, - }, - {"credentials": {"personal_access_token": "secret"}}, - ["secret"], - ), - ( - { - "type": "object", - "properties": { - "access_token": {"type": "string", "airbyte_secret": True}, - "whatever": {"type": "string", "airbyte_secret": False}, - }, - }, - {"access_token": "secret"}, - ["secret"], - ), - ( - { - "type": "object", - "properties": { - "access_token": {"type": "string", "airbyte_secret": False}, - "whatever": {"type": "string", "airbyte_secret": False}, - }, - }, - {"access_token": "secret"}, - [], - ), - ], -) -def test_get_secret_values(schema, config, expected): - assert get_secret_values(schema, config) == expected diff --git a/airbyte-cdk/python/unit_tests/sources/utils/test_sentry.py b/airbyte-cdk/python/unit_tests/sources/utils/test_sentry.py deleted file mode 100644 index ccba0f25e78e..000000000000 --- a/airbyte-cdk/python/unit_tests/sources/utils/test_sentry.py +++ /dev/null @@ -1,125 +0,0 @@ -# -# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
-# - -import json -import os -from dataclasses import dataclass -from logging import getLogger -from typing import List -from unittest import mock - -import requests -from airbyte_cdk.sources.utils.sentry import AirbyteSentry -from sentry_sdk.transport import Transport - - -@mock.patch("airbyte_cdk.sources.utils.sentry.sentry_sdk") -def test_sentry_init_no_env(sentry_mock): - assert AirbyteSentry.DSN_ENV_NAME not in os.environ - AirbyteSentry.init("test_source") - assert not sentry_mock.init.called - assert not AirbyteSentry.sentry_enabled - AirbyteSentry.set_tag("tagname", "value") - assert not sentry_mock.set_tag.called - AirbyteSentry.add_breadcrumb("msg", data={}) - assert not sentry_mock.add_breadcrumb.called - - with AirbyteSentry.start_transaction("name", "op"): - assert not sentry_mock.start_transaction.called - - with AirbyteSentry.start_transaction_span("name", "op"): - assert not sentry_mock.start_span.called - - -@mock.patch.dict(os.environ, {AirbyteSentry.DSN_ENV_NAME: "dsn"}) -@mock.patch("airbyte_cdk.sources.utils.sentry.sentry_sdk") -def test_sentry_init(sentry_mock): - AirbyteSentry.init("test_source") - assert sentry_mock.init.called - sentry_mock.set_tag.assert_any_call("source", "test_source") - sentry_mock.set_tag.assert_any_call("run_id", mock.ANY) - assert AirbyteSentry.sentry_enabled - AirbyteSentry.set_tag("tagname", "value") - assert sentry_mock.set_tag.called - AirbyteSentry.add_breadcrumb("msg", data={}) - assert sentry_mock.add_breadcrumb.called - with AirbyteSentry.start_transaction("name", "op"): - assert sentry_mock.start_transaction.called - - with AirbyteSentry.start_transaction_span("name", "op"): - assert sentry_mock.start_span.called - - -@dataclass -class TestTransport(Transport): - secrets: List[str] - # Sentry sdk wraps sending event with try except that would intercept - # AssertionError exception resulting it would ignore assert directive. - # Use this variable to check if test failed after sentry code executed. 
- failed = None - - def capture_envelope(self, envelop): - for s in self.secrets: - for i in envelop.items: - payload = json.dumps(i.payload.json) - assert s not in payload - - def capture_event(self, event): - if self.failed: - return - event = json.dumps(event) - for s in self.secrets: - if s in event: - self.failed = f"{s} should not be in {event}" - return - - -@mock.patch.dict(os.environ, {AirbyteSentry.DSN_ENV_NAME: "https://22222@222.ingest.sentry.io/111"}) -def test_sentry_sensitive_info(httpserver): - SECRET = "SOME_secret" - UNEXPECTED_SECRET = "UnexEpectedSecret" - SECRETS = [SECRET] - transport = TestTransport(secrets=[*SECRETS, UNEXPECTED_SECRET]) - - AirbyteSentry.init("test_source", transport=transport, secret_values=SECRETS) - - AirbyteSentry.add_breadcrumb("msg", {"crumb": SECRET}) - AirbyteSentry.set_context("my secret", {"api_key": SECRET}) - AirbyteSentry.capture_message(f"this is {SECRET}") - AirbyteSentry.capture_message(f"Issue url http://localhost:{httpserver.port}/test?api_key={UNEXPECTED_SECRET}") - AirbyteSentry.capture_message(f"Issue url http://localhost:{httpserver.port}/test?access_token={UNEXPECTED_SECRET}") - AirbyteSentry.capture_message(f"Issue url http://localhost:{httpserver.port}/test?refresh_token={UNEXPECTED_SECRET}") - AirbyteSentry.set_context("headers", {"Authorization": f"Bearer {UNEXPECTED_SECRET}"}) - getLogger("airbyte").info(f"this is {SECRET}") - requests.get( - f"http://localhost:{httpserver.port}/test?api_key={SECRET}", - headers={"Authorization": f"Bearer {SECRET}"}, - ).text - requests.get( - f"http://localhost:{httpserver.port}/test?api_key={UNEXPECTED_SECRET}", - headers={"Authorization": f"Bearer {UNEXPECTED_SECRET}"}, - ).text - AirbyteSentry.capture_exception(Exception(f"Secret info: {SECRET}")) - assert not transport.failed - - -@mock.patch.dict(os.environ, {AirbyteSentry.DSN_ENV_NAME: "https://22222@222.ingest.sentry.io/111"}) -def test_sentry_sensitive_info_transactions(httpserver): - SECRET = 
"SOME_secret" - SECRETS = [SECRET] - UNEXPECTED_SECRET = "UnexEpectedSecret" - transport = TestTransport(secrets=[*SECRETS, UNEXPECTED_SECRET]) - AirbyteSentry.init("test_source", transport=transport, secret_values=SECRETS) - - AirbyteSentry.set_context("my secret", {"api_key": SECRET}) - AirbyteSentry.set_context("headers", {"Authorization": f"Bearer {UNEXPECTED_SECRET}"}) - with AirbyteSentry.start_transaction("name", "op"): - with AirbyteSentry.start_transaction_span( - "name", description=f"http://localhost:{httpserver.port}/test?api_key={UNEXPECTED_SECRET}" - ): - requests.get( - f"http://localhost:{httpserver.port}/test?api_key={SECRET}", - headers={"Authorization": f"Bearer {SECRET}"}, - ).text - assert not transport.failed diff --git a/airbyte-cdk/python/unit_tests/utils/test_secret_utils.py b/airbyte-cdk/python/unit_tests/utils/test_secret_utils.py index e86dc1215f59..0694b2786da7 100644 --- a/airbyte-cdk/python/unit_tests/utils/test_secret_utils.py +++ b/airbyte-cdk/python/unit_tests/utils/test_secret_utils.py @@ -2,21 +2,117 @@ # Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
# +import pytest +from airbyte_cdk.utils.airbyte_secrets_utils import filter_secrets, get_secret_paths, get_secrets, update_secrets -from airbyte_cdk.utils.airbyte_secrets_utils import filter_secrets, update_secrets - -SECRET_VALUE = "i am a very sensitive secret" -ANOTHER_SECRET_VALUE = "also super secret" +SECRET_STRING_KEY = "secret_key1" +SECRET_STRING_VALUE = "secret_value" +SECRET_STRING_2_KEY = "secret_key2" +SECRET_STRING_2_VALUE = "second_secret_val" +SECRET_INT_KEY = "secret_int" +SECRET_INT_VALUE = 1337 +NOT_SECRET_KEY = "not_a_secret" NOT_SECRET_VALUE = "unimportant value" +flat_spec_with_secret = {"properties": {SECRET_STRING_KEY: {"type": "string", "airbyte_secret": True}, NOT_SECRET_KEY: {"type": "string"}}} +flat_config_with_secret = {SECRET_STRING_KEY: SECRET_STRING_VALUE, NOT_SECRET_KEY: NOT_SECRET_VALUE} + +flat_spec_with_secret_int = { + "properties": {SECRET_INT_KEY: {"type": "integer", "airbyte_secret": True}, NOT_SECRET_KEY: {"type": "string"}} +} +flat_config_with_secret_int = {SECRET_INT_KEY: SECRET_INT_VALUE, NOT_SECRET_KEY: NOT_SECRET_VALUE} + +flat_spec_without_secrets = {"properties": {NOT_SECRET_KEY: {"type": "string"}}} +flat_config_without_secrets = {NOT_SECRET_KEY: NOT_SECRET_VALUE} + +spec_with_oneof_secrets = { + "properties": { + SECRET_STRING_KEY: {"type": "string", "airbyte_secret": True}, + NOT_SECRET_KEY: {"type": "string"}, + "credentials": { + "type": "object", + "oneOf": [ + { + "type": "object", + "properties": {SECRET_STRING_2_KEY: {"type": "string", "airbyte_secret": True}, NOT_SECRET_KEY: {"type": "string"}}, + }, + { + "type": "object", + "properties": {SECRET_INT_KEY: {"type": "integer", "airbyte_secret": True}, NOT_SECRET_KEY: {"type": "string"}}, + }, + ], + }, + } +} +config_with_oneof_secrets_1 = { + SECRET_STRING_KEY: SECRET_STRING_VALUE, + NOT_SECRET_KEY: NOT_SECRET_VALUE, + "credentials": {SECRET_STRING_2_KEY: SECRET_STRING_2_VALUE}, +} +config_with_oneof_secrets_2 = { + SECRET_STRING_KEY: SECRET_STRING_VALUE, 
+ NOT_SECRET_KEY: NOT_SECRET_VALUE, + "credentials": {SECRET_INT_KEY: SECRET_INT_VALUE}, +} + +spec_with_nested_secrets = { + "properties": { + SECRET_STRING_KEY: {"type": "string", "airbyte_secret": True}, + NOT_SECRET_KEY: {"type": "string"}, + "credentials": { + "type": "object", + "properties": { + SECRET_STRING_2_KEY: {"type": "string", "airbyte_secret": True}, + NOT_SECRET_KEY: {"type": "string"}, + SECRET_INT_KEY: {"type": "integer", "airbyte_secret": True}, + }, + }, + } +} +config_with_nested_secrets = { + SECRET_STRING_KEY: SECRET_STRING_VALUE, + NOT_SECRET_KEY: NOT_SECRET_VALUE, + "credentials": {SECRET_STRING_2_KEY: SECRET_STRING_2_VALUE, SECRET_INT_KEY: SECRET_INT_VALUE}, +} + + +@pytest.mark.parametrize( + ["spec", "expected"], + [ + (flat_spec_with_secret, [[SECRET_STRING_KEY]]), + (flat_spec_without_secrets, []), + (flat_spec_with_secret_int, [[SECRET_INT_KEY]]), + (spec_with_oneof_secrets, [[SECRET_STRING_KEY], ["credentials", SECRET_STRING_2_KEY], ["credentials", SECRET_INT_KEY]]), + (spec_with_nested_secrets, [[SECRET_STRING_KEY], ["credentials", SECRET_STRING_2_KEY], ["credentials", SECRET_INT_KEY]]), + ], +) +def test_get_secret_paths(spec, expected): + assert get_secret_paths(spec) == expected, f"Expected {spec} to yield secret paths {expected}" + + +@pytest.mark.parametrize( + ["spec", "config", "expected"], + [ + (flat_spec_with_secret, flat_config_with_secret, [SECRET_STRING_VALUE]), + (flat_spec_without_secrets, flat_config_without_secrets, []), + (flat_spec_with_secret_int, flat_config_with_secret_int, [SECRET_INT_VALUE]), + (spec_with_oneof_secrets, config_with_oneof_secrets_1, [SECRET_STRING_VALUE, SECRET_STRING_2_VALUE]), + (spec_with_oneof_secrets, config_with_oneof_secrets_2, [SECRET_STRING_VALUE, SECRET_INT_VALUE]), + (spec_with_nested_secrets, config_with_nested_secrets, [SECRET_STRING_VALUE, SECRET_STRING_2_VALUE, SECRET_INT_VALUE]), + ], +) +def test_get_secrets(spec, config, expected): + assert get_secrets(spec, config) == 
expected, f"Expected the spec {spec} and config {config} to produce {expected}" + + def test_secret_filtering(): - sensitive_str = f"{SECRET_VALUE} {NOT_SECRET_VALUE} {SECRET_VALUE} {ANOTHER_SECRET_VALUE}" + sensitive_str = f"{SECRET_STRING_VALUE} {NOT_SECRET_VALUE} {SECRET_STRING_VALUE} {SECRET_STRING_2_VALUE}" update_secrets([]) filtered = filter_secrets(sensitive_str) assert filtered == sensitive_str - update_secrets([SECRET_VALUE, ANOTHER_SECRET_VALUE]) + update_secrets([SECRET_STRING_VALUE, SECRET_STRING_2_VALUE]) filtered = filter_secrets(sensitive_str) assert filtered == f"**** {NOT_SECRET_VALUE} **** ****" diff --git a/airbyte-commons/src/main/java/io/airbyte/commons/features/EnvVariableFeatureFlags.java b/airbyte-commons/src/main/java/io/airbyte/commons/features/EnvVariableFeatureFlags.java index c784a7e3103b..9991fd35c503 100644 --- a/airbyte-commons/src/main/java/io/airbyte/commons/features/EnvVariableFeatureFlags.java +++ b/airbyte-commons/src/main/java/io/airbyte/commons/features/EnvVariableFeatureFlags.java @@ -4,11 +4,14 @@ package io.airbyte.commons.features; +import java.util.function.Function; import lombok.extern.slf4j.Slf4j; @Slf4j public class EnvVariableFeatureFlags implements FeatureFlags { + public static final String USE_STREAM_CAPABLE_STATE = "USE_STREAM_CAPABLE_STATE"; + @Override public boolean autoDisablesFailingConnections() { log.info("Auto Disable Failing Connections: " + Boolean.parseBoolean(System.getenv("AUTO_DISABLE_FAILING_CONNECTIONS"))); @@ -26,4 +29,20 @@ public boolean forceSecretMigration() { return Boolean.parseBoolean(System.getenv("FORCE_MIGRATE_SECRET_STORE")); } + @Override + public boolean useStreamCapableState() { + return getEnvOrDefault(USE_STREAM_CAPABLE_STATE, false, Boolean::parseBoolean); + } + + // TODO: refactor in order to use the same method than the ones in EnvConfigs.java + public T getEnvOrDefault(final String key, final T defaultValue, final Function parser) { + final String value = System.getenv(key); 
+ if (value != null && !value.isEmpty()) { + return parser.apply(value); + } else { + log.info("Using default value for environment variable {}: '{}'", key, defaultValue); + return defaultValue; + } + } + } diff --git a/airbyte-commons/src/main/java/io/airbyte/commons/features/FeatureFlags.java b/airbyte-commons/src/main/java/io/airbyte/commons/features/FeatureFlags.java index 5833028056f3..1053e1903598 100644 --- a/airbyte-commons/src/main/java/io/airbyte/commons/features/FeatureFlags.java +++ b/airbyte-commons/src/main/java/io/airbyte/commons/features/FeatureFlags.java @@ -16,4 +16,6 @@ public interface FeatureFlags { boolean forceSecretMigration(); + boolean useStreamCapableState(); + } diff --git a/airbyte-commons/src/main/java/io/airbyte/commons/json/JsonPaths.java b/airbyte-commons/src/main/java/io/airbyte/commons/json/JsonPaths.java index 70062f294392..4571efb4ab3d 100644 --- a/airbyte-commons/src/main/java/io/airbyte/commons/json/JsonPaths.java +++ b/airbyte-commons/src/main/java/io/airbyte/commons/json/JsonPaths.java @@ -15,6 +15,7 @@ import com.jayway.jsonpath.spi.json.JsonProvider; import com.jayway.jsonpath.spi.mapper.JacksonMappingProvider; import com.jayway.jsonpath.spi.mapper.MappingProvider; +import io.airbyte.commons.json.JsonSchemas.FieldNameOrList; import io.airbyte.commons.util.MoreIterators; import java.util.Collections; import java.util.EnumSet; @@ -94,6 +95,20 @@ public static String appendAppendListSplat(final String jsonPath) { return jsonPath + JSON_PATH_LIST_SPLAT; } + /** + * Map path produced by {@link JsonSchemas} to the JSONPath format. + * + * @param jsonSchemaPath - path as described in {@link JsonSchemas} + * @return path as JSONPath + */ + public static String mapJsonSchemaPathToJsonPath(final List jsonSchemaPath) { + String jsonPath = empty(); + for (final FieldNameOrList fieldNameOrList : jsonSchemaPath) { + jsonPath = fieldNameOrList.isList() ? 
appendAppendListSplat(jsonPath) : appendField(jsonPath, fieldNameOrList.getFieldName()); + } + return jsonPath; + } + /* * This version of the JsonPath Configuration object allows queries to return to the path of values * instead of the values that were found. @@ -117,7 +132,7 @@ public static void assertIsJsonPath(final String jsonPath) { * @param jsonPath - path to validate */ public static void assertIsSingleReturnQuery(final String jsonPath) { - Preconditions.checkArgument(!jsonPath.contains("*"), "Cannot accept paths with wildcards because they may return more than one item."); + Preconditions.checkArgument(JsonPath.isPathDefinite(jsonPath), "Cannot accept paths with wildcards because they may return more than one item."); } /** diff --git a/airbyte-commons/src/main/java/io/airbyte/commons/json/JsonSchemas.java b/airbyte-commons/src/main/java/io/airbyte/commons/json/JsonSchemas.java index b6da1dac351a..d951fb401073 100644 --- a/airbyte-commons/src/main/java/io/airbyte/commons/json/JsonSchemas.java +++ b/airbyte-commons/src/main/java/io/airbyte/commons/json/JsonSchemas.java @@ -6,19 +6,20 @@ import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.node.ObjectNode; +import com.google.common.base.Preconditions; import io.airbyte.commons.io.IOs; import io.airbyte.commons.resources.MoreResources; import io.airbyte.commons.util.MoreIterators; +import io.airbyte.commons.util.MoreLists; import java.io.IOException; import java.nio.file.Files; import java.nio.file.Path; import java.util.ArrayList; -import java.util.Collection; import java.util.Collections; -import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Map.Entry; +import java.util.Objects; import java.util.Optional; import java.util.Set; import java.util.function.BiConsumer; @@ -95,8 +96,33 @@ public static Path prepareSchemas(final String resourceDir, final Class k } } - public static void traverseJsonSchema(final JsonNode jsonSchemaNode, final 
BiConsumer consumer) { - traverseJsonSchemaInternal(jsonSchemaNode, JsonPaths.empty(), consumer); + /** + * Traverse a JsonSchema object. The provided consumer will be called at each node with the node and + * the path to the node. + * + * @param jsonSchema - JsonSchema object to traverse + * @param consumer - accepts the current node and the path to that node. + */ + public static void traverseJsonSchema(final JsonNode jsonSchema, final BiConsumer> consumer) { + traverseJsonSchemaInternal(jsonSchema, new ArrayList<>(), consumer); + } + + /** + * Traverse a JsonSchema object. At each node, map a value. + * + * @param jsonSchema - JsonSchema object to traverse + * @param mapper - accepts the current node and the path to that node. whatever is returned will be + * collected and returned by the final collection. + * @param - type of objects being collected + * @return - collection of all items that were collected during the traversal. Returns a { @link + * Collection } because there is no order or uniqueness guarantee so neither List nor Set + * make sense. + */ + public static List traverseJsonSchemaWithCollector(final JsonNode jsonSchema, + final BiFunction, T> mapper) { + // for the sake of code reuse, use the filtered collector method but makes sure the filter always + // returns true. + return traverseJsonSchemaWithFilteredCollector(jsonSchema, (node, path) -> Optional.ofNullable(mapper.apply(node, path))); } /** @@ -107,48 +133,48 @@ public static void traverseJsonSchema(final JsonNode jsonSchemaNode, final BiCon * optional, nothing will be collected, otherwise, whatever is returned will be collected and * returned by the final collection. * @param - type of objects being collected - * @return - collection of all items that were collected during the traversal. Returns a { @link - * Collection } because there is no order or uniqueness guarantee so neither List nor Set - * make sense. + * @return - collection of all items that were collected during the traversal. 
Returns values in + * preoorder traversal order. */ - public static Collection traverseJsonSchemaWithCollector(final JsonNode jsonSchema, final BiFunction> mapper) { - final List collectors = new ArrayList<>(); - traverseJsonSchema(jsonSchema, (node, path) -> mapper.apply(node, path).ifPresent(collectors::add)); - return collectors; + public static List traverseJsonSchemaWithFilteredCollector(final JsonNode jsonSchema, + final BiFunction, Optional> mapper) { + final List collector = new ArrayList<>(); + traverseJsonSchema(jsonSchema, (node, path) -> mapper.apply(node, path).ifPresent(collector::add)); + return collector.stream().toList(); // make list unmodifiable } /** * Traverses a JsonSchema object. It returns the path to each node that meet the provided condition. - * The paths are return in JsonPath format + * The paths are return in JsonPath format. The traversal is depth-first search preoorder and values + * are returned in that order. * * @param obj - JsonSchema object to traverse * @param predicate - predicate to determine if the path for a node should be collected. * @return - collection of all paths that were collected during the traversal. */ - public static Set collectJsonPathsThatMeetCondition(final JsonNode obj, final Predicate predicate) { - return new HashSet<>(traverseJsonSchemaWithCollector(obj, (node, path) -> { + public static List> collectPathsThatMeetCondition(final JsonNode obj, final Predicate predicate) { + return traverseJsonSchemaWithFilteredCollector(obj, (node, path) -> { if (predicate.test(node)) { return Optional.of(path); } else { return Optional.empty(); } - })); + }); } /** * Recursive, depth-first implementation of { @link JsonSchemas#traverseJsonSchema(final JsonNode * jsonNode, final BiConsumer> consumer) }. Takes path as argument so that - * the path can be passsed to the consumer. + * the path can be passed to the consumer. * * @param jsonSchemaNode - jsonschema object to traverse. 
- * @param path - path from the first call of traverseJsonSchema to the current node. * @param consumer - consumer to be called at each node. it accepts the current node and the path to * the node from the root of the object passed at the root level invocation + * */ - // todo (cgardens) - replace with easier to understand traversal logic from SecretsHelper. private static void traverseJsonSchemaInternal(final JsonNode jsonSchemaNode, - final String path, - final BiConsumer consumer) { + final List path, + final BiConsumer> consumer) { if (!jsonSchemaNode.isObject()) { throw new IllegalArgumentException(String.format("json schema nodes should always be object nodes. path: %s actual: %s", path, jsonSchemaNode)); } @@ -162,23 +188,25 @@ private static void traverseJsonSchemaInternal(final JsonNode jsonSchemaNode, switch (nodeType) { // case BOOLEAN_TYPE, NUMBER_TYPE, STRING_TYPE, NULL_TYPE -> do nothing after consumer.accept above. case ARRAY_TYPE -> { - final String newPath = JsonPaths.appendAppendListSplat(path); - // hit every node. - // log.error("array: " + jsonSchemaNode); - traverseJsonSchemaInternal(jsonSchemaNode.get(JSON_SCHEMA_ITEMS_KEY), newPath, consumer); + final List newPath = MoreLists.add(path, FieldNameOrList.list()); + if (jsonSchemaNode.has(JSON_SCHEMA_ITEMS_KEY)) { + // hit every node. 
+ traverseJsonSchemaInternal(jsonSchemaNode.get(JSON_SCHEMA_ITEMS_KEY), newPath, consumer); + } else { + throw new IllegalArgumentException( + "malformed JsonSchema array type, must have items field in " + jsonSchemaNode); + } } case OBJECT_TYPE -> { final Optional comboKeyWordOptional = getKeywordIfComposite(jsonSchemaNode); if (jsonSchemaNode.has(JSON_SCHEMA_PROPERTIES_KEY)) { for (final Iterator> it = jsonSchemaNode.get(JSON_SCHEMA_PROPERTIES_KEY).fields(); it.hasNext();) { final Entry child = it.next(); - final String newPath = JsonPaths.appendField(path, child.getKey()); - // log.error("obj1: " + jsonSchemaNode); + final List newPath = MoreLists.add(path, FieldNameOrList.fieldName(child.getKey())); traverseJsonSchemaInternal(child.getValue(), newPath, consumer); } } else if (comboKeyWordOptional.isPresent()) { for (final JsonNode arrayItem : jsonSchemaNode.get(comboKeyWordOptional.get())) { - // log.error("obj2: " + jsonSchemaNode); traverseJsonSchemaInternal(arrayItem, path, consumer); } } else { @@ -206,8 +234,15 @@ private static Optional getKeywordIfComposite(final JsonNode node) { return Optional.empty(); } - public static List getTypeOrObject(final JsonNode jsonNode) { - final List types = getType(jsonNode); + /** + * Same logic as {@link #getType(JsonNode)} except when no type is found, it defaults to type: + * Object. + * + * @param jsonSchema - JSONSchema object + * @return type of the node. + */ + public static List getTypeOrObject(final JsonNode jsonSchema) { + final List types = getType(jsonSchema); if (types.isEmpty()) { return List.of(OBJECT_TYPE); } else { @@ -215,21 +250,96 @@ public static List getTypeOrObject(final JsonNode jsonNode) { } } - public static List getType(final JsonNode jsonNode) { - if (jsonNode.has(JSON_SCHEMA_TYPE_KEY)) { - if (jsonNode.get(JSON_SCHEMA_TYPE_KEY).isArray()) { - return MoreIterators.toList(jsonNode.get(JSON_SCHEMA_TYPE_KEY).iterator()) + /** + * Get the type of JSONSchema node. Uses JSONSchema types. 
Only returns the type of the "top-level" + * node. e.g. if more nodes are nested underneath because it is an object or an array, only the top + * level type is returned. + * + * @param jsonSchema - JSONSchema object + * @return type of the node. + */ + public static List getType(final JsonNode jsonSchema) { + if (jsonSchema.has(JSON_SCHEMA_TYPE_KEY)) { + if (jsonSchema.get(JSON_SCHEMA_TYPE_KEY).isArray()) { + return MoreIterators.toList(jsonSchema.get(JSON_SCHEMA_TYPE_KEY).iterator()) .stream() .map(JsonNode::asText) .collect(Collectors.toList()); } else { - return List.of(jsonNode.get(JSON_SCHEMA_TYPE_KEY).asText()); + return List.of(jsonSchema.get(JSON_SCHEMA_TYPE_KEY).asText()); } } - if (jsonNode.has(JSON_SCHEMA_ENUM_KEY)) { + if (jsonSchema.has(JSON_SCHEMA_ENUM_KEY)) { return List.of(STRING_TYPE); } return Collections.emptyList(); } + /** + * Provides a basic scheme for describing the path into a JSON object. Each element in the path is + * either a field name or a list. + * + * This class is helpful in the case where fields can be any UTF-8 string, so the only simple way to + * keep track of the different parts of a path without going crazy with escape characters is to keep + * it in a list with list set aside as a special case. + * + * We prefer using this scheme instead of JSONPath in the tree traversal because, it is easier to + * decompose a path in this scheme than it is in JSONPath. Some callers of the traversal logic want + * to isolate parts of the path easily without the need for complex regex (that would be required if + * we used JSONPath). 
+ */ + public static class FieldNameOrList { + + private final String fieldName; + private final boolean isList; + + public static FieldNameOrList fieldName(final String fieldName) { + return new FieldNameOrList(fieldName); + } + + public static FieldNameOrList list() { + return new FieldNameOrList(null); + } + + private FieldNameOrList(final String fieldName) { + isList = fieldName == null; + this.fieldName = fieldName; + } + + public String getFieldName() { + Preconditions.checkState(!isList, "cannot return field name, is list node"); + return fieldName; + } + + public boolean isList() { + return isList; + } + + @Override + public boolean equals(final Object o) { + if (this == o) { + return true; + } + if (!(o instanceof FieldNameOrList)) { + return false; + } + final FieldNameOrList that = (FieldNameOrList) o; + return isList == that.isList && Objects.equals(fieldName, that.fieldName); + } + + @Override + public int hashCode() { + return Objects.hash(fieldName, isList); + } + + @Override + public String toString() { + return "FieldNameOrList{" + + "fieldName='" + fieldName + '\'' + + ", isList=" + isList + + '}'; + } + + } + } diff --git a/airbyte-commons/src/main/java/io/airbyte/commons/util/MoreLists.java b/airbyte-commons/src/main/java/io/airbyte/commons/util/MoreLists.java index c12e5d7df3a7..fa6779f0d64d 100644 --- a/airbyte-commons/src/main/java/io/airbyte/commons/util/MoreLists.java +++ b/airbyte-commons/src/main/java/io/airbyte/commons/util/MoreLists.java @@ -48,4 +48,18 @@ public static List concat(final List... lists) { return Stream.of(lists).flatMap(List::stream).toList(); } + /** + * Copies provided list and adds the new item to the copy. 
+ * + * @param list list to copy and add to + * @param toAdd item to add + * @param type of list + * @return new list with contents of provided list and the added item + */ + public static List add(final List list, final T toAdd) { + final ArrayList newList = new ArrayList<>(list); + newList.add(toAdd); + return newList; + } + } diff --git a/airbyte-commons/src/test/java/io/airbyte/commons/json/JsonSchemasTest.java b/airbyte-commons/src/test/java/io/airbyte/commons/json/JsonSchemasTest.java index b98d919e3cd7..9a814017a20c 100644 --- a/airbyte-commons/src/test/java/io/airbyte/commons/json/JsonSchemasTest.java +++ b/airbyte-commons/src/test/java/io/airbyte/commons/json/JsonSchemasTest.java @@ -5,11 +5,15 @@ package io.airbyte.commons.json; import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertThrows; import static org.mockito.Mockito.mock; import com.fasterxml.jackson.databind.JsonNode; +import io.airbyte.commons.json.JsonSchemas.FieldNameOrList; import io.airbyte.commons.resources.MoreResources; import java.io.IOException; +import java.util.Collections; +import java.util.List; import java.util.function.BiConsumer; import org.junit.jupiter.api.Test; import org.junit.jupiter.params.ParameterizedTest; @@ -41,19 +45,24 @@ void testMutateTypeToArrayStandard() { @Test void testTraverse() throws IOException { final JsonNode jsonWithAllTypes = Jsons.deserialize(MoreResources.readResource("json_schemas/json_with_all_types.json")); - final BiConsumer mock = mock(BiConsumer.class); + final BiConsumer> mock = mock(BiConsumer.class); JsonSchemas.traverseJsonSchema(jsonWithAllTypes, mock); final InOrder inOrder = Mockito.inOrder(mock); - inOrder.verify(mock).accept(jsonWithAllTypes, JsonPaths.empty()); - inOrder.verify(mock).accept(jsonWithAllTypes.get("properties").get("name"), "$.name"); - inOrder.verify(mock).accept(jsonWithAllTypes.get("properties").get("name").get("properties").get("first"), "$.name.first"); - 
inOrder.verify(mock).accept(jsonWithAllTypes.get("properties").get("name").get("properties").get("last"), "$.name.last"); - inOrder.verify(mock).accept(jsonWithAllTypes.get("properties").get("company"), "$.company"); - inOrder.verify(mock).accept(jsonWithAllTypes.get("properties").get("pets"), "$.pets"); - inOrder.verify(mock).accept(jsonWithAllTypes.get("properties").get("pets").get("items"), "$.pets[*]"); - inOrder.verify(mock).accept(jsonWithAllTypes.get("properties").get("pets").get("items").get("properties").get("type"), "$.pets[*].type"); - inOrder.verify(mock).accept(jsonWithAllTypes.get("properties").get("pets").get("items").get("properties").get("number"), "$.pets[*].number"); + inOrder.verify(mock).accept(jsonWithAllTypes, Collections.emptyList()); + inOrder.verify(mock).accept(jsonWithAllTypes.get("properties").get("name"), List.of(FieldNameOrList.fieldName("name"))); + inOrder.verify(mock).accept(jsonWithAllTypes.get("properties").get("name").get("properties").get("first"), + List.of(FieldNameOrList.fieldName("name"), FieldNameOrList.fieldName("first"))); + inOrder.verify(mock).accept(jsonWithAllTypes.get("properties").get("name").get("properties").get("last"), + List.of(FieldNameOrList.fieldName("name"), FieldNameOrList.fieldName("last"))); + inOrder.verify(mock).accept(jsonWithAllTypes.get("properties").get("company"), List.of(FieldNameOrList.fieldName("company"))); + inOrder.verify(mock).accept(jsonWithAllTypes.get("properties").get("pets"), List.of(FieldNameOrList.fieldName("pets"))); + inOrder.verify(mock).accept(jsonWithAllTypes.get("properties").get("pets").get("items"), + List.of(FieldNameOrList.fieldName("pets"), FieldNameOrList.list())); + inOrder.verify(mock).accept(jsonWithAllTypes.get("properties").get("pets").get("items").get("properties").get("type"), + List.of(FieldNameOrList.fieldName("pets"), FieldNameOrList.list(), FieldNameOrList.fieldName("type"))); + 
inOrder.verify(mock).accept(jsonWithAllTypes.get("properties").get("pets").get("items").get("properties").get("number"), + List.of(FieldNameOrList.fieldName("pets"), FieldNameOrList.list(), FieldNameOrList.fieldName("number"))); inOrder.verifyNoMoreInteractions(); } @@ -68,20 +77,22 @@ void testTraverseComposite(final String compositeKeyword) throws IOException { final String jsonSchemaString = MoreResources.readResource("json_schemas/composite_json_schema.json") .replaceAll("", compositeKeyword); final JsonNode jsonWithAllTypes = Jsons.deserialize(jsonSchemaString); - final BiConsumer mock = mock(BiConsumer.class); + final BiConsumer> mock = mock(BiConsumer.class); JsonSchemas.traverseJsonSchema(jsonWithAllTypes, mock); final InOrder inOrder = Mockito.inOrder(mock); - inOrder.verify(mock).accept(jsonWithAllTypes, JsonPaths.empty()); - inOrder.verify(mock).accept(jsonWithAllTypes.get(compositeKeyword).get(0), JsonPaths.empty()); - inOrder.verify(mock).accept(jsonWithAllTypes.get(compositeKeyword).get(1), JsonPaths.empty()); - inOrder.verify(mock).accept(jsonWithAllTypes.get(compositeKeyword).get(1).get("properties").get("prop1"), "$.prop1"); - inOrder.verify(mock).accept(jsonWithAllTypes.get(compositeKeyword).get(2), JsonPaths.empty()); - inOrder.verify(mock).accept(jsonWithAllTypes.get(compositeKeyword).get(2).get("items"), "$[*]"); - inOrder.verify(mock).accept(jsonWithAllTypes.get(compositeKeyword).get(3).get(compositeKeyword).get(0), JsonPaths.empty()); - inOrder.verify(mock).accept(jsonWithAllTypes.get(compositeKeyword).get(3).get(compositeKeyword).get(1), JsonPaths.empty()); - inOrder.verify(mock).accept(jsonWithAllTypes.get(compositeKeyword).get(3).get(compositeKeyword).get(1).get("items"), "$[*]"); + inOrder.verify(mock).accept(jsonWithAllTypes, Collections.emptyList()); + inOrder.verify(mock).accept(jsonWithAllTypes.get(compositeKeyword).get(0), Collections.emptyList()); + inOrder.verify(mock).accept(jsonWithAllTypes.get(compositeKeyword).get(1), 
Collections.emptyList()); + inOrder.verify(mock).accept(jsonWithAllTypes.get(compositeKeyword).get(1).get("properties").get("prop1"), + List.of(FieldNameOrList.fieldName("prop1"))); + inOrder.verify(mock).accept(jsonWithAllTypes.get(compositeKeyword).get(2), Collections.emptyList()); + inOrder.verify(mock).accept(jsonWithAllTypes.get(compositeKeyword).get(2).get("items"), List.of(FieldNameOrList.list())); + inOrder.verify(mock).accept(jsonWithAllTypes.get(compositeKeyword).get(3).get(compositeKeyword).get(0), Collections.emptyList()); + inOrder.verify(mock).accept(jsonWithAllTypes.get(compositeKeyword).get(3).get(compositeKeyword).get(1), Collections.emptyList()); + inOrder.verify(mock).accept(jsonWithAllTypes.get(compositeKeyword).get(3).get(compositeKeyword).get(1).get("items"), + List.of(FieldNameOrList.list())); inOrder.verifyNoMoreInteractions(); } @@ -89,14 +100,15 @@ void testTraverseComposite(final String compositeKeyword) throws IOException { @Test void testTraverseMultiType() throws IOException { final JsonNode jsonWithAllTypes = Jsons.deserialize(MoreResources.readResource("json_schemas/json_with_array_type_fields.json")); - final BiConsumer mock = mock(BiConsumer.class); + final BiConsumer> mock = mock(BiConsumer.class); JsonSchemas.traverseJsonSchema(jsonWithAllTypes, mock); final InOrder inOrder = Mockito.inOrder(mock); - inOrder.verify(mock).accept(jsonWithAllTypes, JsonPaths.empty()); - inOrder.verify(mock).accept(jsonWithAllTypes.get("properties").get("company"), "$.company"); - inOrder.verify(mock).accept(jsonWithAllTypes.get("items"), "$[*]"); - inOrder.verify(mock).accept(jsonWithAllTypes.get("items").get("properties").get("user"), "$[*].user"); + inOrder.verify(mock).accept(jsonWithAllTypes, Collections.emptyList()); + inOrder.verify(mock).accept(jsonWithAllTypes.get("properties").get("company"), List.of(FieldNameOrList.fieldName("company"))); + inOrder.verify(mock).accept(jsonWithAllTypes.get("items"), List.of(FieldNameOrList.list())); + 
inOrder.verify(mock).accept(jsonWithAllTypes.get("items").get("properties").get("user"), + List.of(FieldNameOrList.list(), FieldNameOrList.fieldName("user"))); inOrder.verifyNoMoreInteractions(); } @@ -105,17 +117,29 @@ void testTraverseMultiType() throws IOException { void testTraverseMultiTypeComposite() throws IOException { final String compositeKeyword = "anyOf"; final JsonNode jsonWithAllTypes = Jsons.deserialize(MoreResources.readResource("json_schemas/json_with_array_type_fields_with_composites.json")); - final BiConsumer mock = mock(BiConsumer.class); + final BiConsumer> mock = mock(BiConsumer.class); JsonSchemas.traverseJsonSchema(jsonWithAllTypes, mock); final InOrder inOrder = Mockito.inOrder(mock); - inOrder.verify(mock).accept(jsonWithAllTypes, JsonPaths.empty()); - inOrder.verify(mock).accept(jsonWithAllTypes.get(compositeKeyword).get(0).get("properties").get("company"), "$.company"); - inOrder.verify(mock).accept(jsonWithAllTypes.get(compositeKeyword).get(1).get("properties").get("organization"), "$.organization"); - inOrder.verify(mock).accept(jsonWithAllTypes.get("items"), "$[*]"); - inOrder.verify(mock).accept(jsonWithAllTypes.get("items").get("properties").get("user"), "$[*].user"); + inOrder.verify(mock).accept(jsonWithAllTypes, Collections.emptyList()); + inOrder.verify(mock).accept(jsonWithAllTypes.get(compositeKeyword).get(0).get("properties").get("company"), + List.of(FieldNameOrList.fieldName("company"))); + inOrder.verify(mock).accept(jsonWithAllTypes.get(compositeKeyword).get(1).get("properties").get("organization"), + List.of(FieldNameOrList.fieldName("organization"))); + inOrder.verify(mock).accept(jsonWithAllTypes.get("items"), List.of(FieldNameOrList.list())); + inOrder.verify(mock).accept(jsonWithAllTypes.get("items").get("properties").get("user"), + List.of(FieldNameOrList.list(), FieldNameOrList.fieldName("user"))); inOrder.verifyNoMoreInteractions(); } + @SuppressWarnings("unchecked") + @Test + void 
testTraverseArrayTypeWithNoItemsThrowsException() throws IOException { + final JsonNode jsonWithAllTypes = Jsons.deserialize(MoreResources.readResource("json_schemas/json_with_array_type_fields_no_items.json")); + final BiConsumer> mock = mock(BiConsumer.class); + + assertThrows(IllegalArgumentException.class, () -> JsonSchemas.traverseJsonSchema(jsonWithAllTypes, mock)); + } + } diff --git a/airbyte-commons/src/test/java/io/airbyte/commons/util/MoreListsTest.java b/airbyte-commons/src/test/java/io/airbyte/commons/util/MoreListsTest.java index a05db55e00c5..3243f370bc57 100644 --- a/airbyte-commons/src/test/java/io/airbyte/commons/util/MoreListsTest.java +++ b/airbyte-commons/src/test/java/io/airbyte/commons/util/MoreListsTest.java @@ -6,7 +6,6 @@ import static org.junit.jupiter.api.Assertions.assertEquals; -import com.google.common.collect.Lists; import java.util.ArrayList; import java.util.List; import java.util.Optional; @@ -28,9 +27,26 @@ void testLast() { @Test void testReverse() { - final ArrayList originalList = Lists.newArrayList(1, 2, 3); + final List originalList = List.of(1, 2, 3); assertEquals(List.of(3, 2, 1), MoreLists.reversed(originalList)); assertEquals(List.of(1, 2, 3), originalList); } + @Test + void testConcat() { + final List> lists = List.of(List.of(1, 2, 3), List.of(4, 5, 6), List.of(7, 8, 9)); + final List expected = List.of(1, 2, 3, 4, 5, 6, 7, 8, 9); + final List actual = MoreLists.concat(lists.get(0), lists.get(1), lists.get(2)); + assertEquals(expected, actual); + } + + @Test + void testAdd() { + final List originalList = List.of(1, 2, 3); + + assertEquals(List.of(1, 2, 3, 4), MoreLists.add(originalList, 4)); + // verify original list was not mutated. 
+ assertEquals(List.of(1, 2, 3), originalList); + } + } diff --git a/airbyte-commons/src/test/resources/json_schemas/json_with_array_type_fields_no_items.json b/airbyte-commons/src/test/resources/json_schemas/json_with_array_type_fields_no_items.json new file mode 100644 index 000000000000..77393ec44816 --- /dev/null +++ b/airbyte-commons/src/test/resources/json_schemas/json_with_array_type_fields_no_items.json @@ -0,0 +1,9 @@ +{ + "type": ["object", "array"], + "properties": { + "company": { + "type": "string", + "description": "company name" + } + } +} diff --git a/airbyte-config/config-models/README.md b/airbyte-config/config-models/README.md index eb5853a318a3..692eb810d800 100644 --- a/airbyte-config/config-models/README.md +++ b/airbyte-config/config-models/README.md @@ -9,7 +9,7 @@ This module uses `jsonschema2pojo` to generate Java config objects from [json sc ``` - Run the following command under the project root: ```sh - SUB_BUILD=PLATFORM ./gradlew airbyte-config:models:generateJsonSchema2Pojo + SUB_BUILD=PLATFORM ./gradlew airbyte-config:config-models:generateJsonSchema2Pojo ``` The generated file is under: ``` diff --git a/airbyte-config/config-models/src/main/java/io/airbyte/config/Configs.java b/airbyte-config/config-models/src/main/java/io/airbyte/config/Configs.java index f1dfda304d72..d5f7d0ab8ebb 100644 --- a/airbyte-config/config-models/src/main/java/io/airbyte/config/Configs.java +++ b/airbyte-config/config-models/src/main/java/io/airbyte/config/Configs.java @@ -103,11 +103,28 @@ public interface Configs { /** * Defines the Secret Persistence type. None by default. Set to GOOGLE_SECRET_MANAGER to use Google - * Secret Manager. Set to TESTING_CONFIG_DB_TABLE to use the database as a test. Alpha support. - * Undefined behavior will result if this is turned on and then off. + * Secret Manager. Set to TESTING_CONFIG_DB_TABLE to use the database as a test. Set to VAULT to use + * Hashicorp Vault. Alpha support. 
Undefined behavior will result if this is turned on and then off. */ SecretPersistenceType getSecretPersistenceType(); + /** + * Define the vault address to read/write Airbyte Configuration to Hashicorp Vault. Alpha Support. + */ + String getVaultAddress(); + + /** + * Define the vault path prefix to read/write Airbyte Configuration to Hashicorp Vault. Empty by + * default. Alpha Support. + */ + String getVaultPrefix(); + + /** + * Define the vault token to read/write Airbyte Configuration to Hashicorp Vault. Empty by default. + * Alpha Support. + */ + String getVaultToken(); + // Database /** * Define the Jobs Database user. @@ -171,7 +188,36 @@ public interface Configs { */ boolean runDatabaseMigrationOnStartup(); + // Temporal Cloud - Internal-Use Only + + /** + * Define if Temporal Cloud should be used. Internal-use only. + */ + boolean temporalCloudEnabled(); + + /** + * Temporal Cloud target endpoint, usually with form ${namespace}.tmprl.cloud:7233. Internal-use + * only. + */ + String getTemporalCloudHost(); + + /** + * Temporal Cloud namespace. Internal-use only. + */ + String getTemporalCloudNamespace(); + + /** + * Temporal Cloud client cert for SSL. Internal-use only. + */ + String getTemporalCloudClientCert(); + + /** + * Temporal Cloud client key for SSL. Internal-use only. + */ + String getTemporalCloudClientKey(); + // Airbyte Services + /** * Define the url where Temporal is hosted at. Please include the port. Airbyte services use this * information. @@ -420,6 +466,17 @@ public interface Configs { */ TrackingStrategy getTrackingStrategy(); + /** + * Define whether to send job failure events to Sentry or log-only. Airbyte internal use. + */ + JobErrorReportingStrategy getJobErrorReportingStrategy(); + + /** + * Determines the Sentry DSN that should be used when reporting connector job failures to Sentry. + * Used with SENTRY error reporting strategy. Airbyte internal use. 
+ */ + String getJobErrorReportingSentryDSN(); + // APPLICATIONS // Worker /** @@ -508,9 +565,19 @@ public interface Configs { int getMaxActivityTimeoutSecond(); /** - * Get the duration in second between 2 activity attempts + * Get initial delay in seconds between two activity attempts + */ + int getInitialDelayBetweenActivityAttemptsSeconds(); + + /** + * Get maximum delay in seconds between two activity attempts + */ + int getMaxDelayBetweenActivityAttemptsSeconds(); + + /** + * Get the delay in seconds between an activity failing and the workflow being restarted */ - int getDelayBetweenActivityAttempts(); + int getWorkflowFailureRestartDelaySeconds(); /** * Get number of attempts of the non long running activities @@ -522,6 +589,11 @@ enum TrackingStrategy { LOGGING } + enum JobErrorReportingStrategy { + SENTRY, + LOGGING + } + enum WorkerEnvironment { DOCKER, KUBERNETES @@ -535,7 +607,8 @@ enum DeploymentMode { enum SecretPersistenceType { NONE, TESTING_CONFIG_DB_TABLE, - GOOGLE_SECRET_MANAGER + GOOGLE_SECRET_MANAGER, + VAULT } } diff --git a/airbyte-config/config-models/src/main/java/io/airbyte/config/EnvConfigs.java b/airbyte-config/config-models/src/main/java/io/airbyte/config/EnvConfigs.java index 499e210582ea..adb6e69edec3 100644 --- a/airbyte-config/config-models/src/main/java/io/airbyte/config/EnvConfigs.java +++ b/airbyte-config/config-models/src/main/java/io/airbyte/config/EnvConfigs.java @@ -50,6 +50,8 @@ public class EnvConfigs implements Configs { public static final String CONFIG_ROOT = "CONFIG_ROOT"; public static final String DOCKER_NETWORK = "DOCKER_NETWORK"; public static final String TRACKING_STRATEGY = "TRACKING_STRATEGY"; + public static final String JOB_ERROR_REPORTING_STRATEGY = "JOB_ERROR_REPORTING_STRATEGY"; + public static final String JOB_ERROR_REPORTING_SENTRY_DSN = "JOB_ERROR_REPORTING_SENTRY_DSN"; public static final String DEPLOYMENT_MODE = "DEPLOYMENT_MODE"; public static final String DATABASE_USER = "DATABASE_USER"; public 
static final String DATABASE_PASSWORD = "DATABASE_PASSWORD"; @@ -109,9 +111,17 @@ public class EnvConfigs implements Configs { public static final String STATE_STORAGE_GCS_BUCKET_NAME = "STATE_STORAGE_GCS_BUCKET_NAME"; public static final String STATE_STORAGE_GCS_APPLICATION_CREDENTIALS = "STATE_STORAGE_GCS_APPLICATION_CREDENTIALS"; + private static final String TEMPORAL_CLOUD_ENABLED = "TEMPORAL_CLOUD_ENABLED"; + private static final String TEMPORAL_CLOUD_HOST = "TEMPORAL_CLOUD_HOST"; + private static final String TEMPORAL_CLOUD_NAMESPACE = "TEMPORAL_CLOUD_NAMESPACE"; + private static final String TEMPORAL_CLOUD_CLIENT_CERT = "TEMPORAL_CLOUD_CLIENT_CERT"; + private static final String TEMPORAL_CLOUD_CLIENT_KEY = "TEMPORAL_CLOUD_CLIENT_KEY"; + public static final String ACTIVITY_MAX_TIMEOUT_SECOND = "ACTIVITY_MAX_TIMEOUT_SECOND"; public static final String ACTIVITY_MAX_ATTEMPT = "ACTIVITY_MAX_ATTEMPT"; - public static final String ACTIVITY_DELAY_IN_SECOND_BETWEEN_ATTEMPTS = "ACTIVITY_DELAY_IN_SECOND_BETWEEN_ATTEMPTS"; + public static final String ACTIVITY_INITIAL_DELAY_BETWEEN_ATTEMPTS_SECONDS = "ACTIVITY_INITIAL_DELAY_BETWEEN_ATTEMPTS_SECONDS"; + public static final String ACTIVITY_MAX_DELAY_BETWEEN_ATTEMPTS_SECONDS = "ACTIVITY_MAX_DELAY_BETWEEN_ATTEMPTS_SECONDS"; + public static final String WORKFLOW_FAILURE_RESTART_DELAY_SECONDS = "WORKFLOW_FAILURE_RESTART_DELAY_SECONDS"; private static final String SHOULD_RUN_GET_SPEC_WORKFLOWS = "SHOULD_RUN_GET_SPEC_WORKFLOWS"; private static final String SHOULD_RUN_CHECK_CONNECTION_WORKFLOWS = "SHOULD_RUN_CHECK_CONNECTION_WORKFLOWS"; @@ -157,6 +167,10 @@ public class EnvConfigs implements Configs { private static final String DEFAULT_JOB_KUBE_CURL_IMAGE = "curlimages/curl:7.83.1"; private static final int DEFAULT_DATABASE_INITIALIZATION_TIMEOUT_MS = 60 * 1000; + private static final String VAULT_ADDRESS = "VAULT_ADDRESS"; + private static final String VAULT_PREFIX = "VAULT_PREFIX"; + private static final String 
VAULT_AUTH_TOKEN = "VAULT_AUTH_TOKEN"; + public static final long DEFAULT_MAX_SPEC_WORKERS = 5; public static final long DEFAULT_MAX_CHECK_WORKERS = 5; public static final long DEFAULT_MAX_DISCOVER_WORKERS = 5; @@ -167,6 +181,7 @@ public class EnvConfigs implements Configs { public static final Map> JOB_SHARED_ENVS = Map.of( AIRBYTE_VERSION, (instance) -> instance.getAirbyteVersion().serialize(), AIRBYTE_ROLE, EnvConfigs::getAirbyteRole, + DEPLOYMENT_MODE, (instance) -> instance.getDeploymentMode().name(), WORKER_ENVIRONMENT, (instance) -> instance.getWorkerEnvironment().name()); public static final int DEFAULT_TEMPORAL_HISTORY_RETENTION_IN_DAYS = 30; @@ -328,6 +343,21 @@ public SecretPersistenceType getSecretPersistenceType() { return SecretPersistenceType.valueOf(secretPersistenceStr); } + @Override + public String getVaultAddress() { + return getEnv(VAULT_ADDRESS); + } + + @Override + public String getVaultPrefix() { + return getEnvOrDefault(VAULT_PREFIX, ""); + } + + @Override + public String getVaultToken() { + return getEnv(VAULT_AUTH_TOKEN); + } + // Database @Override public String getDatabaseUser() { @@ -387,6 +417,32 @@ public boolean runDatabaseMigrationOnStartup() { return getEnvOrDefault(RUN_DATABASE_MIGRATION_ON_STARTUP, true); } + // Temporal Cloud + @Override + public boolean temporalCloudEnabled() { + return getEnvOrDefault(TEMPORAL_CLOUD_ENABLED, false); + } + + @Override + public String getTemporalCloudHost() { + return getEnvOrDefault(TEMPORAL_CLOUD_HOST, ""); + } + + @Override + public String getTemporalCloudNamespace() { + return getEnvOrDefault(TEMPORAL_CLOUD_NAMESPACE, ""); + } + + @Override + public String getTemporalCloudClientCert() { + return getEnvOrDefault(TEMPORAL_CLOUD_CLIENT_CERT, ""); + } + + @Override + public String getTemporalCloudClientKey() { + return getEnvOrDefault(TEMPORAL_CLOUD_CLIENT_KEY, ""); + } + // Airbyte Services @Override public String getTemporalHost() { @@ -751,6 +807,23 @@ public TrackingStrategy 
getTrackingStrategy() { }); } + @Override + public JobErrorReportingStrategy getJobErrorReportingStrategy() { + return getEnvOrDefault(JOB_ERROR_REPORTING_STRATEGY, JobErrorReportingStrategy.LOGGING, s -> { + try { + return JobErrorReportingStrategy.valueOf(s.toUpperCase()); + } catch (final IllegalArgumentException e) { + LOGGER.info(s + " not recognized, defaulting to " + JobErrorReportingStrategy.LOGGING); + return JobErrorReportingStrategy.LOGGING; + } + }); + } + + @Override + public String getJobErrorReportingSentryDSN() { + return getEnvOrDefault(JOB_ERROR_REPORTING_SENTRY_DSN, ""); + } + // APPLICATIONS // Worker @Override @@ -842,13 +915,23 @@ public int getMaxActivityTimeoutSecond() { } @Override - public int getDelayBetweenActivityAttempts() { - return Integer.parseInt(getEnvOrDefault(ACTIVITY_MAX_TIMEOUT_SECOND, "30")); + public int getInitialDelayBetweenActivityAttemptsSeconds() { + return Integer.parseInt(getEnvOrDefault(ACTIVITY_INITIAL_DELAY_BETWEEN_ATTEMPTS_SECONDS, "30")); + } + + @Override + public int getMaxDelayBetweenActivityAttemptsSeconds() { + return Integer.parseInt(getEnvOrDefault(ACTIVITY_MAX_DELAY_BETWEEN_ATTEMPTS_SECONDS, String.valueOf(10 * 60))); + } + + @Override + public int getWorkflowFailureRestartDelaySeconds() { + return Integer.parseInt(getEnvOrDefault(WORKFLOW_FAILURE_RESTART_DELAY_SECONDS, String.valueOf(10 * 60))); } @Override public int getActivityNumberOfAttempt() { - return Integer.parseInt(getEnvOrDefault(ACTIVITY_MAX_ATTEMPT, "10")); + return Integer.parseInt(getEnvOrDefault(ACTIVITY_MAX_ATTEMPT, "5")); } // Helpers diff --git a/airbyte-config/config-models/src/main/java/io/airbyte/config/helpers/StateMessageHelper.java b/airbyte-config/config-models/src/main/java/io/airbyte/config/helpers/StateMessageHelper.java new file mode 100644 index 000000000000..d79ca2ca4010 --- /dev/null +++ b/airbyte-config/config-models/src/main/java/io/airbyte/config/helpers/StateMessageHelper.java @@ -0,0 +1,134 @@ +/* + * Copyright (c) 
2022 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.config.helpers; + +import com.fasterxml.jackson.core.type.TypeReference; +import com.fasterxml.jackson.databind.JsonNode; +import com.google.common.collect.Iterables; +import io.airbyte.commons.json.Jsons; +import io.airbyte.config.State; +import io.airbyte.config.StateType; +import io.airbyte.config.StateWrapper; +import io.airbyte.protocol.models.AirbyteStateMessage; +import io.airbyte.protocol.models.AirbyteStateMessage.AirbyteStateType; +import java.util.List; +import java.util.Optional; + +public class StateMessageHelper { + + public static class AirbyteStateMessageListTypeReference extends TypeReference> {} + + /** + * This a takes a json blob state and tries return either a legacy state in the format of a json + * object or a state message with the new format which is a list of airbyte state message. + * + * @param state - a blob representing the state + * @return An optional state wrapper, if there is no state an empty optional will be returned + */ + public static Optional getTypedState(final JsonNode state, final boolean useStreamCapableState) { + if (state == null) { + return Optional.empty(); + } else { + final List stateMessages; + try { + stateMessages = Jsons.object(state, new AirbyteStateMessageListTypeReference()); + } catch (final IllegalArgumentException e) { + return Optional.of(getLegacyStateWrapper(state)); + } + if (stateMessages.size() == 0) { + return Optional.empty(); + } + + if (stateMessages.size() == 1) { + if (stateMessages.get(0).getType() == null) { + return Optional.of(getLegacyStateWrapper(state)); + } else { + switch (stateMessages.get(0).getType()) { + case GLOBAL -> { + return Optional.of(provideGlobalState(stateMessages.get(0), useStreamCapableState)); + } + case STREAM -> { + return Optional.of(provideStreamState(stateMessages, useStreamCapableState)); + } + case LEGACY -> { + return Optional.of(getLegacyStateWrapper(stateMessages.get(0).getData())); + } + 
default -> { + // Should not be reachable. + throw new IllegalStateException("Unexpected state type"); + } + } + } + } else { + if (stateMessages.stream().allMatch(stateMessage -> stateMessage.getType() == AirbyteStateType.STREAM)) { + return Optional.of(provideStreamState(stateMessages, useStreamCapableState)); + } + if (stateMessages.stream().allMatch(stateMessage -> stateMessage.getType() == null)) { + return Optional.of(getLegacyStateWrapper(state)); + } + + throw new IllegalStateException("Unexpected state blob, the state contains either multiple global or conflicting state type."); + + } + } + } + + /** + * Converts a StateWrapper to a State + * + * LegacyStates are directly serialized into the state. GlobalStates and StreamStates are serialized + * as a list of AirbyteStateMessage in the state attribute. + * + * @param stateWrapper the StateWrapper to convert + * @return the Converted State + */ + @SuppressWarnings("UnnecessaryDefault") + public static State getState(final StateWrapper stateWrapper) { + return switch (stateWrapper.getStateType()) { + case LEGACY -> new State().withState(stateWrapper.getLegacyState()); + case STREAM -> new State().withState(Jsons.jsonNode(stateWrapper.getStateMessages())); + case GLOBAL -> new State().withState(Jsons.jsonNode(List.of(stateWrapper.getGlobal()))); + default -> throw new RuntimeException("Unexpected StateType " + stateWrapper.getStateType()); + }; + } + + private static StateWrapper provideGlobalState(final AirbyteStateMessage stateMessages, final boolean useStreamCapableState) { + if (useStreamCapableState) { + return new StateWrapper() + .withStateType(StateType.GLOBAL) + .withGlobal(stateMessages); + } else { + return new StateWrapper() + .withStateType(StateType.LEGACY) + .withLegacyState(stateMessages.getData()); + } + } + + /** + * This is returning a wrapped state, it assumes that the state messages are ordered. 
+ * + * @param stateMessages - an ordered list of state message + * @param useStreamCapableState - a flag that indicates whether to return the new format + * @return a wrapped state + */ + private static StateWrapper provideStreamState(final List stateMessages, final boolean useStreamCapableState) { + if (useStreamCapableState) { + return new StateWrapper() + .withStateType(StateType.STREAM) + .withStateMessages(stateMessages); + } else { + return new StateWrapper() + .withStateType(StateType.LEGACY) + .withLegacyState(Iterables.getLast(stateMessages).getData()); + } + } + + private static StateWrapper getLegacyStateWrapper(final JsonNode state) { + return new StateWrapper() + .withStateType(StateType.LEGACY) + .withLegacyState(state); + } + +} diff --git a/airbyte-config/config-models/src/main/resources/types/JobResetConnectionConfig.yaml b/airbyte-config/config-models/src/main/resources/types/JobResetConnectionConfig.yaml index e58c9e91939b..32696ab6afb6 100644 --- a/airbyte-config/config-models/src/main/resources/types/JobResetConnectionConfig.yaml +++ b/airbyte-config/config-models/src/main/resources/types/JobResetConnectionConfig.yaml @@ -39,3 +39,8 @@ properties: type: object description: optional resource requirements to run sync workers existingJavaType: io.airbyte.config.ResourceRequirements + resetSourceConfiguration: + "$ref": ResetSourceConfiguration.yaml + state: + description: optional current state of the connection + "$ref": State.yaml diff --git a/airbyte-config/config-models/src/main/resources/types/ResetSourceConfiguration.yaml b/airbyte-config/config-models/src/main/resources/types/ResetSourceConfiguration.yaml new file mode 100644 index 000000000000..facea3cc60da --- /dev/null +++ b/airbyte-config/config-models/src/main/resources/types/ResetSourceConfiguration.yaml @@ -0,0 +1,15 @@ +--- +"$schema": http://json-schema.org/draft-07/schema# +"$id": 
https://github.com/airbytehq/airbyte/blob/master/airbyte-config/models/src/main/resources/types/ResetSourceConfiguration.yaml +title: ResetSourceConfiguration +description: configuration of the reset source +type: object +additionalProperties: false +required: + - streamsToReset +properties: + streamsToReset: + type: array + items: + type: object + existingJavaType: io.airbyte.protocol.models.StreamDescriptor diff --git a/airbyte-config/config-models/src/main/resources/types/StateType.yaml b/airbyte-config/config-models/src/main/resources/types/StateType.yaml new file mode 100644 index 000000000000..6a5534a21bc8 --- /dev/null +++ b/airbyte-config/config-models/src/main/resources/types/StateType.yaml @@ -0,0 +1,10 @@ +--- +"$schema": http://json-schema.org/draft-07/schema# +"$id": https://github.com/airbytehq/airbyte/blob/master/airbyte-config/models/src/main/resources/types/StateType.yaml +title: StateType +description: State Types +type: string +enum: + - global + - stream + - legacy diff --git a/airbyte-config/config-models/src/main/resources/types/StateWrapper.yaml b/airbyte-config/config-models/src/main/resources/types/StateWrapper.yaml new file mode 100644 index 000000000000..22a7501d3897 --- /dev/null +++ b/airbyte-config/config-models/src/main/resources/types/StateWrapper.yaml @@ -0,0 +1,26 @@ +--- +"$schema": http://json-schema.org/draft-07/schema# +"$id": https://github.com/airbytehq/airbyte/blob/master/airbyte-config/models/src/main/resources/types/StateWrapper.yaml +title: StateWrapper +description: Wrapper around the different type of States +type: object +additionalProperties: false +required: + - stateType +properties: + stateType: + description: The type of the state being wrapped + "$ref": StateType.yaml + legacyState: + description: Legacy State for states that haven't been migrated yet + type: object + existingJavaType: com.fasterxml.jackson.databind.JsonNode + global: + description: Representation of the shared + type: object + existingJavaType: 
io.airbyte.protocol.models.AirbyteStateMessage + stateMessages: + type: array + items: + type: object + existingJavaType: io.airbyte.protocol.models.AirbyteStateMessage diff --git a/airbyte-config/config-models/src/main/resources/types/StreamDescriptor.yaml b/airbyte-config/config-models/src/main/resources/types/StreamDescriptor.yaml deleted file mode 100644 index 41c5793883be..000000000000 --- a/airbyte-config/config-models/src/main/resources/types/StreamDescriptor.yaml +++ /dev/null @@ -1,16 +0,0 @@ ---- -"$schema": http://json-schema.org/draft-07/schema# -"$id": https://github.com/airbytehq/airbyte/blob/master/airbyte-config/models/src/main/resources/types/StreamDescriptor.yaml -title: StreamDescriptor -description: Name and namespace of a stream -type: object -required: - - name -additionalProperties: false -properties: - name: - description: Stream name - type: String - namespace: - description: Stream namespace - type: String diff --git a/airbyte-config/config-models/src/test/java/io/airbyte/config/EnvConfigsTest.java b/airbyte-config/config-models/src/test/java/io/airbyte/config/EnvConfigsTest.java index 995e74f6f667..d44510c868fb 100644 --- a/airbyte-config/config-models/src/test/java/io/airbyte/config/EnvConfigsTest.java +++ b/airbyte-config/config-models/src/test/java/io/airbyte/config/EnvConfigsTest.java @@ -7,6 +7,8 @@ import static org.junit.jupiter.api.Assertions.*; import io.airbyte.commons.version.AirbyteVersion; +import io.airbyte.config.Configs.DeploymentMode; +import io.airbyte.config.Configs.JobErrorReportingStrategy; import io.airbyte.config.Configs.WorkerEnvironment; import java.nio.file.Paths; import java.util.HashMap; @@ -177,6 +179,27 @@ void testTrackingStrategy() { assertEquals(Configs.TrackingStrategy.LOGGING, config.getTrackingStrategy()); } + @Test + void testErrorReportingStrategy() { + envMap.put(EnvConfigs.JOB_ERROR_REPORTING_STRATEGY, null); + assertEquals(JobErrorReportingStrategy.LOGGING, config.getJobErrorReportingStrategy()); + 
+ envMap.put(EnvConfigs.JOB_ERROR_REPORTING_STRATEGY, "abc"); + assertEquals(JobErrorReportingStrategy.LOGGING, config.getJobErrorReportingStrategy()); + + envMap.put(EnvConfigs.JOB_ERROR_REPORTING_STRATEGY, "logging"); + assertEquals(JobErrorReportingStrategy.LOGGING, config.getJobErrorReportingStrategy()); + + envMap.put(EnvConfigs.JOB_ERROR_REPORTING_STRATEGY, "sentry"); + assertEquals(JobErrorReportingStrategy.SENTRY, config.getJobErrorReportingStrategy()); + + envMap.put(EnvConfigs.JOB_ERROR_REPORTING_STRATEGY, "LOGGING"); + assertEquals(JobErrorReportingStrategy.LOGGING, config.getJobErrorReportingStrategy()); + + envMap.put(EnvConfigs.JOB_ERROR_REPORTING_STRATEGY, "SENTRY"); + assertEquals(JobErrorReportingStrategy.SENTRY, config.getJobErrorReportingStrategy()); + } + @Test void testDeploymentMode() { envMap.put(EnvConfigs.DEPLOYMENT_MODE, null); @@ -409,6 +432,7 @@ void testSharedJobEnvMapRetrieval() { envMap.put(EnvConfigs.WORKER_ENVIRONMENT, WorkerEnvironment.KUBERNETES.name()); final Map expected = Map.of("AIRBYTE_VERSION", DEV, "AIRBYTE_ROLE", "", + "DEPLOYMENT_MODE", "OSS", "WORKER_ENVIRONMENT", "KUBERNETES"); assertEquals(expected, config.getJobDefaultEnvMap()); } @@ -419,11 +443,13 @@ void testAllJobEnvMapRetrieval() { envMap.put(EnvConfigs.AIRBYTE_ROLE, "UNIT_TEST"); envMap.put(EnvConfigs.JOB_DEFAULT_ENV_PREFIX + "ENV1", "VAL1"); envMap.put(EnvConfigs.JOB_DEFAULT_ENV_PREFIX + "ENV2", "VAL\"2WithQuotesand$ymbols"); + envMap.put(EnvConfigs.DEPLOYMENT_MODE, DeploymentMode.CLOUD.name()); final Map expected = Map.of("ENV1", "VAL1", "ENV2", "VAL\"2WithQuotesand$ymbols", "AIRBYTE_VERSION", DEV, "AIRBYTE_ROLE", "UNIT_TEST", + "DEPLOYMENT_MODE", "CLOUD", "WORKER_ENVIRONMENT", "DOCKER"); assertEquals(expected, config.getJobDefaultEnvMap()); } diff --git a/airbyte-config/config-models/src/test/java/io/airbyte/config/helpers/StateMessageHelperTest.java b/airbyte-config/config-models/src/test/java/io/airbyte/config/helpers/StateMessageHelperTest.java new file 
mode 100644 index 000000000000..fc9f50f3bc53 --- /dev/null +++ b/airbyte-config/config-models/src/test/java/io/airbyte/config/helpers/StateMessageHelperTest.java @@ -0,0 +1,252 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.config.helpers; + +import com.fasterxml.jackson.databind.JsonNode; +import io.airbyte.commons.json.Jsons; +import io.airbyte.config.State; +import io.airbyte.config.StateType; +import io.airbyte.config.StateWrapper; +import io.airbyte.protocol.models.AirbyteGlobalState; +import io.airbyte.protocol.models.AirbyteStateMessage; +import io.airbyte.protocol.models.AirbyteStateMessage.AirbyteStateType; +import io.airbyte.protocol.models.AirbyteStreamState; +import io.airbyte.protocol.models.StreamDescriptor; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; +import java.util.Map; +import java.util.Optional; +import org.assertj.core.api.Assertions; +import org.junit.jupiter.api.Test; + +public class StateMessageHelperTest { + + private static final boolean USE_STREAM_CAPABLE_STATE = true; + private static final boolean DONT_USE_STREAM_CAPABALE_STATE = false; + + @Test + public void testEmpty() { + final Optional stateWrapper = StateMessageHelper.getTypedState(null, USE_STREAM_CAPABLE_STATE); + Assertions.assertThat(stateWrapper).isEmpty(); + } + + @Test + public void testEmptyList() { + final Optional stateWrapper = StateMessageHelper.getTypedState(Jsons.arrayNode(), USE_STREAM_CAPABLE_STATE); + Assertions.assertThat(stateWrapper).isEmpty(); + } + + @Test + public void testLegacy() { + final Optional stateWrapper = StateMessageHelper.getTypedState(Jsons.emptyObject(), USE_STREAM_CAPABLE_STATE); + Assertions.assertThat(stateWrapper).isNotEmpty(); + Assertions.assertThat(stateWrapper.get().getStateType()).isEqualTo(StateType.LEGACY); + } + + @Test + public void testLegacyInList() { + final JsonNode jsonState = Jsons.jsonNode(List.of(Map.of("Any", "value"))); + + final Optional 
stateWrapper = StateMessageHelper.getTypedState(jsonState, USE_STREAM_CAPABLE_STATE); + Assertions.assertThat(stateWrapper).isNotEmpty(); + Assertions.assertThat(stateWrapper.get().getStateType()).isEqualTo(StateType.LEGACY); + Assertions.assertThat(stateWrapper.get().getLegacyState()).isEqualTo(jsonState); + } + + @Test + public void testLegacyInNewFormat() { + final AirbyteStateMessage stateMessage = new AirbyteStateMessage() + .withType(AirbyteStateType.LEGACY) + .withData(Jsons.emptyObject()); + final Optional stateWrapper = StateMessageHelper.getTypedState(Jsons.jsonNode(List.of(stateMessage)), USE_STREAM_CAPABLE_STATE); + Assertions.assertThat(stateWrapper).isNotEmpty(); + Assertions.assertThat(stateWrapper.get().getStateType()).isEqualTo(StateType.LEGACY); + } + + @Test + public void testGlobal() { + final AirbyteStateMessage stateMessage = new AirbyteStateMessage() + .withType(AirbyteStateType.GLOBAL) + .withGlobal( + new AirbyteGlobalState() + .withSharedState(Jsons.emptyObject()) + .withStreamStates(List.of( + new AirbyteStreamState().withStreamDescriptor(new StreamDescriptor().withName("a")).withStreamState(Jsons.emptyObject()), + new AirbyteStreamState().withStreamDescriptor(new StreamDescriptor().withName("b")).withStreamState(Jsons.emptyObject())))); + final Optional stateWrapper = + StateMessageHelper.getTypedState(Jsons.jsonNode(List.of(stateMessage)), USE_STREAM_CAPABLE_STATE); + Assertions.assertThat(stateWrapper).isNotEmpty(); + Assertions.assertThat(stateWrapper.get().getStateType()).isEqualTo(StateType.GLOBAL); + Assertions.assertThat(stateWrapper.get().getGlobal()).isEqualTo(stateMessage); + } + + @Test + public void testGlobalForceLegacy() { + final JsonNode legacyState = Jsons.jsonNode(1); + final AirbyteStateMessage stateMessage = new AirbyteStateMessage() + .withType(AirbyteStateType.GLOBAL) + .withGlobal( + new AirbyteGlobalState() + .withSharedState(Jsons.emptyObject()) + .withStreamStates(List.of( + new 
AirbyteStreamState().withStreamDescriptor(new StreamDescriptor().withName("a")).withStreamState(Jsons.emptyObject()), + new AirbyteStreamState().withStreamDescriptor(new StreamDescriptor().withName("b")).withStreamState(Jsons.emptyObject())))) + .withData(legacyState); + final Optional stateWrapper = + StateMessageHelper.getTypedState(Jsons.jsonNode(List.of(stateMessage)), DONT_USE_STREAM_CAPABALE_STATE); + Assertions.assertThat(stateWrapper).isNotEmpty(); + Assertions.assertThat(stateWrapper.get().getStateType()).isEqualTo(StateType.LEGACY); + Assertions.assertThat(stateWrapper.get().getLegacyState()).isEqualTo(legacyState); + } + + @Test + public void testStream() { + final AirbyteStateMessage stateMessage1 = new AirbyteStateMessage() + .withType(AirbyteStateType.STREAM) + .withStream( + new AirbyteStreamState().withStreamDescriptor(new StreamDescriptor().withName("a")).withStreamState(Jsons.emptyObject())); + final AirbyteStateMessage stateMessage2 = new AirbyteStateMessage() + .withType(AirbyteStateType.STREAM) + .withStream( + new AirbyteStreamState().withStreamDescriptor(new StreamDescriptor().withName("b")).withStreamState(Jsons.emptyObject())); + final Optional stateWrapper = + StateMessageHelper.getTypedState(Jsons.jsonNode(List.of(stateMessage1, stateMessage2)), USE_STREAM_CAPABLE_STATE); + Assertions.assertThat(stateWrapper).isNotEmpty(); + Assertions.assertThat(stateWrapper.get().getStateType()).isEqualTo(StateType.STREAM); + Assertions.assertThat(stateWrapper.get().getStateMessages()).containsExactlyInAnyOrder(stateMessage1, stateMessage2); + } + + @Test + public void testStreamForceLegacy() { + final JsonNode firstEmittedLegacyState = Jsons.jsonNode(1); + final AirbyteStateMessage stateMessage1 = new AirbyteStateMessage() + .withType(AirbyteStateType.STREAM) + .withStream( + new AirbyteStreamState().withStreamDescriptor(new StreamDescriptor().withName("a")).withStreamState(Jsons.emptyObject())) + .withData(firstEmittedLegacyState); + final JsonNode 
secondEmittedLegacyState = Jsons.jsonNode(2); + final AirbyteStateMessage stateMessage2 = new AirbyteStateMessage() + .withType(AirbyteStateType.STREAM) + .withStream( + new AirbyteStreamState().withStreamDescriptor(new StreamDescriptor().withName("b")).withStreamState(Jsons.emptyObject())) + .withData(secondEmittedLegacyState); + final Optional stateWrapper = + StateMessageHelper.getTypedState(Jsons.jsonNode(List.of(stateMessage1, stateMessage2)), DONT_USE_STREAM_CAPABALE_STATE); + Assertions.assertThat(stateWrapper).isNotEmpty(); + Assertions.assertThat(stateWrapper.get().getStateType()).isEqualTo(StateType.LEGACY); + Assertions.assertThat(stateWrapper.get().getLegacyState()).isEqualTo(secondEmittedLegacyState); + } + + @Test + public void testInvalidMixedState() { + final AirbyteStateMessage stateMessage1 = new AirbyteStateMessage() + .withType(AirbyteStateType.STREAM) + .withStream( + new AirbyteStreamState().withStreamDescriptor(new StreamDescriptor().withName("a")).withStreamState(Jsons.emptyObject())); + final AirbyteStateMessage stateMessage2 = new AirbyteStateMessage() + .withType(AirbyteStateType.GLOBAL) + .withGlobal( + new AirbyteGlobalState() + .withSharedState(Jsons.emptyObject()) + .withStreamStates(List.of( + new AirbyteStreamState().withStreamDescriptor(new StreamDescriptor().withName("a")).withStreamState(Jsons.emptyObject()), + new AirbyteStreamState().withStreamDescriptor(new StreamDescriptor().withName("b")).withStreamState(Jsons.emptyObject())))); + Assertions + .assertThatThrownBy( + () -> StateMessageHelper.getTypedState(Jsons.jsonNode(List.of(stateMessage1, stateMessage2)), USE_STREAM_CAPABLE_STATE)) + .isInstanceOf(IllegalStateException.class); + } + + @Test + public void testDuplicatedGlobalState() { + final AirbyteStateMessage stateMessage1 = new AirbyteStateMessage() + .withType(AirbyteStateType.GLOBAL) + .withGlobal( + new AirbyteGlobalState() + .withSharedState(Jsons.emptyObject()) + .withStreamStates(List.of( + new 
AirbyteStreamState().withStreamDescriptor(new StreamDescriptor().withName("a")).withStreamState(Jsons.emptyObject()), + new AirbyteStreamState().withStreamDescriptor(new StreamDescriptor().withName("b")).withStreamState(Jsons.emptyObject())))); + final AirbyteStateMessage stateMessage2 = new AirbyteStateMessage() + .withType(AirbyteStateType.GLOBAL) + .withGlobal( + new AirbyteGlobalState() + .withSharedState(Jsons.emptyObject()) + .withStreamStates(List.of( + new AirbyteStreamState().withStreamDescriptor(new StreamDescriptor().withName("a")).withStreamState(Jsons.emptyObject()), + new AirbyteStreamState().withStreamDescriptor(new StreamDescriptor().withName("b")).withStreamState(Jsons.emptyObject())))); + Assertions + .assertThatThrownBy( + () -> StateMessageHelper.getTypedState(Jsons.jsonNode(List.of(stateMessage1, stateMessage2)), USE_STREAM_CAPABLE_STATE)) + .isInstanceOf(IllegalStateException.class); + } + + @Test + public void testLegacyStateConversion() { + final StateWrapper stateWrapper = new StateWrapper() + .withStateType(StateType.LEGACY) + .withLegacyState(Jsons.deserialize("{\"json\": \"blob\"}")); + final State expectedState = new State().withState(Jsons.deserialize("{\"json\": \"blob\"}")); + + final State convertedState = StateMessageHelper.getState(stateWrapper); + Assertions.assertThat(convertedState).isEqualTo(expectedState); + } + + @Test + public void testGlobalStateConversion() { + final StateWrapper stateWrapper = new StateWrapper() + .withStateType(StateType.GLOBAL) + .withGlobal( + new AirbyteStateMessage().withType(AirbyteStateType.GLOBAL).withGlobal( + new AirbyteGlobalState() + .withSharedState(Jsons.deserialize("\"shared\"")) + .withStreamStates(Collections.singletonList( + new AirbyteStreamState() + .withStreamDescriptor(new StreamDescriptor().withNamespace("ns").withName("name")) + .withStreamState(Jsons.deserialize("\"stream state\"")))))); + final State expectedState = new State().withState(Jsons.deserialize( + """ + [{ + 
"type":"GLOBAL", + "global":{ + "shared_state":"shared", + "stream_states":[ + {"stream_descriptor":{"name":"name","namespace":"ns"},"stream_state":"stream state"} + ] + } + }] + """)); + + final State convertedState = StateMessageHelper.getState(stateWrapper); + Assertions.assertThat(convertedState).isEqualTo(expectedState); + } + + @Test + public void testStreamStateConversion() { + final StateWrapper stateWrapper = new StateWrapper() + .withStateType(StateType.STREAM) + .withStateMessages(Arrays.asList( + new AirbyteStateMessage().withType(AirbyteStateType.STREAM).withStream( + new AirbyteStreamState() + .withStreamDescriptor(new StreamDescriptor().withNamespace("ns1").withName("name1")) + .withStreamState(Jsons.deserialize("\"state1\""))), + new AirbyteStateMessage().withType(AirbyteStateType.STREAM).withStream( + new AirbyteStreamState() + .withStreamDescriptor(new StreamDescriptor().withNamespace("ns2").withName("name2")) + .withStreamState(Jsons.deserialize("\"state2\""))))); + final State expectedState = new State().withState(Jsons.deserialize( + """ + [ + {"type":"STREAM","stream":{"stream_descriptor":{"name":"name1","namespace":"ns1"},"stream_state":"state1"}}, + {"type":"STREAM","stream":{"stream_descriptor":{"name":"name2","namespace":"ns2"},"stream_state":"state2"}} + ] + """)); + + final State convertedState = StateMessageHelper.getState(stateWrapper); + Assertions.assertThat(convertedState).isEqualTo(expectedState); + } + +} diff --git a/airbyte-config/config-persistence/build.gradle b/airbyte-config/config-persistence/build.gradle index 4661533de4be..ca7490079ac4 100644 --- a/airbyte-config/config-persistence/build.gradle +++ b/airbyte-config/config-persistence/build.gradle @@ -14,11 +14,13 @@ dependencies { implementation 'commons-io:commons-io:2.7' implementation 'com.google.cloud:google-cloud-secretmanager:2.0.5' + implementation 'com.bettercloud:vault-java-driver:5.1.0' testImplementation 'org.hamcrest:hamcrest-all:1.3' - testImplementation 
libs.testcontainers.postgresql + testImplementation libs.platform.testcontainers.postgresql testImplementation libs.flyway.core testImplementation project(':airbyte-test-utils') + testImplementation "org.testcontainers:vault:1.17.2" integrationTestJavaImplementation project(':airbyte-config:config-persistence') } diff --git a/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/ConfigRepository.java b/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/ConfigRepository.java index 676b52e6c90c..6ec11ff22103 100644 --- a/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/ConfigRepository.java +++ b/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/ConfigRepository.java @@ -45,6 +45,8 @@ import io.airbyte.db.instance.configs.jooq.generated.enums.StatusType; import io.airbyte.metrics.lib.MetricQueries; import io.airbyte.protocol.models.AirbyteCatalog; +import io.airbyte.protocol.models.CatalogHelpers; +import io.airbyte.protocol.models.StreamDescriptor; import io.airbyte.validation.json.JsonValidationException; import java.io.IOException; import java.time.OffsetDateTime; @@ -155,7 +157,7 @@ public void writeStandardWorkspace(final StandardWorkspace workspace) throws Jso } public void setFeedback(final UUID workflowId) throws JsonValidationException, ConfigNotFoundException, IOException { - final StandardWorkspace workspace = this.getStandardWorkspace(workflowId, false); + final StandardWorkspace workspace = getStandardWorkspace(workflowId, false); workspace.setFeedbackDone(true); @@ -757,20 +759,8 @@ public List listDestinationOAuthParam() throws JsonVa return persistence.listConfigs(ConfigSchema.DESTINATION_OAUTH_PARAM, DestinationOAuthParameter.class); } - public Optional getConnectionState(final UUID connectionId) throws IOException { - try { - final StandardSyncState connectionState = persistence.getConfig( - ConfigSchema.STANDARD_SYNC_STATE, - 
connectionId.toString(), - StandardSyncState.class); - return Optional.of(connectionState.getState()); - } catch (final ConfigNotFoundException e) { - return Optional.empty(); - } catch (final JsonValidationException e) { - throw new IllegalStateException(e); - } - } - + @Deprecated(forRemoval = true) + // use StatePersistence instead public void updateConnectionState(final UUID connectionId, final State state) throws IOException { LOGGER.info("Updating connection {} state: {}", connectionId, state); final StandardSyncState connectionState = new StandardSyncState().withConnectionId(connectionId).withState(state); @@ -993,4 +983,10 @@ public void writeWorkspaceServiceAccountNoSecrets(final WorkspaceServiceAccount workspaceServiceAccount); } + public List getAllStreamsForConnection(final UUID connectionId) + throws JsonValidationException, ConfigNotFoundException, IOException { + final StandardSync standardSync = getStandardSync(connectionId); + return CatalogHelpers.extractStreamDescriptors(standardSync.getCatalog()); + } + } diff --git a/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/DatabaseConfigPersistence.java b/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/DatabaseConfigPersistence.java index 7f3dde86dbb2..04635bfbee32 100644 --- a/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/DatabaseConfigPersistence.java +++ b/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/DatabaseConfigPersistence.java @@ -51,6 +51,7 @@ import io.airbyte.db.Database; import io.airbyte.db.ExceptionWrappingDatabase; import io.airbyte.db.instance.configs.jooq.generated.enums.ActorType; +import io.airbyte.db.instance.configs.jooq.generated.enums.ReleaseStage; import io.airbyte.protocol.models.ConnectorSpecification; import io.airbyte.validation.json.JsonValidationException; import java.io.IOException; @@ -1686,6 +1687,8 @@ private Set getConnectorRepositoriesInUse(final 
DSLContext ctx) { Map getConnectorRepositoryToInfoMap(final DSLContext ctx) { return ctx.select(asterisk()) .from(ACTOR_DEFINITION) + .where(ACTOR_DEFINITION.RELEASE_STAGE.isNull() + .or(ACTOR_DEFINITION.RELEASE_STAGE.ne(ReleaseStage.custom).or(ACTOR_DEFINITION.CUSTOM))) .fetch() .stream() .collect(Collectors.toMap( diff --git a/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/StatePersistence.java b/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/StatePersistence.java new file mode 100644 index 000000000000..e21becfdf1d9 --- /dev/null +++ b/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/StatePersistence.java @@ -0,0 +1,327 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.config.persistence; + +import static io.airbyte.db.instance.configs.jooq.generated.Tables.STATE; + +import com.fasterxml.jackson.databind.JsonNode; +import io.airbyte.commons.enums.Enums; +import io.airbyte.commons.json.Jsons; +import io.airbyte.config.State; +import io.airbyte.config.StateType; +import io.airbyte.config.StateWrapper; +import io.airbyte.db.Database; +import io.airbyte.db.ExceptionWrappingDatabase; +import io.airbyte.protocol.models.AirbyteGlobalState; +import io.airbyte.protocol.models.AirbyteStateMessage; +import io.airbyte.protocol.models.AirbyteStateMessage.AirbyteStateType; +import io.airbyte.protocol.models.AirbyteStreamState; +import io.airbyte.protocol.models.StreamDescriptor; +import java.io.IOException; +import java.time.OffsetDateTime; +import java.util.List; +import java.util.Map; +import java.util.Optional; +import java.util.Set; +import java.util.UUID; +import java.util.stream.Collectors; +import org.jooq.Condition; +import org.jooq.DSLContext; +import org.jooq.Field; +import org.jooq.JSONB; +import org.jooq.Record; +import org.jooq.RecordMapper; +import org.jooq.impl.DSL; + +/** + * State Persistence + * + * Handle persisting States to 
the Database. + * + * Supports migration from Legacy to Global or Stream. Other type migrations need to go through a + * reset. (an exception will be thrown) + */ +public class StatePersistence { + + private final ExceptionWrappingDatabase database; + + public StatePersistence(final Database database) { + this.database = new ExceptionWrappingDatabase(database); + } + + /** + * Get the current State of a Connection + * + * @param connectionId + * @return + * @throws IOException + */ + public Optional getCurrentState(final UUID connectionId) throws IOException { + final List records = this.database.query(ctx -> getStateRecords(ctx, connectionId)); + + if (records.isEmpty()) { + return Optional.empty(); + } + + return switch (getStateType(connectionId, records)) { + case GLOBAL -> Optional.of(buildGlobalState(records)); + case STREAM -> Optional.of(buildStreamState(records)); + default -> Optional.of(buildLegacyState(records)); + }; + } + + /** + * Create or update the states described in the StateWrapper. Null states will be deleted. + * + * The only state migrations supported are going from a Legacy state to either a Global or Stream + * state. Other state type migrations should go through an explicit reset. An exception will be + * thrown to prevent the system from getting into a bad state. + * + * @param connectionId + * @param state + * @throws IOException + */ + public void updateOrCreateState(final UUID connectionId, final StateWrapper state) throws IOException { + final Optional previousState = getCurrentState(connectionId); + final boolean isMigration = previousState.isPresent() && previousState.get().getStateType() == StateType.LEGACY && + state.getStateType() != StateType.LEGACY; + + // The only case where we allow a state migration is moving from LEGACY. + // We expect any other migration to go through an explicit reset. 
+ if (!isMigration && previousState.isPresent() && previousState.get().getStateType() != state.getStateType()) { + throw new IllegalStateException("Unexpected type migration from '" + previousState.get().getStateType() + "' to '" + state.getStateType() + + "'. Migration of StateType need to go through an explicit reset."); + } + + this.database.transaction(ctx -> { + if (isMigration) { + clearLegacyState(ctx, connectionId); + } + switch (state.getStateType()) { + case GLOBAL -> saveGlobalState(ctx, connectionId, state.getGlobal().getGlobal()); + case STREAM -> saveStreamState(ctx, connectionId, state.getStateMessages()); + case LEGACY -> saveLegacyState(ctx, connectionId, state.getLegacyState()); + } + return null; + }); + } + + private static void clearLegacyState(final DSLContext ctx, final UUID connectionId) { + writeStateToDb(ctx, connectionId, null, null, StateType.LEGACY, null); + } + + private static void saveGlobalState(final DSLContext ctx, final UUID connectionId, final AirbyteGlobalState globalState) { + writeStateToDb(ctx, connectionId, null, null, StateType.GLOBAL, globalState.getSharedState()); + for (final AirbyteStreamState streamState : globalState.getStreamStates()) { + writeStateToDb(ctx, + connectionId, + streamState.getStreamDescriptor().getName(), + streamState.getStreamDescriptor().getNamespace(), + StateType.GLOBAL, + streamState.getStreamState()); + } + } + + private static void saveStreamState(final DSLContext ctx, final UUID connectionId, final List stateMessages) { + for (final AirbyteStateMessage stateMessage : stateMessages) { + final AirbyteStreamState streamState = stateMessage.getStream(); + writeStateToDb(ctx, + connectionId, + streamState.getStreamDescriptor().getName(), + streamState.getStreamDescriptor().getNamespace(), + StateType.STREAM, + streamState.getStreamState()); + } + } + + private static void saveLegacyState(final DSLContext ctx, final UUID connectionId, final JsonNode state) { + writeStateToDb(ctx, connectionId, 
null, null, StateType.LEGACY, state); + } + + /** + * Performs the actual SQL operation depending on the state + * + * If the state is null, it will delete the row, otherwise do an insert or update on conflict + */ + static void writeStateToDb(final DSLContext ctx, + final UUID connectionId, + final String streamName, + final String namespace, + final StateType stateType, + final JsonNode state) { + if (state != null) { + final boolean hasState = ctx.selectFrom(STATE) + .where( + STATE.CONNECTION_ID.eq(connectionId), + isNullOrEquals(STATE.STREAM_NAME, streamName), + isNullOrEquals(STATE.NAMESPACE, namespace)) + .fetch().isNotEmpty(); + + // NOTE: the legacy code was storing a State object instead of just the State data field. We kept + // the same behavior for consistency. + final JSONB jsonbState = JSONB.valueOf(Jsons.serialize(stateType != StateType.LEGACY ? state : new State().withState(state))); + final OffsetDateTime now = OffsetDateTime.now(); + + if (!hasState) { + ctx.insertInto(STATE) + .columns( + STATE.ID, + STATE.CREATED_AT, + STATE.UPDATED_AT, + STATE.CONNECTION_ID, + STATE.STREAM_NAME, + STATE.NAMESPACE, + STATE.STATE_, + STATE.TYPE) + .values( + UUID.randomUUID(), + now, + now, + connectionId, + streamName, + namespace, + jsonbState, + Enums.convertTo(stateType, io.airbyte.db.instance.configs.jooq.generated.enums.StateType.class)) + .execute(); + + } else { + ctx.update(STATE) + .set(STATE.UPDATED_AT, now) + .set(STATE.STATE_, jsonbState) + .where( + STATE.CONNECTION_ID.eq(connectionId), + isNullOrEquals(STATE.STREAM_NAME, streamName), + isNullOrEquals(STATE.NAMESPACE, namespace)) + .execute(); + } + + } else { + // If the state is null, we remove the state instead of keeping a null row + ctx.deleteFrom(STATE) + .where( + STATE.CONNECTION_ID.eq(connectionId), + isNullOrEquals(STATE.STREAM_NAME, streamName), + isNullOrEquals(STATE.NAMESPACE, namespace)) + .execute(); + } + } + + /** + * Helper function to handle null or equal case for the optional 
strings + * + * We need to have an explicit check for null values because NULL != "str" is NULL, not a boolean. + * + * @param field the targeted field + * @param value the value to check + * @return The Condition that performs the desired check + */ + private static Condition isNullOrEquals(final Field field, final String value) { + return value != null ? field.eq(value) : field.isNull(); + } + + /** + * Get the StateType for a given list of StateRecords + * + * @param connectionId The connectionId of the records, used to add more debugging context if an + * error is detected + * @param records The list of StateRecords to process, must not be empty + * @return the StateType of the records + * @throws IllegalStateException If StateRecords have inconsistent types + */ + private static io.airbyte.db.instance.configs.jooq.generated.enums.StateType getStateType( + final UUID connectionId, + final List records) { + final Set types = + records.stream().map(r -> r.type).collect(Collectors.toSet()); + if (types.size() == 1) { + return types.stream().findFirst().get(); + } + + throw new IllegalStateException("Inconsistent StateTypes for connectionId " + connectionId + + " (" + String.join(", ", types.stream().map(stateType -> stateType.getLiteral()).toList()) + ")"); + } + + /** + * Get the state records from the DB + * + * @param ctx A valid DSL context to use for the query + * @param connectionId the ID of the connection + * @return The StateRecords for the connectionId + */ + private static List getStateRecords(final DSLContext ctx, final UUID connectionId) { + return ctx.select(DSL.asterisk()) + .from(STATE) + .where(STATE.CONNECTION_ID.eq(connectionId)) + .fetch(getStateRecordMapper()) + .stream().toList(); + } + + /** + * Build Global state + * + * The list of records can contain one global shared state that is the state without streamName and + * without namespace The other records should be translated into AirbyteStreamState + */ + private static StateWrapper 
buildGlobalState(final List records) { + // Split the global shared state from the other per stream records + final Map> partitions = records.stream() + .collect(Collectors.partitioningBy(r -> r.streamName == null && r.namespace == null)); + + final AirbyteGlobalState globalState = new AirbyteGlobalState() + .withSharedState(partitions.get(Boolean.TRUE).stream().map(r -> r.state).findFirst().orElse(null)) + .withStreamStates(partitions.get(Boolean.FALSE).stream().map(StatePersistence::buildAirbyteStreamState).toList()); + + final AirbyteStateMessage msg = new AirbyteStateMessage() + .withType(AirbyteStateType.GLOBAL) + .withGlobal(globalState); + return new StateWrapper().withStateType(StateType.GLOBAL).withGlobal(msg); + } + + /** + * Build StateWrapper for a PerStream state + */ + private static StateWrapper buildStreamState(final List records) { + final List messages = records.stream().map( + record -> new AirbyteStateMessage() + .withType(AirbyteStateType.STREAM) + .withStream(buildAirbyteStreamState(record))) + .toList(); + return new StateWrapper().withStateType(StateType.STREAM).withStateMessages(messages); + } + + /** + * Build a StateWrapper for Legacy state + */ + private static StateWrapper buildLegacyState(final List records) { + final State legacyState = Jsons.convertValue(records.get(0).state, State.class); + return new StateWrapper() + .withStateType(StateType.LEGACY) + .withLegacyState(legacyState.getState()); + } + + /** + * Convert a StateRecord to an AirbyteStreamState + */ + private static AirbyteStreamState buildAirbyteStreamState(final StateRecord record) { + return new AirbyteStreamState() + .withStreamDescriptor(new StreamDescriptor().withName(record.streamName).withNamespace(record.namespace)) + .withStreamState(record.state); + } + + private static RecordMapper getStateRecordMapper() { + return record -> new StateRecord( + record.get(STATE.TYPE, io.airbyte.db.instance.configs.jooq.generated.enums.StateType.class), + 
record.get(STATE.STREAM_NAME, String.class), + record.get(STATE.NAMESPACE, String.class), + Jsons.deserialize(record.get(STATE.STATE_).data())); + } + + private record StateRecord( + io.airbyte.db.instance.configs.jooq.generated.enums.StateType type, + String streamName, + String namespace, + JsonNode state) {} + +} diff --git a/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/StreamResetPersistence.java b/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/StreamResetPersistence.java index 9b8bde3a16bb..0cd2e01adf09 100644 --- a/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/StreamResetPersistence.java +++ b/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/StreamResetPersistence.java @@ -6,10 +6,10 @@ import static org.jooq.impl.DSL.noCondition; -import io.airbyte.config.StreamDescriptor; import io.airbyte.config.StreamResetRecord; import io.airbyte.db.Database; import io.airbyte.db.ExceptionWrappingDatabase; +import io.airbyte.protocol.models.StreamDescriptor; import java.io.IOException; import java.time.OffsetDateTime; import java.util.List; @@ -38,7 +38,7 @@ public StreamResetPersistence(final Database database) { } /* - * Get a list of streamDescriptors for streams that have pending or running resets + * Get a list of StreamDescriptors for streams that have pending or running resets */ public List getStreamResets(final UUID connectionId) throws IOException { return database.query(ctx -> ctx.select(DSL.asterisk()) diff --git a/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/split_secrets/JsonSecretsProcessor.java b/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/split_secrets/JsonSecretsProcessor.java index 88f0c796273d..f97ce914bb1a 100644 --- a/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/split_secrets/JsonSecretsProcessor.java +++ 
b/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/split_secrets/JsonSecretsProcessor.java @@ -17,6 +17,7 @@ import java.util.List; import java.util.Optional; import java.util.Set; +import java.util.stream.Collectors; import lombok.Builder; import lombok.extern.slf4j.Slf4j; @@ -76,11 +77,14 @@ public JsonNode prepareSecretsForOutput(final JsonNode obj, final JsonNode schem * @return json object with all secrets masked. */ public static JsonNode maskAllSecrets(final JsonNode json, final JsonNode schema) { - final Set pathsWithSecrets = JsonSchemas.collectJsonPathsThatMeetCondition( + final Set pathsWithSecrets = JsonSchemas.collectPathsThatMeetCondition( schema, node -> MoreIterators.toList(node.fields()) .stream() - .anyMatch(field -> AIRBYTE_SECRET_FIELD.equals(field.getKey()))); + .anyMatch(field -> AIRBYTE_SECRET_FIELD.equals(field.getKey()))) + .stream() + .map(JsonPaths::mapJsonSchemaPathToJsonPath) + .collect(Collectors.toSet()); JsonNode copy = Jsons.clone(json); for (final String path : pathsWithSecrets) { diff --git a/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/split_secrets/SecretPersistence.java b/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/split_secrets/SecretPersistence.java index a98140c81814..bd039f170d4e 100644 --- a/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/split_secrets/SecretPersistence.java +++ b/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/split_secrets/SecretPersistence.java @@ -31,6 +31,9 @@ static Optional getLongLived(final DSLContext dslContext, fin case GOOGLE_SECRET_MANAGER -> { return Optional.of(GoogleSecretManagerPersistence.getLongLived(configs.getSecretStoreGcpProjectId(), configs.getSecretStoreGcpCredentials())); } + case VAULT -> { + return Optional.of(new VaultSecretPersistence(configs.getVaultAddress(), configs.getVaultPrefix(), configs.getVaultToken())); + } 
default -> { return Optional.empty(); } @@ -56,6 +59,9 @@ static Optional getEphemeral(final DSLContext dslContext, fin case GOOGLE_SECRET_MANAGER -> { return Optional.of(GoogleSecretManagerPersistence.getEphemeral(configs.getSecretStoreGcpProjectId(), configs.getSecretStoreGcpCredentials())); } + case VAULT -> { + return Optional.of(new VaultSecretPersistence(configs.getVaultAddress(), configs.getVaultPrefix(), configs.getVaultToken())); + } default -> { return Optional.empty(); } diff --git a/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/split_secrets/SecretsHelpers.java b/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/split_secrets/SecretsHelpers.java index 4f1d51ea9473..eef92dcdfd97 100644 --- a/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/split_secrets/SecretsHelpers.java +++ b/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/split_secrets/SecretsHelpers.java @@ -171,12 +171,13 @@ public static SplitSecretConfig splitAndUpdateConfig(final Supplier uuidSu * in an ascending alphabetical order. 
*/ public static List getSortedSecretPaths(final JsonNode spec) { - return JsonSchemas.collectJsonPathsThatMeetCondition( + return JsonSchemas.collectPathsThatMeetCondition( spec, node -> MoreIterators.toList(node.fields()) .stream() .anyMatch(field -> field.getKey().equals(JsonSecretsProcessor.AIRBYTE_SECRET_FIELD))) .stream() + .map(JsonPaths::mapJsonSchemaPathToJsonPath) .sorted() .toList(); } diff --git a/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/split_secrets/VaultSecretPersistence.java b/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/split_secrets/VaultSecretPersistence.java new file mode 100644 index 000000000000..066f06f109a6 --- /dev/null +++ b/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/split_secrets/VaultSecretPersistence.java @@ -0,0 +1,65 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.config.persistence.split_secrets; + +import com.bettercloud.vault.Vault; +import com.bettercloud.vault.VaultConfig; +import com.bettercloud.vault.VaultException; +import io.airbyte.commons.lang.Exceptions; +import java.util.HashMap; +import java.util.Optional; +import lombok.extern.slf4j.Slf4j; +import lombok.val; + +@Slf4j +final public class VaultSecretPersistence implements SecretPersistence { + + private final String SECRET_KEY = "value"; + private final Vault vault; + private final String pathPrefix; + + public VaultSecretPersistence(final String address, final String prefix, final String token) { + this.vault = Exceptions.toRuntime(() -> getVaultClient(address, token)); + this.pathPrefix = prefix; + } + + @Override + public Optional read(final SecretCoordinate coordinate) { + try { + val response = vault.logical().read(pathPrefix + coordinate.getFullCoordinate()); + val restResponse = response.getRestResponse(); + val responseCode = restResponse.getStatus(); + if (responseCode != 200) { + log.error("Vault failed on read. 
Response code: " + responseCode); + return Optional.empty(); + } + val data = response.getData(); + return Optional.of(data.get(SECRET_KEY)); + } catch (final VaultException e) { + return Optional.empty(); + } + } + + @Override + public void write(final SecretCoordinate coordinate, final String payload) { + try { + val newSecret = new HashMap(); + newSecret.put(SECRET_KEY, payload); + vault.logical().write(pathPrefix + coordinate.getFullCoordinate(), newSecret); + } catch (final VaultException e) { + log.error("Vault failed on write", e); + } + } + + private static Vault getVaultClient(final String address, final String token) throws VaultException { + val config = new VaultConfig() + .address(address) + .token(token) + .engineVersion(2) + .build(); + return new Vault(config); + } + +} diff --git a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/ConfigRepositoryTest.java b/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/ConfigRepositoryTest.java index bcb20fd9e8e8..c596fca20ace 100644 --- a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/ConfigRepositoryTest.java +++ b/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/ConfigRepositoryTest.java @@ -28,12 +28,15 @@ import io.airbyte.config.StandardWorkspace; import io.airbyte.config.State; import io.airbyte.db.Database; +import io.airbyte.protocol.models.AirbyteStream; +import io.airbyte.protocol.models.ConfiguredAirbyteCatalog; +import io.airbyte.protocol.models.ConfiguredAirbyteStream; +import io.airbyte.protocol.models.StreamDescriptor; import io.airbyte.validation.json.JsonValidationException; import java.io.IOException; import java.sql.SQLException; import java.util.ArrayList; import java.util.List; -import java.util.Optional; import java.util.UUID; import org.jooq.Result; import org.junit.jupiter.api.AfterEach; @@ -113,22 +116,6 @@ void testWorkspaceByConnectionId(final boolean isTombstone) 
throws ConfigNotFoun verify(configRepository).getStandardWorkspace(WORKSPACE_ID, isTombstone); } - @Test - void testGetConnectionState() throws Exception { - final UUID connectionId = UUID.randomUUID(); - final State state = new State().withState(Jsons.deserialize("{ \"cursor\": 1000 }")); - final StandardSyncState connectionState = new StandardSyncState().withConnectionId(connectionId).withState(state); - - when(configPersistence.getConfig(ConfigSchema.STANDARD_SYNC_STATE, connectionId.toString(), StandardSyncState.class)) - .thenThrow(new ConfigNotFoundException(ConfigSchema.STANDARD_SYNC_STATE, connectionId)); - assertEquals(Optional.empty(), configRepository.getConnectionState(connectionId)); - - reset(configPersistence); - when(configPersistence.getConfig(ConfigSchema.STANDARD_SYNC_STATE, connectionId.toString(), StandardSyncState.class)) - .thenReturn(connectionState); - assertEquals(Optional.of(state), configRepository.getConnectionState(connectionId)); - } - @Test void testUpdateConnectionState() throws Exception { final UUID connectionId = UUID.randomUUID(); @@ -448,4 +435,31 @@ void testHealthCheckFailure() throws SQLException { assertFalse(check); } + @Test + void testGetAllStreamsForConnection() throws Exception { + final UUID connectionId = UUID.randomUUID(); + final AirbyteStream airbyteStream = new AirbyteStream().withName("stream1").withNamespace("namespace1"); + final ConfiguredAirbyteStream configuredStream = new ConfiguredAirbyteStream().withStream(airbyteStream); + final AirbyteStream airbyteStream2 = new AirbyteStream().withName("stream2"); + final ConfiguredAirbyteStream configuredStream2 = new ConfiguredAirbyteStream().withStream(airbyteStream2); + final ConfiguredAirbyteCatalog configuredCatalog = new ConfiguredAirbyteCatalog().withStreams(List.of(configuredStream, configuredStream2)); + + final StandardSync sync = new StandardSync() + .withCatalog(configuredCatalog); + doReturn(sync) + .when(configRepository) + 
.getStandardSync(connectionId); + + final List result = configRepository.getAllStreamsForConnection(connectionId); + assertEquals(2, result.size()); + + assertTrue( + result.stream().anyMatch( + streamDescriptor -> streamDescriptor.getName().equals("stream1") && streamDescriptor.getNamespace().equals("namespace1"))); + assertTrue( + result.stream().anyMatch( + streamDescriptor -> streamDescriptor.getName().equals("stream2") && streamDescriptor.getNamespace() == null)); + + } + } diff --git a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/StatePersistenceTest.java b/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/StatePersistenceTest.java new file mode 100644 index 000000000000..e782d76bdb47 --- /dev/null +++ b/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/StatePersistenceTest.java @@ -0,0 +1,586 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.config.persistence; + +import static org.mockito.Mockito.mock; + +import com.fasterxml.jackson.databind.JsonNode; +import io.airbyte.commons.enums.Enums; +import io.airbyte.commons.json.Jsons; +import io.airbyte.config.DestinationConnection; +import io.airbyte.config.SourceConnection; +import io.airbyte.config.StandardDestinationDefinition; +import io.airbyte.config.StandardSourceDefinition; +import io.airbyte.config.StandardSync; +import io.airbyte.config.StandardWorkspace; +import io.airbyte.config.State; +import io.airbyte.config.StateType; +import io.airbyte.config.StateWrapper; +import io.airbyte.config.persistence.split_secrets.JsonSecretsProcessor; +import io.airbyte.db.factory.DSLContextFactory; +import io.airbyte.db.factory.FlywayFactory; +import io.airbyte.db.init.DatabaseInitializationException; +import io.airbyte.db.instance.configs.ConfigsDatabaseMigrator; +import io.airbyte.db.instance.configs.ConfigsDatabaseTestProvider; +import io.airbyte.protocol.models.AirbyteGlobalState; 
+import io.airbyte.protocol.models.AirbyteStateMessage; +import io.airbyte.protocol.models.AirbyteStateMessage.AirbyteStateType; +import io.airbyte.protocol.models.AirbyteStreamState; +import io.airbyte.protocol.models.StreamDescriptor; +import io.airbyte.test.utils.DatabaseConnectionHelper; +import io.airbyte.validation.json.JsonValidationException; +import java.io.IOException; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; +import java.util.Optional; +import java.util.UUID; +import org.jooq.JSONB; +import org.jooq.SQLDialect; +import org.jooq.impl.DSL; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +public class StatePersistenceTest extends BaseDatabaseConfigPersistenceTest { + + private ConfigRepository configRepository; + private StatePersistence statePersistence; + private UUID connectionId; + + @Test + public void testReadingNonExistingState() throws IOException { + Assertions.assertTrue(statePersistence.getCurrentState(UUID.randomUUID()).isEmpty()); + } + + @Test + public void testLegacyReadWrite() throws IOException { + final StateWrapper state0 = new StateWrapper() + .withStateType(StateType.LEGACY) + .withLegacyState(Jsons.deserialize("{\"woot\": \"legacy states is passthrough\"}")); + + // Initial write/read loop, making sure we read what we wrote + statePersistence.updateOrCreateState(connectionId, state0); + final Optional state1 = statePersistence.getCurrentState(connectionId); + + Assertions.assertTrue(state1.isPresent()); + Assertions.assertEquals(StateType.LEGACY, state1.get().getStateType()); + Assertions.assertEquals(state0.getLegacyState(), state1.get().getLegacyState()); + + // Updating a state + final JsonNode newStateJson = Jsons.deserialize("{\"woot\": \"new state\"}"); + final StateWrapper state2 = clone(state1.get()).withLegacyState(newStateJson); + 
statePersistence.updateOrCreateState(connectionId, state2); + final Optional state3 = statePersistence.getCurrentState(connectionId); + + Assertions.assertTrue(state3.isPresent()); + Assertions.assertEquals(StateType.LEGACY, state3.get().getStateType()); + Assertions.assertEquals(newStateJson, state3.get().getLegacyState()); + + // Deleting a state + final StateWrapper state4 = clone(state3.get()).withLegacyState(null); + statePersistence.updateOrCreateState(connectionId, state4); + Assertions.assertTrue(statePersistence.getCurrentState(connectionId).isEmpty()); + } + + @Test + public void testLegacyMigrationToGlobal() throws IOException { + final StateWrapper state0 = new StateWrapper() + .withStateType(StateType.LEGACY) + .withLegacyState(Jsons.deserialize("{\"woot\": \"legacy states is passthrough\"}")); + + statePersistence.updateOrCreateState(connectionId, state0); + + final StateWrapper newGlobalState = new StateWrapper() + .withStateType(StateType.GLOBAL) + .withGlobal(new AirbyteStateMessage() + .withType(AirbyteStateType.GLOBAL) + .withGlobal(new AirbyteGlobalState() + .withSharedState(Jsons.deserialize("\"woot\"")) + .withStreamStates(Arrays.asList( + new AirbyteStreamState() + .withStreamDescriptor(new StreamDescriptor().withName("s1").withNamespace("n2")) + .withStreamState(Jsons.deserialize("\"state1\"")), + new AirbyteStreamState() + .withStreamDescriptor(new StreamDescriptor().withName("s1")) + .withStreamState(Jsons.deserialize("\"state2\"")))))); + statePersistence.updateOrCreateState(connectionId, newGlobalState); + final StateWrapper storedGlobalState = statePersistence.getCurrentState(connectionId).orElseThrow(); + assertEquals(newGlobalState, storedGlobalState); + } + + @Test + public void testLegacyMigrationToStream() throws IOException { + final StateWrapper state0 = new StateWrapper() + .withStateType(StateType.LEGACY) + .withLegacyState(Jsons.deserialize("{\"woot\": \"legacy states is passthrough\"}")); + + 
statePersistence.updateOrCreateState(connectionId, state0); + + final StateWrapper newStreamState = new StateWrapper() + .withStateType(StateType.STREAM) + .withStateMessages(Arrays.asList( + new AirbyteStateMessage() + .withType(AirbyteStateType.STREAM) + .withStream(new AirbyteStreamState() + .withStreamDescriptor(new StreamDescriptor().withName("s1").withNamespace("n1")) + .withStreamState(Jsons.deserialize("\"state s1.n1\""))), + new AirbyteStateMessage() + .withType(AirbyteStateType.STREAM) + .withStream(new AirbyteStreamState() + .withStreamDescriptor(new StreamDescriptor().withName("s2")) + .withStreamState(Jsons.deserialize("\"state s2\""))))); + statePersistence.updateOrCreateState(connectionId, newStreamState); + final StateWrapper storedStreamState = statePersistence.getCurrentState(connectionId).orElseThrow(); + assertEquals(newStreamState, storedStreamState); + } + + @Test + public void testGlobalReadWrite() throws IOException { + final StateWrapper state0 = new StateWrapper() + .withStateType(StateType.GLOBAL) + .withGlobal(new AirbyteStateMessage() + .withType(AirbyteStateType.GLOBAL) + .withGlobal(new AirbyteGlobalState() + .withSharedState(Jsons.deserialize("\"my global state\"")) + .withStreamStates(Arrays.asList( + new AirbyteStreamState() + .withStreamDescriptor(new StreamDescriptor().withName("s1").withNamespace("n2")) + .withStreamState(Jsons.deserialize("\"state1\"")), + new AirbyteStreamState() + .withStreamDescriptor(new StreamDescriptor().withName("s1")) + .withStreamState(Jsons.deserialize("\"state2\"")))))); + + // Initial write/read loop, making sure we read what we wrote + statePersistence.updateOrCreateState(connectionId, state0); + final Optional state1 = statePersistence.getCurrentState(connectionId); + Assertions.assertTrue(state1.isPresent()); + assertEquals(state0, state1.get()); + + // Updating a state + final StateWrapper state2 = clone(state1.get()); + state2.getGlobal() + 
.getGlobal().withSharedState(Jsons.deserialize("\"updated shared state\"")) + .getStreamStates().get(1).withStreamState(Jsons.deserialize("\"updated state2\"")); + statePersistence.updateOrCreateState(connectionId, state2); + final Optional state3 = statePersistence.getCurrentState(connectionId); + + Assertions.assertTrue(state3.isPresent()); + assertEquals(state2, state3.get()); + + // Updating a state with name and namespace + final StateWrapper state4 = clone(state1.get()); + state4.getGlobal().getGlobal() + .getStreamStates().get(0).withStreamState(Jsons.deserialize("\"updated state1\"")); + statePersistence.updateOrCreateState(connectionId, state4); + final Optional state5 = statePersistence.getCurrentState(connectionId); + + Assertions.assertTrue(state5.isPresent()); + assertEquals(state4, state5.get()); + } + + @Test + public void testGlobalPartialReset() throws IOException { + final StateWrapper state0 = new StateWrapper() + .withStateType(StateType.GLOBAL) + .withGlobal(new AirbyteStateMessage() + .withType(AirbyteStateType.GLOBAL) + .withGlobal(new AirbyteGlobalState() + .withSharedState(Jsons.deserialize("\"my global state\"")) + .withStreamStates(Arrays.asList( + new AirbyteStreamState() + .withStreamDescriptor(new StreamDescriptor().withName("s1").withNamespace("n2")) + .withStreamState(Jsons.deserialize("\"state1\"")), + new AirbyteStreamState() + .withStreamDescriptor(new StreamDescriptor().withName("s1")) + .withStreamState(Jsons.deserialize("\"state2\"")))))); + + // Set the initial state + statePersistence.updateOrCreateState(connectionId, state0); + + // incomplete reset does not remove the state + final StateWrapper incompletePartialReset = new StateWrapper() + .withStateType(StateType.GLOBAL) + .withGlobal(new AirbyteStateMessage() + .withType(AirbyteStateType.GLOBAL) + .withGlobal(new AirbyteGlobalState() + .withSharedState(Jsons.deserialize("\"my global state\"")) + .withStreamStates(Arrays.asList( + new AirbyteStreamState() + 
.withStreamDescriptor(new StreamDescriptor().withName("s1")) + .withStreamState(Jsons.deserialize("\"state2\"")))))); + statePersistence.updateOrCreateState(connectionId, incompletePartialReset); + final StateWrapper incompletePartialResetResult = statePersistence.getCurrentState(connectionId).orElseThrow(); + Assertions.assertEquals(state0, incompletePartialResetResult); + + // The good partial reset + final StateWrapper partialReset = new StateWrapper() + .withStateType(StateType.GLOBAL) + .withGlobal(new AirbyteStateMessage() + .withType(AirbyteStateType.GLOBAL) + .withGlobal(new AirbyteGlobalState() + .withSharedState(Jsons.deserialize("\"my global state\"")) + .withStreamStates(Arrays.asList( + new AirbyteStreamState() + .withStreamDescriptor(new StreamDescriptor().withName("s1").withNamespace("n2")) + .withStreamState(Jsons.deserialize("\"state1\"")), + new AirbyteStreamState() + .withStreamDescriptor(new StreamDescriptor().withName("s1")) + .withStreamState(null))))); + statePersistence.updateOrCreateState(connectionId, partialReset); + final StateWrapper partialResetResult = statePersistence.getCurrentState(connectionId).orElseThrow(); + + Assertions.assertEquals(partialReset.getGlobal().getGlobal().getSharedState(), + partialResetResult.getGlobal().getGlobal().getSharedState()); + // {"name": "s1"} should have been removed from the stream states + Assertions.assertEquals(1, partialResetResult.getGlobal().getGlobal().getStreamStates().size()); + Assertions.assertEquals(partialReset.getGlobal().getGlobal().getStreamStates().get(0), + partialResetResult.getGlobal().getGlobal().getStreamStates().get(0)); + } + + @Test + public void testGlobalFullReset() throws IOException { + final StateWrapper state0 = new StateWrapper() + .withStateType(StateType.GLOBAL) + .withGlobal(new AirbyteStateMessage() + .withType(AirbyteStateType.GLOBAL) + .withGlobal(new AirbyteGlobalState() + .withSharedState(Jsons.deserialize("\"my global state\"")) + 
.withStreamStates(Arrays.asList( + new AirbyteStreamState() + .withStreamDescriptor(new StreamDescriptor().withName("s1").withNamespace("n2")) + .withStreamState(Jsons.deserialize("\"state1\"")), + new AirbyteStreamState() + .withStreamDescriptor(new StreamDescriptor().withName("s1")) + .withStreamState(Jsons.deserialize("\"state2\"")))))); + + final StateWrapper fullReset = new StateWrapper() + .withStateType(StateType.GLOBAL) + .withGlobal(new AirbyteStateMessage() + .withType(AirbyteStateType.GLOBAL) + .withGlobal(new AirbyteGlobalState() + .withSharedState(null) + .withStreamStates(Arrays.asList( + new AirbyteStreamState() + .withStreamDescriptor(new StreamDescriptor().withName("s1").withNamespace("n2")) + .withStreamState(null), + new AirbyteStreamState() + .withStreamDescriptor(new StreamDescriptor().withName("s1")) + .withStreamState(null)))));; + + statePersistence.updateOrCreateState(connectionId, state0); + statePersistence.updateOrCreateState(connectionId, fullReset); + final Optional fullResetResult = statePersistence.getCurrentState(connectionId); + Assertions.assertTrue(fullResetResult.isEmpty()); + } + + @Test + public void testGlobalStateAllowsEmptyNameAndNamespace() throws IOException { + final StateWrapper state0 = new StateWrapper() + .withStateType(StateType.GLOBAL) + .withGlobal(new AirbyteStateMessage() + .withType(AirbyteStateType.GLOBAL) + .withGlobal(new AirbyteGlobalState() + .withSharedState(Jsons.deserialize("\"my global state\"")) + .withStreamStates(Arrays.asList( + new AirbyteStreamState() + .withStreamDescriptor(new StreamDescriptor().withName("")) + .withStreamState(Jsons.deserialize("\"empty name state\"")), + new AirbyteStreamState() + .withStreamDescriptor(new StreamDescriptor().withName("").withNamespace("")) + .withStreamState(Jsons.deserialize("\"empty name and namespace state\"")))))); + + statePersistence.updateOrCreateState(connectionId, state0); + final StateWrapper state1 = 
statePersistence.getCurrentState(connectionId).orElseThrow(); + assertEquals(state0, state1); + } + + @Test + public void testStreamReadWrite() throws IOException { + final StateWrapper state0 = new StateWrapper() + .withStateType(StateType.STREAM) + .withStateMessages(Arrays.asList( + new AirbyteStateMessage() + .withType(AirbyteStateType.STREAM) + .withStream(new AirbyteStreamState() + .withStreamDescriptor(new StreamDescriptor().withName("s1").withNamespace("n1")) + .withStreamState(Jsons.deserialize("\"state s1.n1\""))), + new AirbyteStateMessage() + .withType(AirbyteStateType.STREAM) + .withStream(new AirbyteStreamState() + .withStreamDescriptor(new StreamDescriptor().withName("s2")) + .withStreamState(Jsons.deserialize("\"state s2\""))))); + + // Initial write/read loop, making sure we read what we wrote + statePersistence.updateOrCreateState(connectionId, state0); + final StateWrapper state1 = statePersistence.getCurrentState(connectionId).orElseThrow(); + assertEquals(state0, state1); + + // Updating a state + final StateWrapper state2 = clone(state1); + state2.getStateMessages().get(1).getStream().withStreamState(Jsons.deserialize("\"updated state s2\"")); + statePersistence.updateOrCreateState(connectionId, state2); + final StateWrapper state3 = statePersistence.getCurrentState(connectionId).orElseThrow(); + assertEquals(state2, state3); + + // Updating a state with name and namespace + final StateWrapper state4 = clone(state1); + state4.getStateMessages().get(0).getStream().withStreamState(Jsons.deserialize("\"updated state s1\"")); + statePersistence.updateOrCreateState(connectionId, state4); + final StateWrapper state5 = statePersistence.getCurrentState(connectionId).orElseThrow(); + assertEquals(state4, state5); + } + + @Test + public void testStreamPartialUpdates() throws IOException { + final StateWrapper state0 = new StateWrapper() + .withStateType(StateType.STREAM) + .withStateMessages(Arrays.asList( + new AirbyteStateMessage() + 
.withType(AirbyteStateType.STREAM) + .withStream(new AirbyteStreamState() + .withStreamDescriptor(new StreamDescriptor().withName("s1").withNamespace("n1")) + .withStreamState(Jsons.deserialize("\"state s1.n1\""))), + new AirbyteStateMessage() + .withType(AirbyteStateType.STREAM) + .withStream(new AirbyteStreamState() + .withStreamDescriptor(new StreamDescriptor().withName("s2")) + .withStreamState(Jsons.deserialize("\"state s2\""))))); + + statePersistence.updateOrCreateState(connectionId, state0); + + // Partial update + final StateWrapper partialUpdate = new StateWrapper() + .withStateType(StateType.STREAM) + .withStateMessages(Collections.singletonList( + new AirbyteStateMessage() + .withType(AirbyteStateType.STREAM) + .withStream(new AirbyteStreamState() + .withStreamDescriptor(new StreamDescriptor().withName("s1").withNamespace("n1")) + .withStreamState(Jsons.deserialize("\"updated\""))))); + statePersistence.updateOrCreateState(connectionId, partialUpdate); + final StateWrapper partialUpdateResult = statePersistence.getCurrentState(connectionId).orElseThrow(); + assertEquals( + new StateWrapper() + .withStateType(StateType.STREAM) + .withStateMessages(Arrays.asList( + new AirbyteStateMessage() + .withType(AirbyteStateType.STREAM) + .withStream(new AirbyteStreamState() + .withStreamDescriptor(new StreamDescriptor().withName("s1").withNamespace("n1")) + .withStreamState(Jsons.deserialize("\"updated\""))), + new AirbyteStateMessage() + .withType(AirbyteStateType.STREAM) + .withStream(new AirbyteStreamState() + .withStreamDescriptor(new StreamDescriptor().withName("s2")) + .withStreamState(Jsons.deserialize("\"state s2\""))))), + partialUpdateResult); + + // Partial Reset + final StateWrapper partialReset = new StateWrapper() + .withStateType(StateType.STREAM) + .withStateMessages(Collections.singletonList( + new AirbyteStateMessage() + .withType(AirbyteStateType.STREAM) + .withStream(new AirbyteStreamState() + .withStreamDescriptor(new 
StreamDescriptor().withName("s2")) + .withStreamState(null)))); + statePersistence.updateOrCreateState(connectionId, partialReset); + final StateWrapper partialResetResult = statePersistence.getCurrentState(connectionId).orElseThrow(); + assertEquals( + new StateWrapper() + .withStateType(StateType.STREAM) + .withStateMessages(Arrays.asList( + new AirbyteStateMessage() + .withType(AirbyteStateType.STREAM) + .withStream(new AirbyteStreamState() + .withStreamDescriptor(new StreamDescriptor().withName("s1").withNamespace("n1")) + .withStreamState(Jsons.deserialize("\"updated\""))))), + partialResetResult); + } + + @Test + public void testStreamFullReset() throws IOException { + final StateWrapper state0 = new StateWrapper() + .withStateType(StateType.STREAM) + .withStateMessages(Arrays.asList( + new AirbyteStateMessage() + .withType(AirbyteStateType.STREAM) + .withStream(new AirbyteStreamState() + .withStreamDescriptor(new StreamDescriptor().withName("s1").withNamespace("n1")) + .withStreamState(Jsons.deserialize("\"state s1.n1\""))), + new AirbyteStateMessage() + .withType(AirbyteStateType.STREAM) + .withStream(new AirbyteStreamState() + .withStreamDescriptor(new StreamDescriptor().withName("s2")) + .withStreamState(Jsons.deserialize("\"state s2\""))))); + + statePersistence.updateOrCreateState(connectionId, state0); + + // Partial update + final StateWrapper fullReset = new StateWrapper() + .withStateType(StateType.STREAM) + .withStateMessages(Arrays.asList( + new AirbyteStateMessage() + .withType(AirbyteStateType.STREAM) + .withStream(new AirbyteStreamState() + .withStreamDescriptor(new StreamDescriptor().withName("s1").withNamespace("n1")) + .withStreamState(null)), + new AirbyteStateMessage() + .withType(AirbyteStateType.STREAM) + .withStream(new AirbyteStreamState() + .withStreamDescriptor(new StreamDescriptor().withName("s2")) + .withStreamState(null)))); + statePersistence.updateOrCreateState(connectionId, fullReset); + final Optional fullResetResult = 
statePersistence.getCurrentState(connectionId); + Assertions.assertTrue(fullResetResult.isEmpty()); + } + + @Test + public void testInconsistentTypeUpdates() throws IOException { + final StateWrapper streamState = new StateWrapper() + .withStateType(StateType.STREAM) + .withStateMessages(Arrays.asList( + new AirbyteStateMessage() + .withType(AirbyteStateType.STREAM) + .withStream(new AirbyteStreamState() + .withStreamDescriptor(new StreamDescriptor().withName("s1").withNamespace("n1")) + .withStreamState(Jsons.deserialize("\"state s1.n1\""))), + new AirbyteStateMessage() + .withType(AirbyteStateType.STREAM) + .withStream(new AirbyteStreamState() + .withStreamDescriptor(new StreamDescriptor().withName("s2")) + .withStreamState(Jsons.deserialize("\"state s2\""))))); + statePersistence.updateOrCreateState(connectionId, streamState); + + Assertions.assertThrows(IllegalStateException.class, () -> { + final StateWrapper globalState = new StateWrapper() + .withStateType(StateType.GLOBAL) + .withGlobal(new AirbyteStateMessage() + .withType(AirbyteStateType.GLOBAL) + .withGlobal(new AirbyteGlobalState() + .withSharedState(Jsons.deserialize("\"my global state\"")) + .withStreamStates(Arrays.asList( + new AirbyteStreamState() + .withStreamDescriptor(new StreamDescriptor().withName("")) + .withStreamState(Jsons.deserialize("\"empty name state\"")), + new AirbyteStreamState() + .withStreamDescriptor(new StreamDescriptor().withName("").withNamespace("")) + .withStreamState(Jsons.deserialize("\"empty name and namespace state\"")))))); + statePersistence.updateOrCreateState(connectionId, globalState); + }); + + // We should be guarded against those cases let's make sure we don't make things worse if we're in + // an inconsistent state + dslContext.insertInto(DSL.table("state")) + .columns(DSL.field("id"), DSL.field("connection_id"), DSL.field("type"), DSL.field("state")) + .values(UUID.randomUUID(), connectionId, 
io.airbyte.db.instance.configs.jooq.generated.enums.StateType.GLOBAL, JSONB.valueOf("{}")) + .execute(); + Assertions.assertThrows(IllegalStateException.class, () -> statePersistence.updateOrCreateState(connectionId, streamState)); + Assertions.assertThrows(IllegalStateException.class, () -> statePersistence.getCurrentState(connectionId)); + } + + @Test + public void testEnumsConversion() { + // Making sure StateType we write to the DB and the StateType from the protocols are aligned. + // Otherwise, we'll have to dig through runtime errors. + Assertions.assertTrue(Enums.isCompatible( + io.airbyte.db.instance.configs.jooq.generated.enums.StateType.class, + io.airbyte.config.StateType.class)); + } + + @Test + public void testStatePersistenceLegacyReadConsistency() throws IOException { + final JsonNode jsonState = Jsons.deserialize("{\"my\": \"state\"}"); + final State state = new State().withState(jsonState); + configRepository.updateConnectionState(connectionId, state); + + final StateWrapper readStateWrapper = statePersistence.getCurrentState(connectionId).orElseThrow(); + Assertions.assertEquals(StateType.LEGACY, readStateWrapper.getStateType()); + Assertions.assertEquals(state.getState(), readStateWrapper.getLegacyState()); + } + + @Test + public void testStatePersistenceLegacyWriteConsistency() throws IOException { + final JsonNode jsonState = Jsons.deserialize("{\"my\": \"state\"}"); + final StateWrapper stateWrapper = new StateWrapper().withStateType(StateType.LEGACY).withLegacyState(jsonState); + statePersistence.updateOrCreateState(connectionId, stateWrapper); + + // Making sure we still follow the legacy format + final List readStates = dslContext + .selectFrom("state") + .where(DSL.field("connection_id").eq(connectionId)) + .fetch().map(r -> Jsons.deserialize(r.get(DSL.field("state", JSONB.class)).data(), State.class)) + .stream().toList(); + Assertions.assertEquals(1, readStates.size()); + + Assertions.assertEquals(readStates.get(0).getState(), 
stateWrapper.getLegacyState()); + } + + @BeforeEach + public void beforeEach() throws DatabaseInitializationException, IOException, JsonValidationException { + dataSource = DatabaseConnectionHelper.createDataSource(container); + dslContext = DSLContextFactory.create(dataSource, SQLDialect.POSTGRES); + flyway = FlywayFactory.create(dataSource, DatabaseConfigPersistenceLoadDataTest.class.getName(), + ConfigsDatabaseMigrator.DB_IDENTIFIER, ConfigsDatabaseMigrator.MIGRATION_FILE_LOCATION); + database = new ConfigsDatabaseTestProvider(dslContext, flyway).create(true); + setupTestData(); + + statePersistence = new StatePersistence(database); + } + + @AfterEach + public void afterEach() { + // Making sure we reset between tests + dslContext.dropSchemaIfExists("public").cascade().execute(); + dslContext.createSchema("public").execute(); + dslContext.setSchema("public").execute(); + } + + private void setupTestData() throws JsonValidationException, IOException { + configRepository = new ConfigRepository( + new DatabaseConfigPersistence(database, mock(JsonSecretsProcessor.class)), + database); + + final StandardWorkspace workspace = MockData.standardWorkspaces().get(0); + final StandardSourceDefinition sourceDefinition = MockData.publicSourceDefinition(); + final SourceConnection sourceConnection = MockData.sourceConnections().get(0); + final StandardDestinationDefinition destinationDefinition = MockData.publicDestinationDefinition(); + final DestinationConnection destinationConnection = MockData.destinationConnections().get(0); + final StandardSync sync = MockData.standardSyncs().get(0); + + configRepository.writeStandardWorkspace(workspace); + configRepository.writeStandardSourceDefinition(sourceDefinition); + configRepository.writeSourceConnectionNoSecrets(sourceConnection); + configRepository.writeStandardDestinationDefinition(destinationDefinition); + configRepository.writeDestinationConnectionNoSecrets(destinationConnection); + 
configRepository.writeStandardSyncOperation(MockData.standardSyncOperations().get(0)); + configRepository.writeStandardSyncOperation(MockData.standardSyncOperations().get(1)); + configRepository.writeStandardSync(sync); + + connectionId = sync.getConnectionId(); + } + + private StateWrapper clone(final StateWrapper state) { + return switch (state.getStateType()) { + case LEGACY -> new StateWrapper() + .withLegacyState(Jsons.deserialize(Jsons.serialize(state.getLegacyState()))) + .withStateType(state.getStateType()); + case STREAM -> new StateWrapper() + .withStateMessages( + state.getStateMessages().stream().map(msg -> Jsons.deserialize(Jsons.serialize(msg), AirbyteStateMessage.class)).toList()) + .withStateType(state.getStateType()); + case GLOBAL -> new StateWrapper() + .withGlobal(Jsons.deserialize(Jsons.serialize(state.getGlobal()), AirbyteStateMessage.class)) + .withStateType(state.getStateType()); + }; + } + + private void assertEquals(StateWrapper lhs, StateWrapper rhs) { + Assertions.assertEquals(Jsons.serialize(lhs), Jsons.serialize(rhs)); + } + +} diff --git a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/StreamResetPersistenceTest.java b/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/StreamResetPersistenceTest.java index 30dd5f486faf..3dd20f0ed557 100644 --- a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/StreamResetPersistenceTest.java +++ b/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/StreamResetPersistenceTest.java @@ -8,7 +8,6 @@ import static org.junit.jupiter.api.Assertions.assertTrue; import static org.mockito.Mockito.spy; -import io.airbyte.config.StreamDescriptor; import io.airbyte.db.factory.DSLContextFactory; import io.airbyte.db.factory.DataSourceFactory; import io.airbyte.db.factory.FlywayFactory; @@ -16,6 +15,7 @@ import io.airbyte.db.instance.configs.ConfigsDatabaseTestProvider; import 
io.airbyte.db.instance.development.DevDatabaseMigrator; import io.airbyte.db.instance.development.MigrationDevHelper; +import io.airbyte.protocol.models.StreamDescriptor; import io.airbyte.test.utils.DatabaseConnectionHelper; import java.util.ArrayList; import java.util.List; diff --git a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/split_secrets/VaultSecretPersistenceTest.java b/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/split_secrets/VaultSecretPersistenceTest.java new file mode 100644 index 000000000000..44251c5b6070 --- /dev/null +++ b/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/split_secrets/VaultSecretPersistenceTest.java @@ -0,0 +1,64 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.config.persistence.split_secrets; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.junit.jupiter.api.Assertions.assertEquals; + +import lombok.val; +import org.apache.commons.lang3.RandomUtils; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.testcontainers.vault.VaultContainer; + +public class VaultSecretPersistenceTest { + + private VaultSecretPersistence persistence; + private String baseCoordinate; + + private VaultContainer vaultContainer; + + @BeforeEach + void setUp() { + vaultContainer = new VaultContainer("vault").withVaultToken("vault-dev-token-id"); + vaultContainer.start(); + + val vaultAddress = "http://" + vaultContainer.getHost() + ":" + vaultContainer.getFirstMappedPort(); + + persistence = new VaultSecretPersistence(vaultAddress, "secret/testing", "vault-dev-token-id"); + baseCoordinate = "VaultSecretPersistenceIntegrationTest_coordinate_" + RandomUtils.nextInt() % 20000; + } + + @AfterEach + void tearDown() { + vaultContainer.stop(); + } + + @Test + void testReadWriteUpdate() { + val coordinate1 = new 
SecretCoordinate(baseCoordinate, 1); + + // try reading non-existent value + val firstRead = persistence.read(coordinate1); + assertThat(firstRead.isEmpty()).isTrue(); + + // write + val firstPayload = "abc"; + persistence.write(coordinate1, firstPayload); + val secondRead = persistence.read(coordinate1); + assertThat(secondRead.isPresent()).isTrue(); + assertEquals(firstPayload, secondRead.get()); + + // update + val secondPayload = "def"; + val coordinate2 = new SecretCoordinate(baseCoordinate, 2); + persistence.write(coordinate2, secondPayload); + val thirdRead = persistence.read(coordinate2); + assertThat(thirdRead.isPresent()).isTrue(); + assertEquals(secondPayload, thirdRead.get()); + } + +} diff --git a/airbyte-config/init/src/main/resources/icons/dockerhub.svg b/airbyte-config/init/src/main/resources/icons/dockerhub.svg new file mode 100644 index 000000000000..a8728893131d --- /dev/null +++ b/airbyte-config/init/src/main/resources/icons/dockerhub.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/airbyte-config/init/src/main/resources/icons/metabase.svg b/airbyte-config/init/src/main/resources/icons/metabase.svg new file mode 100644 index 000000000000..82584726e0a6 --- /dev/null +++ b/airbyte-config/init/src/main/resources/icons/metabase.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/airbyte-config/init/src/main/resources/icons/webflow.svg b/airbyte-config/init/src/main/resources/icons/webflow.svg new file mode 100644 index 000000000000..e4e69b7c5387 --- /dev/null +++ b/airbyte-config/init/src/main/resources/icons/webflow.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/airbyte-config/init/src/main/resources/seed/destination_definitions.yaml b/airbyte-config/init/src/main/resources/seed/destination_definitions.yaml index ca4326fe3352..e65f25977743 100644 --- a/airbyte-config/init/src/main/resources/seed/destination_definitions.yaml +++ b/airbyte-config/init/src/main/resources/seed/destination_definitions.yaml @@ -1,7 +1,7 
@@ - name: Azure Blob Storage destinationDefinitionId: b4c5d105-31fd-4817-96b6-cb923bfc04cb dockerRepository: airbyte/destination-azure-blob-storage - dockerImageTag: 0.1.4 + dockerImageTag: 0.1.5 documentationUrl: https://docs.airbyte.io/integrations/destinations/azureblobstorage icon: azureblobstorage.svg resourceRequirements: @@ -27,7 +27,7 @@ - name: BigQuery destinationDefinitionId: 22f6c74f-5699-40ff-833c-4a879ea40133 dockerRepository: airbyte/destination-bigquery - dockerImageTag: 1.1.8 + dockerImageTag: 1.1.11 documentationUrl: https://docs.airbyte.io/integrations/destinations/bigquery icon: bigquery.svg resourceRequirements: @@ -40,7 +40,7 @@ - name: BigQuery (denormalized typed struct) destinationDefinitionId: 079d5540-f236-4294-ba7c-ade8fd918496 dockerRepository: airbyte/destination-bigquery-denormalized - dockerImageTag: 1.1.8 + dockerImageTag: 1.1.11 documentationUrl: https://docs.airbyte.io/integrations/destinations/bigquery icon: bigquery.svg resourceRequirements: @@ -53,41 +53,41 @@ - name: Cassandra destinationDefinitionId: 707456df-6f4f-4ced-b5c6-03f73bcad1c5 dockerRepository: airbyte/destination-cassandra - dockerImageTag: 0.1.1 + dockerImageTag: 0.1.2 documentationUrl: https://docs.airbyte.io/integrations/destinations/cassandra icon: cassandra.svg releaseStage: alpha - name: Chargify (Keen) destinationDefinitionId: 81740ce8-d764-4ea7-94df-16bb41de36ae dockerRepository: airbyte/destination-keen - dockerImageTag: 0.2.2 + dockerImageTag: 0.2.3 documentationUrl: https://docs.airbyte.io/integrations/destinations/keen icon: chargify.svg releaseStage: alpha - name: Clickhouse destinationDefinitionId: ce0d828e-1dc4-496c-b122-2da42e637e48 dockerRepository: airbyte/destination-clickhouse - dockerImageTag: 0.1.6 + dockerImageTag: 0.1.7 documentationUrl: https://docs.airbyte.io/integrations/destinations/clickhouse releaseStage: alpha -- name: Databricks Delta Lake +- name: Databricks Lakehouse destinationDefinitionId: 072d5540-f236-4294-ba7c-ade8fd918496 
dockerRepository: airbyte/destination-databricks - dockerImageTag: 0.2.1 + dockerImageTag: 0.2.3 documentationUrl: https://docs.airbyte.io/integrations/destinations/databricks icon: databricks.svg releaseStage: alpha - name: DynamoDB destinationDefinitionId: 8ccd8909-4e99-4141-b48d-4984b70b2d89 dockerRepository: airbyte/destination-dynamodb - dockerImageTag: 0.1.3 + dockerImageTag: 0.1.4 documentationUrl: https://docs.airbyte.io/integrations/destinations/dynamodb icon: dynamodb.svg releaseStage: alpha - name: E2E Testing destinationDefinitionId: 2eb65e87-983a-4fd7-b3e3-9d9dc6eb8537 dockerRepository: airbyte/destination-e2e-test - dockerImageTag: 0.2.2 + dockerImageTag: 0.2.4 documentationUrl: https://docs.airbyte.io/integrations/destinations/e2e-test icon: airbyte.svg - destinationDefinitionId: 68f351a7-2745-4bef-ad7f-996b8e51bb8c @@ -97,10 +97,16 @@ documentationUrl: https://docs.airbyte.io/integrations/destinations/elasticsearch icon: elasticsearch.svg releaseStage: alpha +- name: Firebolt + destinationDefinitionId: 18081484-02a5-4662-8dba-b270b582f321 + dockerRepository: airbyte/destination-firebolt + dockerImageTag: 0.1.0 + documentationUrl: https://docs.airbyte.io/integrations/destinations/firebolt + releaseStage: alpha - name: Google Cloud Storage (GCS) destinationDefinitionId: ca8f6566-e555-4b40-943a-545bf123117a dockerRepository: airbyte/destination-gcs - dockerImageTag: 0.2.6 + dockerImageTag: 0.2.9 documentationUrl: https://docs.airbyte.io/integrations/destinations/gcs icon: googlecloudstorage.svg resourceRequirements: @@ -120,28 +126,28 @@ - name: Google PubSub destinationDefinitionId: 356668e2-7e34-47f3-a3b0-67a8a481b692 dockerRepository: airbyte/destination-pubsub - dockerImageTag: 0.1.4 + dockerImageTag: 0.1.5 documentationUrl: https://docs.airbyte.io/integrations/destinations/pubsub icon: googlepubsub.svg releaseStage: alpha - name: Kafka destinationDefinitionId: 9f760101-60ae-462f-9ee6-b7a9dafd454d dockerRepository: airbyte/destination-kafka - 
dockerImageTag: 0.1.8 + dockerImageTag: 0.1.9 documentationUrl: https://docs.airbyte.io/integrations/destinations/kafka icon: kafka.svg releaseStage: alpha - name: Kinesis destinationDefinitionId: 6d1d66d4-26ab-4602-8d32-f85894b04955 dockerRepository: airbyte/destination-kinesis - dockerImageTag: 0.1.2 + dockerImageTag: 0.1.3 documentationUrl: https://docs.airbyte.io/integrations/destinations/kinesis icon: kinesis.svg releaseStage: alpha - name: Local CSV destinationDefinitionId: 8be1cf83-fde1-477f-a4ad-318d23c9f3c6 dockerRepository: airbyte/destination-csv - dockerImageTag: 0.2.9 + dockerImageTag: 0.2.10 documentationUrl: https://docs.airbyte.io/integrations/destinations/local-csv icon: file.svg releaseStage: alpha @@ -169,7 +175,7 @@ - name: MeiliSearch destinationDefinitionId: af7c921e-5892-4ff2-b6c1-4a5ab258fb7e dockerRepository: airbyte/destination-meilisearch - dockerImageTag: 0.2.12 + dockerImageTag: 0.2.13 documentationUrl: https://docs.airbyte.io/integrations/destinations/meilisearch icon: meilisearch.svg releaseStage: alpha @@ -183,7 +189,7 @@ - name: MySQL destinationDefinitionId: ca81ee7c-3163-4246-af40-094cc31e5e42 dockerRepository: airbyte/destination-mysql - dockerImageTag: 0.1.18 + dockerImageTag: 0.1.20 documentationUrl: https://docs.airbyte.io/integrations/destinations/mysql icon: mysql.svg releaseStage: alpha @@ -204,7 +210,7 @@ - name: Pulsar destinationDefinitionId: 2340cbba-358e-11ec-8d3d-0242ac130203 dockerRepository: airbyte/destination-pulsar - dockerImageTag: 0.1.1 + dockerImageTag: 0.1.2 documentationUrl: https://docs.airbyte.io/integrations/destinations/pulsar icon: pulsar.svg releaseStage: alpha @@ -218,14 +224,14 @@ - name: Redis destinationDefinitionId: d4d3fef9-e319-45c2-881a-bd02ce44cc9f dockerRepository: airbyte/destination-redis - dockerImageTag: 0.1.1 + dockerImageTag: 0.1.2 documentationUrl: https://docs.airbyte.io/integrations/destinations/redis icon: redis.svg releaseStage: alpha - name: Redshift destinationDefinitionId: 
f7a7d195-377f-cf5b-70a5-be6b819019dc dockerRepository: airbyte/destination-redshift - dockerImageTag: 0.3.37 + dockerImageTag: 0.3.46 documentationUrl: https://docs.airbyte.io/integrations/destinations/redshift icon: redshift.svg resourceRequirements: @@ -238,13 +244,13 @@ - name: Rockset destinationDefinitionId: 2c9d93a7-9a17-4789-9de9-f46f0097eb70 dockerRepository: airbyte/destination-rockset - dockerImageTag: 0.1.2 + dockerImageTag: 0.1.3 documentationUrl: https://docs.airbyte.io/integrations/destinations/rockset releaseStage: alpha - name: S3 destinationDefinitionId: 4816b78f-1489-44c1-9060-4b19d5fa9362 dockerRepository: airbyte/destination-s3 - dockerImageTag: 0.3.6 + dockerImageTag: 0.3.9 documentationUrl: https://docs.airbyte.io/integrations/destinations/s3 icon: s3.svg resourceRequirements: @@ -264,7 +270,7 @@ - name: Snowflake destinationDefinitionId: 424892c4-daac-4491-b35d-c6688ba547ba dockerRepository: airbyte/destination-snowflake - dockerImageTag: 0.4.28 + dockerImageTag: 0.4.30 documentationUrl: https://docs.airbyte.io/integrations/destinations/snowflake icon: snowflake.svg resourceRequirements: @@ -277,7 +283,7 @@ - name: MariaDB ColumnStore destinationDefinitionId: 294a4790-429b-40ae-9516-49826b9702e1 dockerRepository: airbyte/destination-mariadb-columnstore - dockerImageTag: 0.1.4 + dockerImageTag: 0.1.6 documentationUrl: https://docs.airbyte.io/integrations/destinations/mariadb-columnstore icon: mariadb.svg releaseStage: alpha @@ -291,7 +297,7 @@ - name: Scylla destinationDefinitionId: 3dc6f384-cd6b-4be3-ad16-a41450899bf0 dockerRepository: airbyte/destination-scylla - dockerImageTag: 0.1.1 + dockerImageTag: 0.1.2 documentationUrl: https://docs.airbyte.io/integrations/destinations/scylla icon: scylla.svg - name: Google Sheets diff --git a/airbyte-config/init/src/main/resources/seed/destination_specs.yaml b/airbyte-config/init/src/main/resources/seed/destination_specs.yaml index 1c91c36da49f..9e6c8facadd6 100644 --- 
a/airbyte-config/init/src/main/resources/seed/destination_specs.yaml +++ b/airbyte-config/init/src/main/resources/seed/destination_specs.yaml @@ -1,7 +1,7 @@ # This file is generated by io.airbyte.config.specs.SeedConnectorSpecGenerator. # Do NOT edit this file directly. See generator class for more details. --- -- dockerImage: "airbyte/destination-azure-blob-storage:0.1.4" +- dockerImage: "airbyte/destination-azure-blob-storage:0.1.5" spec: documentationUrl: "https://docs.airbyte.io/integrations/destinations/azureblobstorage" connectionSpecification: @@ -285,7 +285,7 @@ supported_destination_sync_modes: - "overwrite" - "append" -- dockerImage: "airbyte/destination-bigquery:1.1.8" +- dockerImage: "airbyte/destination-bigquery:1.1.11" spec: documentationUrl: "https://docs.airbyte.io/integrations/destinations/bigquery" connectionSpecification: @@ -366,7 +366,6 @@ order: 3 oneOf: - title: "Standard Inserts" - additionalProperties: false required: - "method" properties: @@ -374,7 +373,6 @@ type: "string" const: "Standard" - title: "GCS Staging" - additionalProperties: false required: - "method" - "gcs_bucket_name" @@ -438,19 +436,6 @@ examples: - "data_sync/test" order: 3 - part_size_mb: - title: "Block Size (MB) for GCS Multipart Upload (Optional)" - description: "This is the size of a \"Part\" being buffered in memory.\ - \ It limits the memory usage when writing. Larger values will allow\ - \ to upload a bigger files and improve the speed, but consumes more\ - \ memory. Allowed values: min=5MB, max=525MB Default: 5MB." - type: "integer" - default: 5 - minimum: 5 - maximum: 525 - examples: - - 5 - order: 4 keep_files_in_gcs-bucket: type: "string" description: "This upload method is supposed to temporary store records\ @@ -462,7 +447,7 @@ enum: - "Delete all tmp files from GCS" - "Keep all tmp files in GCS" - order: 5 + order: 4 credentials_json: type: "string" description: "The contents of the JSON service account key. 
Check out the\ @@ -510,7 +495,7 @@ - "overwrite" - "append" - "append_dedup" -- dockerImage: "airbyte/destination-bigquery-denormalized:1.1.8" +- dockerImage: "airbyte/destination-bigquery-denormalized:1.1.11" spec: documentationUrl: "https://docs.airbyte.io/integrations/destinations/bigquery" connectionSpecification: @@ -551,7 +536,6 @@ order: 2 oneOf: - title: "Standard Inserts" - additionalProperties: false required: - "method" properties: @@ -559,7 +543,6 @@ type: "string" const: "Standard" - title: "GCS Staging" - additionalProperties: false type: "object" required: - "method" @@ -627,19 +610,6 @@ examples: - "data_sync/test" order: 3 - part_size_mb: - title: "Block Size (MB) for GCS Multipart Upload (Optional)" - description: "This is the size of a \"Part\" being buffered in memory.\ - \ It limits the memory usage when writing. Larger values will allow\ - \ to upload a bigger files and improve the speed, but consumes more\ - \ memory. Allowed values: min=5MB, max=525MB Default: 5MB." - type: "integer" - default: 5 - minimum: 5 - maximum: 525 - examples: - - 5 - order: 4 keep_files_in_gcs-bucket: type: "string" description: "This upload method is supposed to temporary store records\ @@ -651,7 +621,7 @@ enum: - "Delete all tmp files from GCS" - "Keep all tmp files in GCS" - order: 5 + order: 4 credentials_json: type: "string" description: "The contents of the JSON service account key. 
Check out the\ @@ -723,7 +693,7 @@ supported_destination_sync_modes: - "overwrite" - "append" -- dockerImage: "airbyte/destination-cassandra:0.1.1" +- dockerImage: "airbyte/destination-cassandra:0.1.2" spec: documentationUrl: "https://docs.airbyte.io/integrations/destinations/cassandra" connectionSpecification: @@ -788,7 +758,7 @@ supported_destination_sync_modes: - "overwrite" - "append" -- dockerImage: "airbyte/destination-keen:0.2.2" +- dockerImage: "airbyte/destination-keen:0.2.3" spec: documentationUrl: "https://docs.airbyte.io/integrations/destinations/keen" connectionSpecification: @@ -827,7 +797,7 @@ supported_destination_sync_modes: - "overwrite" - "append" -- dockerImage: "airbyte/destination-clickhouse:0.1.6" +- dockerImage: "airbyte/destination-clickhouse:0.1.7" spec: documentationUrl: "https://docs.airbyte.io/integrations/destinations/clickhouse" connectionSpecification: @@ -996,12 +966,12 @@ - "overwrite" - "append" - "append_dedup" -- dockerImage: "airbyte/destination-databricks:0.2.1" +- dockerImage: "airbyte/destination-databricks:0.2.3" spec: documentationUrl: "https://docs.airbyte.io/integrations/destinations/databricks" connectionSpecification: $schema: "http://json-schema.org/draft-07/schema#" - title: "Databricks Delta Lake Destination Spec" + title: "Databricks Lakehouse Destination Spec" type: "object" required: - "accept_terms" @@ -1160,7 +1130,7 @@ supported_destination_sync_modes: - "overwrite" - "append" -- dockerImage: "airbyte/destination-dynamodb:0.1.3" +- dockerImage: "airbyte/destination-dynamodb:0.1.4" spec: documentationUrl: "https://docs.airbyte.io/integrations/destinations/dynamodb" connectionSpecification: @@ -1241,7 +1211,7 @@ supported_destination_sync_modes: - "overwrite" - "append" -- dockerImage: "airbyte/destination-e2e-test:0.2.2" +- dockerImage: "airbyte/destination-e2e-test:0.2.4" spec: documentationUrl: "https://docs.airbyte.io/integrations/destinations/e2e-test" connectionSpecification: @@ -1486,7 +1456,103 @@ - 
"overwrite" - "append" supportsNamespaces: true -- dockerImage: "airbyte/destination-gcs:0.2.6" +- dockerImage: "airbyte/destination-firebolt:0.1.0" + spec: + documentationUrl: "https://docs.airbyte.io/integrations/destinations/firebolt" + connectionSpecification: + $schema: "http://json-schema.org/draft-07/schema#" + title: "Firebolt Spec" + type: "object" + required: + - "username" + - "password" + - "database" + additionalProperties: false + properties: + username: + type: "string" + title: "Username" + description: "Firebolt email address you use to login." + examples: + - "username@email.com" + order: 0 + password: + type: "string" + title: "Password" + description: "Firebolt password." + airbyte_secret: true + order: 1 + account: + type: "string" + title: "Account" + description: "Firebolt account to login." + host: + type: "string" + title: "Host" + description: "The host name of your Firebolt database." + examples: + - "api.app.firebolt.io" + database: + type: "string" + title: "Database" + description: "The database to connect to." + engine: + type: "string" + title: "Engine" + description: "Engine name or url to connect to." + loading_method: + type: "object" + title: "Loading Method" + description: "Loading method used to select the way data will be uploaded\ + \ to Firebolt" + oneOf: + - title: "SQL Inserts" + additionalProperties: false + required: + - "method" + properties: + method: + type: "string" + const: "SQL" + - title: "External Table via S3" + additionalProperties: false + required: + - "method" + - "s3_bucket" + - "s3_region" + - "aws_key_id" + - "aws_key_secret" + properties: + method: + type: "string" + const: "S3" + s3_bucket: + type: "string" + title: "S3 bucket name" + description: "The name of the S3 bucket." + s3_region: + type: "string" + title: "S3 region name" + description: "Region name of the S3 bucket." 
+ examples: + - "us-east-1" + aws_key_id: + type: "string" + title: "AWS Key ID" + airbyte_secret: true + description: "AWS access key granting read and write access to S3." + aws_key_secret: + type: "string" + title: "AWS Key Secret" + airbyte_secret: true + description: "Corresponding secret part of the AWS Key" + supportsIncremental: true + supportsNormalization: false + supportsDBT: true + supported_destination_sync_modes: + - "overwrite" + - "append" +- dockerImage: "airbyte/destination-gcs:0.2.9" spec: documentationUrl: "https://docs.airbyte.io/integrations/destinations/gcs" connectionSpecification: @@ -1498,7 +1564,6 @@ - "gcs_bucket_path" - "credential" - "format" - additionalProperties: false properties: gcs_bucket_name: title: "GCS Bucket Name" @@ -1720,16 +1785,6 @@ enum: - "snappy" default: "snappy" - part_size_mb: - title: "Block Size (MB) for GCS multipart upload (Optional)" - description: "This is the size of a \"Part\" being buffered in memory.\ - \ It limits the memory usage when writing. Larger values will allow\ - \ to upload a bigger files and improve the speed, but consumes9\ - \ more memory. Allowed values: min=5MB, max=525MB Default: 5MB." - type: "integer" - default: 5 - examples: - - 5 - title: "CSV: Comma-Separated Values" required: - "format_type" @@ -1748,16 +1803,6 @@ enum: - "No flattening" - "Root level flattening" - part_size_mb: - title: "Block Size (MB) for GCS multipart upload (Optional)" - description: "This is the size of a \"Part\" being buffered in memory.\ - \ It limits the memory usage when writing. Larger values will allow\ - \ to upload a bigger files and improve the speed, but consumes9\ - \ more memory. Allowed values: min=5MB, max=525MB Default: 5MB." 
- type: "integer" - default: 5 - examples: - - 5 compression: title: "Compression" type: "object" @@ -1792,16 +1837,6 @@ enum: - "JSONL" default: "JSONL" - part_size_mb: - title: "Block Size (MB) for GCS multipart upload (Optional)" - description: "This is the size of a \"Part\" being buffered in memory.\ - \ It limits the memory usage when writing. Larger values will allow\ - \ to upload a bigger files and improve the speed, but consumes9\ - \ more memory. Allowed values: min=5MB, max=525MB Default: 5MB." - type: "integer" - default: 5 - examples: - - 5 compression: title: "Compression" type: "object" @@ -1926,7 +1961,7 @@ supported_destination_sync_modes: - "append" - "overwrite" -- dockerImage: "airbyte/destination-pubsub:0.1.4" +- dockerImage: "airbyte/destination-pubsub:0.1.5" spec: documentationUrl: "https://docs.airbyte.io/integrations/destinations/pubsub" connectionSpecification: @@ -1959,7 +1994,7 @@ supportsDBT: false supported_destination_sync_modes: - "append" -- dockerImage: "airbyte/destination-kafka:0.1.8" +- dockerImage: "airbyte/destination-kafka:0.1.9" spec: documentationUrl: "https://docs.airbyte.io/integrations/destinations/kafka" connectionSpecification: @@ -2250,7 +2285,7 @@ supportsDBT: false supported_destination_sync_modes: - "append" -- dockerImage: "airbyte/destination-kinesis:0.1.2" +- dockerImage: "airbyte/destination-kinesis:0.1.3" spec: documentationUrl: "https://docs.airbyte.io/integrations/destinations/kinesis" connectionSpecification: @@ -2308,7 +2343,7 @@ supportsDBT: false supported_destination_sync_modes: - "append" -- dockerImage: "airbyte/destination-csv:0.2.9" +- dockerImage: "airbyte/destination-csv:0.2.10" spec: documentationUrl: "https://docs.airbyte.io/integrations/destinations/local-csv" connectionSpecification: @@ -2692,7 +2727,7 @@ - "overwrite" - "append" - "append_dedup" -- dockerImage: "airbyte/destination-meilisearch:0.2.12" +- dockerImage: "airbyte/destination-meilisearch:0.2.13" spec: documentationUrl: 
"https://docs.airbyte.io/integrations/destinations/meilisearch" connectionSpecification: @@ -2863,7 +2898,7 @@ supported_destination_sync_modes: - "overwrite" - "append" -- dockerImage: "airbyte/destination-mysql:0.1.18" +- dockerImage: "airbyte/destination-mysql:0.1.20" spec: documentationUrl: "https://docs.airbyte.io/integrations/destinations/mysql" connectionSpecification: @@ -3433,7 +3468,7 @@ - "overwrite" - "append" - "append_dedup" -- dockerImage: "airbyte/destination-pulsar:0.1.1" +- dockerImage: "airbyte/destination-pulsar:0.1.2" spec: documentationUrl: "https://docs.airbyte.io/integrations/destinations/pulsar" connectionSpecification: @@ -3623,7 +3658,7 @@ supportsDBT: false supported_destination_sync_modes: - "append" -- dockerImage: "airbyte/destination-redis:0.1.1" +- dockerImage: "airbyte/destination-redis:0.1.2" spec: documentationUrl: "https://docs.airbyte.io/integrations/destinations/redis" connectionSpecification: @@ -3678,7 +3713,7 @@ supported_destination_sync_modes: - "overwrite" - "append" -- dockerImage: "airbyte/destination-redshift:0.3.37" +- dockerImage: "airbyte/destination-redshift:0.3.46" spec: documentationUrl: "https://docs.airbyte.io/integrations/destinations/redshift" connectionSpecification: @@ -3730,94 +3765,142 @@ - "public" default: "public" title: "Default Schema" - s3_bucket_name: - title: "S3 Bucket Name (Optional)" - type: "string" - description: "The name of the staging S3 bucket to use if utilising a COPY\ - \ strategy. COPY is recommended for production workloads for better speed\ - \ and scalability. See AWS docs for more details." - examples: - - "airbyte.staging" - s3_bucket_path: - title: "S3 Bucket Path (Optional)" - type: "string" - description: "The directory under the S3 bucket where data will be written.\ - \ If not provided, then defaults to the root directory. See path's name recommendations for more details." 
- examples: - - "data_sync/test" - s3_bucket_region: - title: "S3 Bucket Region (Optional)" - type: "string" - default: "" - description: "The region of the S3 staging bucket to use if utilising a\ - \ COPY strategy. See AWS docs for details." - enum: - - "" - - "us-east-1" - - "us-east-2" - - "us-west-1" - - "us-west-2" - - "af-south-1" - - "ap-east-1" - - "ap-south-1" - - "ap-northeast-1" - - "ap-northeast-2" - - "ap-northeast-3" - - "ap-southeast-1" - - "ap-southeast-2" - - "ca-central-1" - - "cn-north-1" - - "cn-northwest-1" - - "eu-central-1" - - "eu-north-1" - - "eu-south-1" - - "eu-west-1" - - "eu-west-2" - - "eu-west-3" - - "sa-east-1" - - "me-south-1" - access_key_id: - type: "string" - description: "This ID grants access to the above S3 staging bucket. Airbyte\ - \ requires Read and Write permissions to the given bucket. See AWS docs on how to generate an access key ID and secret access key." - title: "S3 Key Id (Optional)" - airbyte_secret: true - secret_access_key: - type: "string" - description: "The corresponding secret to the above access key id. See AWS docs on how to generate an access key ID and secret access key." - title: "S3 Access Key (Optional)" - airbyte_secret: true - part_size: - type: "integer" - minimum: 10 - maximum: 100 - examples: - - "10" - description: "Increase this if syncing tables larger than 100GB. Only relevant\ - \ for COPY. Files are streamed to S3 in parts. This determines the size\ - \ of each part, in MBs. As S3 has a limit of 10,000 parts per file, part\ - \ size affects the table size. This is 10MB by default, resulting in a\ - \ default limit of 100GB tables. Note: a larger part size will result\ - \ in larger memory requirements. A rule of thumb is to multiply the part\ - \ size by 10 to get the memory requirement. Modify this with care. See\ - \ docs for details." 
- title: "Stream Part Size (Optional)" - purge_staging_data: - title: "Purge Staging Files and Tables (Optional)" - type: "boolean" - description: "Whether to delete the staging files from S3 after completing\ - \ the sync. See docs for details." - default: true + uploading_method: + title: "Uploading Method" + type: "object" + description: "The method how the data will be uploaded to the database." + oneOf: + - title: "Standard" + required: + - "method" + properties: + method: + type: "string" + const: "Standard" + - title: "S3 Staging" + required: + - "method" + - "s3_bucket_name" + - "s3_bucket_region" + - "access_key_id" + - "secret_access_key" + properties: + method: + type: "string" + const: "S3 Staging" + s3_bucket_name: + title: "S3 Bucket Name" + type: "string" + description: "The name of the staging S3 bucket to use if utilising\ + \ a COPY strategy. COPY is recommended for production workloads\ + \ for better speed and scalability. See AWS docs for more details." + examples: + - "airbyte.staging" + s3_bucket_path: + title: "S3 Bucket Path (Optional)" + type: "string" + description: "The directory under the S3 bucket where data will be\ + \ written. If not provided, then defaults to the root directory.\ + \ See path's name recommendations for more details." + examples: + - "data_sync/test" + s3_bucket_region: + title: "S3 Bucket Region" + type: "string" + default: "" + description: "The region of the S3 staging bucket to use if utilising\ + \ a COPY strategy. See AWS docs for details." 
+ enum: + - "" + - "us-east-1" + - "us-east-2" + - "us-west-1" + - "us-west-2" + - "af-south-1" + - "ap-east-1" + - "ap-south-1" + - "ap-northeast-1" + - "ap-northeast-2" + - "ap-northeast-3" + - "ap-southeast-1" + - "ap-southeast-2" + - "ca-central-1" + - "cn-north-1" + - "cn-northwest-1" + - "eu-central-1" + - "eu-north-1" + - "eu-south-1" + - "eu-west-1" + - "eu-west-2" + - "eu-west-3" + - "sa-east-1" + - "me-south-1" + access_key_id: + type: "string" + description: "This ID grants access to the above S3 staging bucket.\ + \ Airbyte requires Read and Write permissions to the given bucket.\ + \ See AWS docs on how to generate an access key ID and secret access\ + \ key." + title: "S3 Key Id" + airbyte_secret: true + secret_access_key: + type: "string" + description: "The corresponding secret to the above access key id.\ + \ See AWS docs on how to generate an access key ID and secret access\ + \ key." + title: "S3 Access Key" + airbyte_secret: true + purge_staging_data: + title: "Purge Staging Files and Tables (Optional)" + type: "boolean" + description: "Whether to delete the staging files from S3 after completing\ + \ the sync. See docs for details." + default: true + encryption: + title: "Encryption" + type: "object" + description: "How to encrypt the staging data" + default: + encryption_type: "none" + oneOf: + - title: "No encryption" + description: "Staging data will be stored in plaintext." + type: "object" + required: + - "encryption_type" + properties: + encryption_type: + type: "string" + const: "none" + enum: + - "none" + default: "none" + - title: "AES-CBC envelope encryption" + description: "Staging data will be encrypted using AES-CBC envelope\ + \ encryption." + type: "object" + required: + - "encryption_type" + properties: + encryption_type: + type: "string" + const: "aes_cbc_envelope" + enum: + - "aes_cbc_envelope" + default: "aes_cbc_envelope" + key_encrypting_key: + type: "string" + title: "Key" + description: "The key, base64-encoded. 
Must be either 128, 192,\ + \ or 256 bits. Leave blank to have Airbyte generate an ephemeral\ + \ key for each sync." + airbyte_secret: true supportsIncremental: true supportsNormalization: true supportsDBT: true @@ -3825,7 +3908,7 @@ - "overwrite" - "append" - "append_dedup" -- dockerImage: "airbyte/destination-rockset:0.1.2" +- dockerImage: "airbyte/destination-rockset:0.1.3" spec: documentationUrl: "https://docs.airbyte.io/integrations/destinations/rockset" connectionSpecification: @@ -3868,7 +3951,7 @@ supported_destination_sync_modes: - "append" - "overwrite" -- dockerImage: "airbyte/destination-s3:0.3.6" +- dockerImage: "airbyte/destination-s3:0.3.9" spec: documentationUrl: "https://docs.airbyte.io/integrations/destinations/s3" connectionSpecification: @@ -3880,7 +3963,6 @@ - "s3_bucket_path" - "s3_bucket_region" - "format" - additionalProperties: false properties: access_key_id: type: "string" @@ -4287,7 +4369,7 @@ supported_destination_sync_modes: - "overwrite" - "append" -- dockerImage: "airbyte/destination-snowflake:0.4.28" +- dockerImage: "airbyte/destination-snowflake:0.4.30" spec: documentationUrl: "https://docs.airbyte.io/integrations/destinations/snowflake" connectionSpecification: @@ -4418,7 +4500,6 @@ order: 8 oneOf: - title: "Select another option" - additionalProperties: false description: "Select another option" required: - "method" @@ -4431,7 +4512,6 @@ - "Standard" default: "Standard" - title: "[Recommended] Internal Staging" - additionalProperties: false description: "Writes large batches of records to a file, uploads the file\ \ to Snowflake, then uses
COPY INTO table
to upload the file.\ \ Recommended for large production workloads for better speed and scalability." @@ -4446,7 +4526,6 @@ - "Internal Staging" default: "Internal Staging" - title: "AWS S3 Staging" - additionalProperties: false description: "Writes large batches of records to a file, uploads the file\ \ to S3, then uses
COPY INTO table
to upload the file. Recommended\ \ for large production workloads for better speed and scalability." @@ -4519,21 +4598,6 @@ title: "S3 Access Key" airbyte_secret: true order: 4 - part_size: - type: "integer" - default: 5 - examples: - - 5 - description: "Optional. Increase this if syncing tables larger than\ - \ 100GB. Only relevant for COPY. Files are streamed to S3 in parts.\ - \ This determines the size of each part, in MBs. As S3 has a limit\ - \ of 10,000 parts per file, part size affects the table size. This\ - \ is 10MB by default, resulting in a default limit of 100GB tables.\ - \ Note, a larger part size will result in larger memory requirements.\ - \ A rule of thumb is to multiply the part size by 10 to get the\ - \ memory requirement. Modify this with care." - title: "Stream Part Size" - order: 5 purge_staging_data: title: "Purge Staging Files and Tables" type: "boolean" @@ -4541,14 +4605,14 @@ \ the sync. See the docs for details. Only relevant for COPY. Defaults\ \ to true." default: true - order: 6 + order: 5 encryption: title: "Encryption" type: "object" description: "How to encrypt the staging data" default: encryption_type: "none" - order: 7 + order: 6 oneOf: - title: "No encryption" description: "Staging data will be stored in plaintext." @@ -4583,7 +4647,6 @@ \ key for each sync." airbyte_secret: true - title: "GCS Staging" - additionalProperties: false description: "Writes large batches of records to a file, uploads the file\ \ to GCS, then uses
COPY INTO table
to upload the file. Recommended\ \ for large production workloads for better speed and scalability." @@ -4630,7 +4693,6 @@ multiline: true order: 3 - title: "Azure Blob Storage Staging" - additionalProperties: false description: "Writes large batches of records to a file, uploads the file\ \ to Azure Blob Storage, then uses
COPY INTO table
to upload\ \ the file. Recommended for large production workloads for better speed\ @@ -4742,7 +4804,7 @@ path_in_connector_config: - "credentials" - "client_secret" -- dockerImage: "airbyte/destination-mariadb-columnstore:0.1.4" +- dockerImage: "airbyte/destination-mariadb-columnstore:0.1.6" spec: documentationUrl: "https://docs.airbyte.io/integrations/destinations/mariadb-columnstore" connectionSpecification: @@ -4921,7 +4983,7 @@ supported_destination_sync_modes: - "append" - "append_dedup" -- dockerImage: "airbyte/destination-scylla:0.1.1" +- dockerImage: "airbyte/destination-scylla:0.1.2" spec: documentationUrl: "https://docs.airbyte.io/integrations/destinations/scylla" connectionSpecification: diff --git a/airbyte-config/init/src/main/resources/seed/source_definitions.yaml b/airbyte-config/init/src/main/resources/seed/source_definitions.yaml index ffac20538f60..3223299cf1b2 100644 --- a/airbyte-config/init/src/main/resources/seed/source_definitions.yaml +++ b/airbyte-config/init/src/main/resources/seed/source_definitions.yaml @@ -25,7 +25,7 @@ - name: Amazon Seller Partner sourceDefinitionId: e55879a8-0ef8-4557-abcf-ab34c53ec460 dockerRepository: airbyte/source-amazon-seller-partner - dockerImageTag: 0.2.21 + dockerImageTag: 0.2.22 sourceType: api documentationUrl: https://docs.airbyte.io/integrations/sources/amazon-seller-partner icon: amazonsellerpartner.svg @@ -40,7 +40,7 @@ - name: Amplitude sourceDefinitionId: fa9f58c6-2d03-4237-aaa4-07d75e0c1396 dockerRepository: airbyte/source-amplitude - dockerImageTag: 0.1.8 + dockerImageTag: 0.1.10 documentationUrl: https://docs.airbyte.io/integrations/sources/amplitude icon: amplitude.svg sourceType: api @@ -96,7 +96,7 @@ - name: BigQuery sourceDefinitionId: bfd1ddf8-ae8a-4620-b1d7-55597d2ba08c dockerRepository: airbyte/source-bigquery - dockerImageTag: 0.1.7 + dockerImageTag: 0.1.8 documentationUrl: https://docs.airbyte.io/integrations/sources/bigquery icon: bigquery.svg sourceType: database @@ -104,7 +104,7 @@ - 
name: Bing Ads sourceDefinitionId: 47f25999-dd5e-4636-8c39-e7cea2453331 dockerRepository: airbyte/source-bing-ads - dockerImageTag: 0.1.7 + dockerImageTag: 0.1.8 documentationUrl: https://docs.airbyte.io/integrations/sources/bing-ads icon: bingads.svg sourceType: api @@ -200,7 +200,7 @@ - name: Delighted sourceDefinitionId: cc88c43f-6f53-4e8a-8c4d-b284baaf9635 dockerRepository: airbyte/source-delighted - dockerImageTag: 0.1.3 + dockerImageTag: 0.1.4 documentationUrl: https://docs.airbyte.io/integrations/sources/delighted icon: delighted.svg sourceType: api @@ -213,6 +213,14 @@ icon: dixa.svg sourceType: api releaseStage: alpha +- name: Dockerhub + sourceDefinitionId: 72d405a3-56d8-499f-a571-667c03406e43 + dockerRepository: airbyte/source-dockerhub + dockerImageTag: 0.1.0 + documentationUrl: https://docs.airbyte.io/integrations/sources/dockerhub + icon: dockerhub.svg + sourceType: api + releaseStage: alpha - name: Drift sourceDefinitionId: 445831eb-78db-4b1f-8f1f-0d96ad8739e2 dockerRepository: airbyte/source-drift @@ -224,7 +232,7 @@ - name: E2E Testing sourceDefinitionId: d53f9084-fa6b-4a5a-976c-5b8392f4ad8a dockerRepository: airbyte/source-e2e-test - dockerImageTag: 2.1.0 + dockerImageTag: 2.1.1 documentationUrl: https://docs.airbyte.io/integrations/sources/e2e-test icon: airbyte.svg sourceType: api @@ -240,7 +248,7 @@ - name: Facebook Marketing sourceDefinitionId: e7778cfc-e97c-4458-9ecb-b4f2bba8946c dockerRepository: airbyte/source-facebook-marketing - dockerImageTag: 0.2.50 + dockerImageTag: 0.2.53 documentationUrl: https://docs.airbyte.io/integrations/sources/facebook-marketing icon: facebook.svg sourceType: api @@ -256,7 +264,7 @@ - name: Faker sourceDefinitionId: dfd88b22-b603-4c3d-aad7-3701784586b1 dockerRepository: airbyte/source-faker - dockerImageTag: 0.1.4 + dockerImageTag: 0.1.5 documentationUrl: https://docs.airbyte.com/integrations/source-faker sourceType: api releaseStage: alpha @@ -295,7 +303,7 @@ - name: GitHub sourceDefinitionId: 
ef69ef6e-aa7f-4af1-a01d-ef775033524e dockerRepository: airbyte/source-github - dockerImageTag: 0.2.33 + dockerImageTag: 0.2.38 documentationUrl: https://docs.airbyte.io/integrations/sources/github icon: github.svg sourceType: api @@ -303,7 +311,7 @@ - name: Gitlab sourceDefinitionId: 5e6175e5-68e1-4c17-bff9-56103bbb0d80 dockerRepository: airbyte/source-gitlab - dockerImageTag: 0.1.5 + dockerImageTag: 0.1.6 documentationUrl: https://docs.airbyte.io/integrations/sources/gitlab icon: gitlab.svg sourceType: api @@ -311,7 +319,7 @@ - name: Google Ads sourceDefinitionId: 253487c0-2246-43ba-a21f-5116b20a2c50 dockerRepository: airbyte/source-google-ads - dockerImageTag: 0.1.41 + dockerImageTag: 0.1.42 documentationUrl: https://docs.airbyte.io/integrations/sources/google-ads icon: google-adwords.svg sourceType: api @@ -324,6 +332,14 @@ icon: google-analytics.svg sourceType: api releaseStage: beta +- name: Google Analytics Data API + sourceDefinitionId: 3cc2eafd-84aa-4dca-93af-322d9dfeec1a + dockerRepository: airbyte/source-google-analytics-data-api + dockerImageTag: 0.0.1 + documentationUrl: https://docs.airbyte.io/integrations/sources/google-analytics-data-api + icon: google-analytics.svg + sourceType: api + releaseStage: alpha - name: Google Directory sourceDefinitionId: d19ae824-e289-4b14-995a-0632eb46d246 dockerRepository: airbyte/source-google-directory @@ -390,7 +406,7 @@ - name: HubSpot sourceDefinitionId: 36c891d9-4bd9-43ac-bad2-10e12756272c dockerRepository: airbyte/source-hubspot - dockerImageTag: 0.1.68 + dockerImageTag: 0.1.72 documentationUrl: https://docs.airbyte.io/integrations/sources/hubspot icon: hubspot.svg sourceType: api @@ -398,7 +414,7 @@ - name: IBM Db2 sourceDefinitionId: 447e0381-3780-4b46-bb62-00a4e3c8b8e2 dockerRepository: airbyte/source-db2 - dockerImageTag: 0.1.10 + dockerImageTag: 0.1.11 documentationUrl: https://docs.airbyte.io/integrations/sources/db2 icon: db2.svg sourceType: database @@ -414,7 +430,7 @@ - name: Intercom sourceDefinitionId: 
d8313939-3782-41b0-be29-b3ca20d8dd3a dockerRepository: airbyte/source-intercom - dockerImageTag: 0.1.19 + dockerImageTag: 0.1.20 documentationUrl: https://docs.airbyte.io/integrations/sources/intercom icon: intercom.svg sourceType: api @@ -446,7 +462,7 @@ - name: Kafka sourceDefinitionId: d917a47b-8537-4d0d-8c10-36a9928d4265 dockerRepository: airbyte/source-kafka - dockerImageTag: 0.1.6 + dockerImageTag: 0.1.7 documentationUrl: https://docs.airbyte.io/integrations/sources/kafka icon: kafka.svg sourceType: database @@ -517,15 +533,23 @@ - name: Marketo sourceDefinitionId: 9e0556f4-69df-4522-a3fb-03264d36b348 dockerRepository: airbyte/source-marketo - dockerImageTag: 0.1.3 + dockerImageTag: 0.1.4 documentationUrl: https://docs.airbyte.io/integrations/sources/marketo icon: marketo.svg sourceType: api releaseStage: alpha +- name: Metabase + sourceDefinitionId: c7cb421b-942e-4468-99ee-e369bcabaec5 + dockerRepository: airbyte/source-metabase + dockerImageTag: 0.1.0 + documentationUrl: https://docs.airbyte.io/integrations/sources/metabase + icon: metabase.svg + sourceType: api + releaseStage: alpha - name: Microsoft SQL Server (MSSQL) sourceDefinitionId: b5ea17b1-f170-46dc-bc31-cc744ca984c1 dockerRepository: airbyte/source-mssql - dockerImageTag: 0.4.2 + dockerImageTag: 0.4.5 documentationUrl: https://docs.airbyte.io/integrations/sources/mssql icon: mssql.svg sourceType: database @@ -557,7 +581,7 @@ - name: MongoDb sourceDefinitionId: b2e713cd-cc36-4c0a-b5bd-b47cb8a0561e dockerRepository: airbyte/source-mongodb-v2 - dockerImageTag: 0.1.14 + dockerImageTag: 0.1.15 documentationUrl: https://docs.airbyte.io/integrations/sources/mongodb-v2 icon: mongodb.svg sourceType: database @@ -573,7 +597,7 @@ - name: MySQL sourceDefinitionId: 435bb9a5-7887-4809-aa58-28c27df0d7ad dockerRepository: airbyte/source-mysql - dockerImageTag: 0.5.11 + dockerImageTag: 0.5.15 documentationUrl: https://docs.airbyte.io/integrations/sources/mysql icon: mysql.svg sourceType: database @@ -605,14 
+629,14 @@ - name: OpenWeather sourceDefinitionId: d8540a80-6120-485d-b7d6-272bca477d9b dockerRepository: airbyte/source-openweather - dockerImageTag: 0.1.4 + dockerImageTag: 0.1.5 documentationUrl: https://docs.airbyte.io/integrations/sources/openweather sourceType: api releaseStage: alpha - name: Oracle DB sourceDefinitionId: b39a7370-74c3-45a6-ac3a-380d48520a83 dockerRepository: airbyte/source-oracle - dockerImageTag: 0.3.15 + dockerImageTag: 0.3.17 documentationUrl: https://docs.airbyte.io/integrations/sources/oracle icon: oracle.svg sourceType: database @@ -625,6 +649,14 @@ icon: orb.svg sourceType: api releaseStage: alpha +- name: Orbit + sourceDefinitionId: 95bcc041-1d1a-4c2e-8802-0ca5b1bfa36a + dockerRepository: airbyte/source-orbit + dockerImageTag: 0.1.1 + documentationUrl: https://docs.airbyte.io/integrations/sources/orbit + icon: orbit.svg + sourceType: api + releaseStage: alpha - sourceDefinitionId: 3490c201-5d95-4783-b600-eaf07a4c7787 name: Outreach dockerRepository: airbyte/source-outreach @@ -644,7 +676,7 @@ - name: Paypal Transaction sourceDefinitionId: d913b0f2-cc51-4e55-a44c-8ba1697b9239 dockerRepository: airbyte/source-paypal-transaction - dockerImageTag: 0.1.5 + dockerImageTag: 0.1.6 documentationUrl: https://docs.airbyte.io/integrations/sources/paypal-transaction icon: paypal.svg sourceType: api @@ -715,7 +747,7 @@ - name: Postgres sourceDefinitionId: decd338e-5647-4c0b-adf4-da0e75f5a750 dockerRepository: airbyte/source-postgres - dockerImageTag: 0.4.21 + dockerImageTag: 0.4.28 documentationUrl: https://docs.airbyte.io/integrations/sources/postgres icon: postgresql.svg sourceType: database @@ -802,7 +834,7 @@ - name: Salesforce sourceDefinitionId: b117307c-14b6-41aa-9422-947e34922962 dockerRepository: airbyte/source-salesforce - dockerImageTag: 1.0.9 + dockerImageTag: 1.0.10 documentationUrl: https://docs.airbyte.io/integrations/sources/salesforce icon: salesforce.svg sourceType: api @@ -921,7 +953,7 @@ - name: TiDB sourceDefinitionId: 
0dad1a35-ccf8-4d03-b73e-6788c00b13ae dockerRepository: airbyte/source-tidb - dockerImageTag: 0.1.1 + dockerImageTag: 0.1.2 documentationUrl: https://docs.airbyte.io/integrations/sources/tidb icon: tidb.svg sourceType: database @@ -929,7 +961,7 @@ - name: TikTok Marketing sourceDefinitionId: 4bfac00d-ce15-44ff-95b9-9e3c3e8fbd35 dockerRepository: airbyte/source-tiktok-marketing - dockerImageTag: 0.1.12 + dockerImageTag: 0.1.13 documentationUrl: https://docs.airbyte.io/integrations/sources/tiktok-marketing icon: tiktok.svg sourceType: api @@ -945,7 +977,7 @@ - name: Twilio sourceDefinitionId: b9dc6155-672e-42ea-b10d-9f1f1fb95ab1 dockerRepository: airbyte/source-twilio - dockerImageTag: 0.1.4 + dockerImageTag: 0.1.6 documentationUrl: https://docs.airbyte.io/integrations/sources/twilio icon: twilio.svg sourceType: api @@ -953,7 +985,7 @@ - name: Typeform sourceDefinitionId: e7eff203-90bf-43e5-a240-19ea3056c474 dockerRepository: airbyte/source-typeform - dockerImageTag: 0.1.6 + dockerImageTag: 0.1.7 documentationUrl: https://docs.airbyte.io/integrations/sources/typeform icon: typeform.svg sourceType: api @@ -982,10 +1014,18 @@ icon: victorops.svg sourceType: api releaseStage: alpha +- name: Webflow + sourceDefinitionId: ef580275-d9a9-48bb-af5e-db0f5855be04 + dockerRepository: airbyte/source-webflow + dockerImageTag: 0.1.1 + documentationUrl: https://docs.airbyte.io/integrations/sources/webflow + icon: webflow.svg + sourceType: api + releaseStage: alpha - name: Zendesk Chat sourceDefinitionId: 40d24d0f-b8f9-4fe0-9e6c-b06c0f3f45e4 dockerRepository: airbyte/source-zendesk-chat - dockerImageTag: 0.1.7 + dockerImageTag: 0.1.8 documentationUrl: https://docs.airbyte.io/integrations/sources/zendesk-chat icon: zendesk.svg sourceType: api @@ -1001,7 +1041,7 @@ - name: Zendesk Support sourceDefinitionId: 79c1aa37-dae3-42ae-b333-d1c105477715 dockerRepository: airbyte/source-zendesk-support - dockerImageTag: 0.2.9 + dockerImageTag: 0.2.11 documentationUrl: 
https://docs.airbyte.io/integrations/sources/zendesk-support icon: zendesk.svg sourceType: api @@ -1061,7 +1101,14 @@ - name: SFTP sourceDefinitionId: a827c52e-791c-4135-a245-e233c5255199 dockerRepository: airbyte/source-sftp - dockerImageTag: 0.1.1 + dockerImageTag: 0.1.2 documentationUrl: https://docs.airbyte.com/integrations/sources/sftp sourceType: file releaseStage: alpha +- name: Firebolt + sourceDefinitionId: 6f2ac653-8623-43c4-8950-19218c7caf3d + dockerRepository: airbyte/source-firebolt + dockerImageTag: 0.1.0 + documentationUrl: https://docs.airbyte.io/integrations/sources/firebolt + sourceType: database + releaseStage: alpha diff --git a/airbyte-config/init/src/main/resources/seed/source_specs.yaml b/airbyte-config/init/src/main/resources/seed/source_specs.yaml index b84d78bd2fc7..5d81325fdcec 100644 --- a/airbyte-config/init/src/main/resources/seed/source_specs.yaml +++ b/airbyte-config/init/src/main/resources/seed/source_specs.yaml @@ -213,7 +213,7 @@ type: "string" path_in_connector_config: - "client_secret" -- dockerImage: "airbyte/source-amazon-seller-partner:0.2.21" +- dockerImage: "airbyte/source-amazon-seller-partner:0.2.22" spec: documentationUrl: "https://docs.airbyte.io/integrations/sources/amazon-seller-partner" changelogUrl: "https://docs.airbyte.io/integrations/sources/amazon-seller-partner" @@ -547,7 +547,7 @@ supportsNormalization: false supportsDBT: false supported_destination_sync_modes: [] -- dockerImage: "airbyte/source-amplitude:0.1.8" +- dockerImage: "airbyte/source-amplitude:0.1.10" spec: documentationUrl: "https://docs.airbyte.io/integrations/sources/amplitude" connectionSpecification: @@ -830,7 +830,7 @@ supportsNormalization: false supportsDBT: false supported_destination_sync_modes: [] -- dockerImage: "airbyte/source-bigquery:0.1.7" +- dockerImage: "airbyte/source-bigquery:0.1.8" spec: documentationUrl: "https://docs.airbyte.io/integrations/sources/bigquery" connectionSpecification: @@ -868,7 +868,7 @@ - "overwrite" - "append" 
- "append_dedup" -- dockerImage: "airbyte/source-bing-ads:0.1.7" +- dockerImage: "airbyte/source-bing-ads:0.1.8" spec: documentationUrl: "https://docs.airbyte.io/integrations/sources/bing-ads" connectionSpecification: @@ -880,10 +880,6 @@ - "client_id" - "refresh_token" - "reports_start_date" - - "hourly_reports" - - "daily_reports" - - "weekly_reports" - - "monthly_reports" additionalProperties: true properties: auth_method: @@ -934,38 +930,6 @@ \ Any data generated before this date will not be replicated in reports.\ \ This is a UTC date in YYYY-MM-DD format." order: 5 - hourly_reports: - title: "Enable hourly-aggregate reports" - type: "boolean" - description: "Toggle this to enable replicating reports aggregated using\ - \ an hourly time window. More information about report aggregation can\ - \ be found in the docs." - default: false - daily_reports: - title: "Enable daily-aggregate reports" - type: "boolean" - description: "Toggle this to enable replicating reports aggregated using\ - \ a daily time window. More information about report aggregation can be\ - \ found in the docs." - default: false - weekly_reports: - title: "Enable weekly-aggregate reports" - type: "boolean" - description: "Toggle this to enable replicating reports aggregated using\ - \ a weekly time window running from Sunday to Saturday. More information\ - \ about report aggregation can be found in the docs." - default: false - monthly_reports: - title: "Enable monthly-aggregate reports" - type: "boolean" - description: "Toggle this to enable replicating reports aggregated using\ - \ a monthly time window. More information about report aggregation can\ - \ be found in the docs." 
- default: false supportsNormalization: false supportsDBT: false supported_destination_sync_modes: [] @@ -1546,7 +1510,7 @@ supportsNormalization: false supportsDBT: false supported_destination_sync_modes: [] -- dockerImage: "airbyte/source-delighted:0.1.3" +- dockerImage: "airbyte/source-delighted:0.1.4" spec: documentationUrl: "https://docsurl.com" connectionSpecification: @@ -1559,15 +1523,19 @@ additionalProperties: false properties: since: - type: "integer" - description: "An Unix timestamp to retrieve records created on or after\ - \ this time." + title: "Since" + type: "string" + description: "The date from which you'd like to replicate the data" examples: - - 1625328167 + - "2022-05-30 04:50:23" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2} ([0-9]{2}:[0-9]{2}:[0-9]{2})?$" + order: 0 api_key: + title: "Delighted API Key" type: "string" description: "A Delighted API key." airbyte_secret: true + order: 1 supportsNormalization: false supportsDBT: false supported_destination_sync_modes: [] @@ -1604,6 +1572,27 @@ supportsNormalization: false supportsDBT: false supported_destination_sync_modes: [] +- dockerImage: "airbyte/source-dockerhub:0.1.0" + spec: + documentationUrl: "https://docs.airbyte.io/integrations/sources/dockerhub" + connectionSpecification: + $schema: "http://json-schema.org/draft-07/schema#" + title: "Dockerhub Spec" + type: "object" + required: + - "docker_username" + additionalProperties: false + properties: + docker_username: + type: "string" + description: "Username of DockerHub person or organization (for https://hub.docker.com/v2/repositories/USERNAME/\ + \ API call)" + pattern: "^[a-z0-9_\\-]+$" + examples: + - "airbyte" + supportsNormalization: false + supportsDBT: false + supported_destination_sync_modes: [] - dockerImage: "airbyte/source-drift:0.2.5" spec: documentationUrl: "https://docs.airbyte.io/integrations/sources/drift" @@ -1687,7 +1676,7 @@ oauthFlowOutputParameters: - - "access_token" - - "refresh_token" -- dockerImage: 
"airbyte/source-e2e-test:2.1.0" +- dockerImage: "airbyte/source-e2e-test:2.1.1" spec: documentationUrl: "https://docs.airbyte.io/integrations/sources/e2e-test" connectionSpecification: @@ -1838,7 +1827,7 @@ supportsNormalization: false supportsDBT: false supported_destination_sync_modes: [] -- dockerImage: "airbyte/source-facebook-marketing:0.2.50" +- dockerImage: "airbyte/source-facebook-marketing:0.2.53" spec: documentationUrl: "https://docs.airbyte.io/integrations/sources/facebook-marketing" changelogUrl: "https://docs.airbyte.io/integrations/sources/facebook-marketing" @@ -2217,7 +2206,7 @@ oauthFlowInitParameters: [] oauthFlowOutputParameters: - - "access_token" -- dockerImage: "airbyte/source-faker:0.1.4" +- dockerImage: "airbyte/source-faker:0.1.5" spec: documentationUrl: "https://docs.airbyte.com/integrations/sources/faker" connectionSpecification: @@ -2595,7 +2584,7 @@ supportsNormalization: false supportsDBT: false supported_destination_sync_modes: [] -- dockerImage: "airbyte/source-github:0.2.33" +- dockerImage: "airbyte/source-github:0.2.38" spec: documentationUrl: "https://docs.airbyte.com/integrations/sources/github" connectionSpecification: @@ -2730,7 +2719,7 @@ path_in_connector_config: - "credentials" - "client_secret" -- dockerImage: "airbyte/source-gitlab:0.1.5" +- dockerImage: "airbyte/source-gitlab:0.1.6" spec: documentationUrl: "https://docs.airbyte.io/integrations/sources/gitlab" connectionSpecification: @@ -2780,7 +2769,7 @@ supportsNormalization: false supportsDBT: false supported_destination_sync_modes: [] -- dockerImage: "airbyte/source-google-ads:0.1.41" +- dockerImage: "airbyte/source-google-ads:0.1.42" spec: documentationUrl: "https://docs.airbyte.com/integrations/sources/google-ads" connectionSpecification: @@ -3067,6 +3056,65 @@ oauthFlowOutputParameters: - - "access_token" - - "refresh_token" +- dockerImage: "airbyte/source-google-analytics-data-api:0.0.1" + spec: + documentationUrl: "https://docsurl.com" + connectionSpecification: 
+ $schema: "http://json-schema.org/draft-07/schema#" + title: "Google Analytics Data API Spec" + type: "object" + required: + - "property_id" + - "json_credentials" + - "report_name" + - "dimensions" + - "metrics" + - "date_ranges_start_date" + - "date_ranges_end_date" + additionalProperties: false + properties: + property_id: + type: "string" + title: "Property ID" + description: "A Google Analytics GA4 property identifier whose events are\ + \ tracked. Specified in the URL path and not the body" + order: 1 + json_credentials: + type: "string" + title: "JSON Credentials" + description: "The JSON key of the Service Account to use for authorization" + airbyte_secret: true + order: 2 + report_name: + type: "string" + title: "Report Name" + description: "The report name" + order: 3 + dimensions: + type: "string" + title: "Dimensions" + description: "Comma seprated report dimensions https://developers.google.com/analytics/devguides/reporting/data/v1/api-schema#dimensions" + order: 4 + metrics: + type: "string" + title: "Metrics" + description: "Comma seprated report metrics https://developers.google.com/analytics/devguides/reporting/data/v1/api-schema#metrics" + order: 5 + date_ranges_start_date: + type: "string" + title: "Date Range Start Date" + description: "The start date. One of the values Ndaysago, yesterday, today\ + \ or in the format YYYY-MM-DD" + order: 6 + date_ranges_end_date: + type: "string" + title: "Date Range End Date" + description: "The end date. 
One of the values Ndaysago, yesterday, today\ + \ or in the format YYYY-MM-DD" + order: 7 + supportsNormalization: false + supportsDBT: false + supported_destination_sync_modes: [] - dockerImage: "airbyte/source-google-directory:0.1.9" spec: documentationUrl: "https://docs.airbyte.io/integrations/sources/google-directory" @@ -3633,7 +3681,7 @@ supportsNormalization: false supportsDBT: false supported_destination_sync_modes: [] -- dockerImage: "airbyte/source-hubspot:0.1.68" +- dockerImage: "airbyte/source-hubspot:0.1.72" spec: documentationUrl: "https://docs.airbyte.io/integrations/sources/hubspot" connectionSpecification: @@ -3730,7 +3778,7 @@ - - "client_secret" oauthFlowOutputParameters: - - "refresh_token" -- dockerImage: "airbyte/source-db2:0.1.10" +- dockerImage: "airbyte/source-db2:0.1.11" spec: documentationUrl: "https://docs.airbyte.io/integrations/sources/db2" connectionSpecification: @@ -3861,7 +3909,7 @@ oauthFlowInitParameters: [] oauthFlowOutputParameters: - - "access_token" -- dockerImage: "airbyte/source-intercom:0.1.19" +- dockerImage: "airbyte/source-intercom:0.1.20" spec: documentationUrl: "https://docs.airbyte.io/integrations/sources/intercom" connectionSpecification: @@ -4053,7 +4101,7 @@ supportsNormalization: false supportsDBT: false supported_destination_sync_modes: [] -- dockerImage: "airbyte/source-kafka:0.1.6" +- dockerImage: "airbyte/source-kafka:0.1.7" spec: documentationUrl: "https://docs.airbyte.io/integrations/sources/kafka" connectionSpecification: @@ -4144,6 +4192,11 @@ \ and returns them incrementally from each poll." type: "integer" default: 500 + polling_time: + title: "Polling Time" + description: "Amount of time Kafka connector should try to poll for messages." 
+ type: "integer" + default: 100 protocol: title: "Protocol" type: "object" @@ -4769,7 +4822,7 @@ supportsNormalization: false supportsDBT: false supported_destination_sync_modes: [] -- dockerImage: "airbyte/source-marketo:0.1.3" +- dockerImage: "airbyte/source-marketo:0.1.4" spec: documentationUrl: "https://docs.airbyte.io/integrations/sources/marketo" connectionSpecification: @@ -4820,7 +4873,45 @@ supportsNormalization: false supportsDBT: false supported_destination_sync_modes: [] -- dockerImage: "airbyte/source-mssql:0.4.2" +- dockerImage: "airbyte/source-metabase:0.1.0" + spec: + documentationUrl: "https://docs.airbyte.io/integrations/sources/metabase" + connectionSpecification: + $schema: "http://json-schema.org/draft-07/schema#" + title: "Metabase Source Spec" + type: "object" + required: + - "instance_api_url" + additionalProperties: true + properties: + instance_api_url: + type: "string" + title: "Metabase Instance API URL" + description: "URL to your metabase instance API" + examples: + - "http://localhost:3000/api/" + order: 0 + username: + type: "string" + order: 1 + password: + type: "string" + airbyte_secret: true + order: 2 + session_token: + type: "string" + description: "To generate your session token, you need to run the following\ + \ command: ``` curl -X POST \\\n -H \"Content-Type: application/json\"\ + \ \\\n -d '{\"username\": \"person@metabase.com\", \"password\": \"fakepassword\"\ + }' \\\n http://localhost:3000/api/session\n``` Then copy the value of\ + \ the `id` field returned by a successful call to that API.\nNote that\ + \ by default, sessions are good for 14 days and needs to be regenerated." 
+ airbyte_secret: true + order: 3 + supportsNormalization: false + supportsDBT: false + supported_destination_sync_modes: [] +- dockerImage: "airbyte/source-mssql:0.4.5" spec: documentationUrl: "https://docs.airbyte.io/integrations/destinations/mssql" connectionSpecification: @@ -5445,7 +5536,7 @@ path_in_connector_config: - "credentials" - "client_secret" -- dockerImage: "airbyte/source-mongodb-v2:0.1.14" +- dockerImage: "airbyte/source-mongodb-v2:0.1.15" spec: documentationUrl: "https://docs.airbyte.io/integrations/sources/mongodb-v2" changelogUrl: "https://docs.airbyte.io/integrations/sources/mongodb-v2" @@ -5609,7 +5700,7 @@ supportsNormalization: false supportsDBT: false supported_destination_sync_modes: [] -- dockerImage: "airbyte/source-mysql:0.5.11" +- dockerImage: "airbyte/source-mysql:0.5.15" spec: documentationUrl: "https://docs.airbyte.io/integrations/sources/mysql" connectionSpecification: @@ -5880,7 +5971,7 @@ supportsNormalization: false supportsDBT: false supported_destination_sync_modes: [] -- dockerImage: "airbyte/source-openweather:0.1.4" +- dockerImage: "airbyte/source-openweather:0.1.5" spec: documentationUrl: "https://docsurl.com" connectionSpecification: @@ -5998,7 +6089,7 @@ supportsNormalization: false supportsDBT: false supported_destination_sync_modes: [] -- dockerImage: "airbyte/source-oracle:0.3.15" +- dockerImage: "airbyte/source-oracle:0.3.17" spec: documentationUrl: "https://docs.airbyte.io/integrations/sources/oracle" connectionSpecification: @@ -6016,6 +6107,7 @@ title: "Host" description: "Hostname of the database." type: "string" + order: 1 port: title: "Port" description: "Port of the database.\nOracle Corporations recommends the\ @@ -6026,18 +6118,22 @@ minimum: 0 maximum: 65536 default: 1521 + order: 2 sid: title: "SID (Oracle System Identifier)" type: "string" + order: 3 username: title: "User" description: "The username which is used to access the database." 
type: "string" + order: 4 password: title: "Password" description: "The password associated with the username." type: "string" airbyte_secret: true + order: 5 schemas: title: "Schemas" description: "The list of schemas to sync from. Defaults to user. Case sensitive." @@ -6046,12 +6142,20 @@ type: "string" minItems: 1 uniqueItems: true + order: 6 + jdbc_url_params: + title: "JDBC URL Params" + description: "Additional properties to pass to the JDBC URL string when\ + \ connecting to the database formatted as 'key=value' pairs separated\ + \ by the symbol '&'. (example: key1=value1&key2=value2&key3=value3)." + type: "string" + order: 7 encryption: title: "Encryption" type: "object" description: "The encryption method with is used when communicating with\ \ the database." - order: 6 + order: 8 oneOf: - title: "Unencrypted" additionalProperties: false @@ -6268,6 +6372,41 @@ supportsNormalization: false supportsDBT: false supported_destination_sync_modes: [] +- dockerImage: "airbyte/source-orbit:0.1.1" + spec: + documentationUrl: "https://docs.airbyte.com/integrations/sources/orbit" + connectionSpecification: + $schema: "http://json-schema.org/draft-07/schema#" + title: "Orbit Source Spec" + type: "object" + required: + - "api_token" + - "workspace" + additionalProperties: false + properties: + api_token: + type: "string" + airbyte_secret: true + title: "API Token" + description: "Authorizes you to work with Orbit workspaces associated with\ + \ the token." + order: 0 + workspace: + type: "string" + title: "Workspace" + description: "The unique name of the workspace that your API token is associated\ + \ with." + order: 1 + start_date: + type: "string" + title: "Start Date" + description: "Date in the format 2022-06-26. Only load members whose last\ + \ activities are after this date." 
+ pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + order: 2 + supportsNormalization: false + supportsDBT: false + supported_destination_sync_modes: [] - dockerImage: "airbyte/source-outreach:0.1.1" spec: documentationUrl: "https://docs.airbyte.io/integrations/sources/outreach" @@ -6364,7 +6503,7 @@ supportsNormalization: false supportsDBT: false supported_destination_sync_modes: [] -- dockerImage: "airbyte/source-paypal-transaction:0.1.5" +- dockerImage: "airbyte/source-paypal-transaction:0.1.6" spec: documentationUrl: "https://docs.airbyte.io/integrations/sources/paypal-transactions" connectionSpecification: @@ -6715,7 +6854,7 @@ supportsNormalization: false supportsDBT: false supported_destination_sync_modes: [] -- dockerImage: "airbyte/source-postgres:0.4.21" +- dockerImage: "airbyte/source-postgres:0.4.28" spec: documentationUrl: "https://docs.airbyte.com/integrations/sources/postgres" connectionSpecification: @@ -7646,7 +7785,7 @@ supportsNormalization: false supportsDBT: false supported_destination_sync_modes: [] -- dockerImage: "airbyte/source-salesforce:1.0.9" +- dockerImage: "airbyte/source-salesforce:1.0.10" spec: documentationUrl: "https://docs.airbyte.com/integrations/sources/salesforce" connectionSpecification: @@ -8854,7 +8993,7 @@ supportsNormalization: false supportsDBT: false supported_destination_sync_modes: [] -- dockerImage: "airbyte/source-tidb:0.1.1" +- dockerImage: "airbyte/source-tidb:0.1.2" spec: documentationUrl: "https://docs.airbyte.io/integrations/sources/tidb" connectionSpecification: @@ -9014,7 +9153,7 @@ supportsNormalization: false supportsDBT: false supported_destination_sync_modes: [] -- dockerImage: "airbyte/source-tiktok-marketing:0.1.12" +- dockerImage: "airbyte/source-tiktok-marketing:0.1.13" spec: documentationUrl: "https://docs.airbyte.io/integrations/sources/tiktok-marketing" changelogUrl: "https://docs.airbyte.io/integrations/sources/tiktok-marketing" @@ -9218,7 +9357,7 @@ oauthFlowOutputParameters: - - "token" - - "key" -- 
dockerImage: "airbyte/source-twilio:0.1.4" +- dockerImage: "airbyte/source-twilio:0.1.6" spec: documentationUrl: "https://docs.airbyte.io/integrations/sources/twilio" connectionSpecification: @@ -9236,11 +9375,13 @@ description: "Twilio account SID" airbyte_secret: true type: "string" + order: 1 auth_token: title: "Auth Token" description: "Twilio Auth Token." airbyte_secret: true type: "string" + order: 2 start_date: title: "Replication Start Date" description: "UTC date and time in the format 2020-10-01T00:00:00Z. Any\ @@ -9249,12 +9390,21 @@ examples: - "2020-10-01T00:00:00Z" type: "string" + order: 3 + lookback_window: + title: "Lookback window" + description: "How far into the past to look for records. (in minutes)" + examples: + - 60 + default: 0 + type: "integer" + order: 4 supportsIncremental: true supportsNormalization: false supportsDBT: false supported_destination_sync_modes: - "append" -- dockerImage: "airbyte/source-typeform:0.1.6" +- dockerImage: "airbyte/source-typeform:0.1.7" spec: documentationUrl: "https://docs.airbyte.io/integrations/sources/typeform" connectionSpecification: @@ -9415,7 +9565,36 @@ supportsNormalization: false supportsDBT: false supported_destination_sync_modes: [] -- dockerImage: "airbyte/source-zendesk-chat:0.1.7" +- dockerImage: "airbyte/source-webflow:0.1.1" + spec: + documentationUrl: "https://docs.airbyte.io/integrations/sources/webflow" + connectionSpecification: + $schema: "http://json-schema.org/draft-07/schema#" + title: "Webflow Spec" + type: "object" + required: + - "api_key" + - "site_id" + additionalProperties: false + properties: + site_id: + title: "Site id" + type: "string" + description: "The id of the Webflow site you are requesting data from. See\ + \ https://developers.webflow.com/#sites" + example: "a relatively long hex sequence" + order: 0 + api_key: + title: "API token" + type: "string" + description: "The API token for authenticating to Webflow. 
See https://university.webflow.com/lesson/intro-to-the-webflow-api" + example: "a very long hex sequence" + order: 1 + airbyte_secret: true + supportsNormalization: false + supportsDBT: false + supported_destination_sync_modes: [] +- dockerImage: "airbyte/source-zendesk-chat:0.1.8" spec: documentationUrl: "https://docs.airbyte.io/integrations/sources/zendesk-chat" connectionSpecification: @@ -9674,7 +9853,7 @@ path_in_connector_config: - "credentials" - "client_secret" -- dockerImage: "airbyte/source-zendesk-support:0.2.9" +- dockerImage: "airbyte/source-zendesk-support:0.2.11" spec: documentationUrl: "https://docs.airbyte.io/integrations/sources/zendesk-support" connectionSpecification: @@ -10114,7 +10293,7 @@ supportsNormalization: false supportsDBT: false supported_destination_sync_modes: [] -- dockerImage: "airbyte/source-sftp:0.1.1" +- dockerImage: "airbyte/source-sftp:0.1.2" spec: documentationUrl: "https://docs.airbyte.io/integrations/source/sftp" connectionSpecification: @@ -10218,3 +10397,47 @@ supportsNormalization: false supportsDBT: false supported_destination_sync_modes: [] +- dockerImage: "airbyte/source-firebolt:0.1.0" + spec: + documentationUrl: "https://docs.airbyte.io/integrations/sources/firebolt" + connectionSpecification: + $schema: "http://json-schema.org/draft-07/schema#" + title: "Firebolt Spec" + type: "object" + required: + - "username" + - "password" + - "database" + additionalProperties: false + properties: + username: + type: "string" + title: "Username" + description: "Firebolt email address you use to login." + examples: + - "username@email.com" + password: + type: "string" + title: "Password" + description: "Firebolt password." + account: + type: "string" + title: "Account" + description: "Firebolt account to login." + host: + type: "string" + title: "Host" + description: "The host name of your Firebolt database." 
+ examples: + - "api.app.firebolt.io" + database: + type: "string" + title: "Database" + description: "The database to connect to." + engine: + type: "string" + title: "Engine" + description: "Engine name or url to connect to." + supportsNormalization: false + supportsDBT: false + supported_destination_sync_modes: [] diff --git a/airbyte-config/specs/src/main/java/io/airbyte/config/specs/GcsBucketSpecFetcher.java b/airbyte-config/specs/src/main/java/io/airbyte/config/specs/GcsBucketSpecFetcher.java index 06618694cf5e..f20bbf64adb6 100644 --- a/airbyte-config/specs/src/main/java/io/airbyte/config/specs/GcsBucketSpecFetcher.java +++ b/airbyte-config/specs/src/main/java/io/airbyte/config/specs/GcsBucketSpecFetcher.java @@ -8,7 +8,9 @@ import com.google.api.client.util.Preconditions; import com.google.cloud.storage.Blob; import com.google.cloud.storage.Storage; +import com.google.common.annotations.VisibleForTesting; import io.airbyte.commons.json.Jsons; +import io.airbyte.config.Configs.DeploymentMode; import io.airbyte.protocol.models.AirbyteProtocolSchema; import io.airbyte.protocol.models.ConnectorSpecification; import io.airbyte.validation.json.JsonSchemaValidator; @@ -23,12 +25,27 @@ public class GcsBucketSpecFetcher { private static final Logger LOGGER = LoggerFactory.getLogger(GcsBucketSpecFetcher.class); + // these filenames must match default_spec_file and cloud_spec_file in manage.sh + public static final String DEFAULT_SPEC_FILE = "spec.json"; + public static final String CLOUD_SPEC_FILE = "spec.cloud.json"; + private final Storage storage; private final String bucketName; + private final DeploymentMode deploymentMode; public GcsBucketSpecFetcher(final Storage storage, final String bucketName) { this.storage = storage; this.bucketName = bucketName; + this.deploymentMode = DeploymentMode.OSS; + } + + /** + * This constructor is used by airbyte-cloud to fetch cloud-specific spec files. 
+ */ + public GcsBucketSpecFetcher(final Storage storage, final String bucketName, final DeploymentMode deploymentMode) { + this.storage = storage; + this.bucketName = bucketName; + this.deploymentMode = deploymentMode; } public String getBucketName() { @@ -41,17 +58,14 @@ public Optional attemptFetch(final String dockerImage) { final String dockerImageName = dockerImageComponents[0]; final String dockerImageTag = dockerImageComponents[1]; - final Path specPath = Path.of("specs").resolve(dockerImageName).resolve(dockerImageTag).resolve("spec.json"); - LOGGER.debug("Checking path for cached spec: {} {}", bucketName, specPath); - final Blob specAsBlob = storage.get(bucketName, specPath.toString()); + final Optional specAsBlob = getSpecAsBlob(dockerImageName, dockerImageTag); - // if null it means the object was not found. - if (specAsBlob == null) { + if (specAsBlob.isEmpty()) { LOGGER.debug("Spec not found in bucket storage"); return Optional.empty(); } - final String specAsString = new String(specAsBlob.getContent(), StandardCharsets.UTF_8); + final String specAsString = new String(specAsBlob.get().getContent(), StandardCharsets.UTF_8); try { validateConfig(Jsons.deserialize(specAsString)); } catch (final JsonValidationException e) { @@ -61,6 +75,32 @@ public Optional attemptFetch(final String dockerImage) { return Optional.of(Jsons.deserialize(specAsString, ConnectorSpecification.class)); } + @VisibleForTesting + Optional getSpecAsBlob(final String dockerImageName, final String dockerImageTag) { + if (deploymentMode == DeploymentMode.CLOUD) { + final Optional cloudSpecAsBlob = getSpecAsBlob(dockerImageName, dockerImageTag, CLOUD_SPEC_FILE, DeploymentMode.CLOUD); + if (cloudSpecAsBlob.isPresent()) { + LOGGER.info("Found cloud specific spec: {} {}", bucketName, cloudSpecAsBlob); + return cloudSpecAsBlob; + } + } + return getSpecAsBlob(dockerImageName, dockerImageTag, DEFAULT_SPEC_FILE, DeploymentMode.OSS); + } + + @VisibleForTesting + Optional getSpecAsBlob(final 
String dockerImageName, + final String dockerImageTag, + final String specFile, + final DeploymentMode deploymentMode) { + final Path specPath = Path.of("specs").resolve(dockerImageName).resolve(dockerImageTag).resolve(specFile); + LOGGER.debug("Checking path for cached {} spec: {} {}", deploymentMode.name(), bucketName, specPath); + final Blob specAsBlob = storage.get(bucketName, specPath.toString()); + if (specAsBlob != null) { + return Optional.of(specAsBlob); + } + return Optional.empty(); + } + private static void validateConfig(final JsonNode json) throws JsonValidationException { final JsonSchemaValidator jsonSchemaValidator = new JsonSchemaValidator(); final JsonNode specJsonSchema = JsonSchemaValidator.getSchema(AirbyteProtocolSchema.PROTOCOL.getFile(), "ConnectorSpecification"); diff --git a/airbyte-config/specs/src/test/java/io/airbyte/config/specs/GcsBucketSpecFetcherTest.java b/airbyte-config/specs/src/test/java/io/airbyte/config/specs/GcsBucketSpecFetcherTest.java index 84956ccf67c5..8ff403f089b1 100644 --- a/airbyte-config/specs/src/test/java/io/airbyte/config/specs/GcsBucketSpecFetcherTest.java +++ b/airbyte-config/specs/src/test/java/io/airbyte/config/specs/GcsBucketSpecFetcherTest.java @@ -4,6 +4,8 @@ package io.airbyte.config.specs; +import static io.airbyte.config.specs.GcsBucketSpecFetcher.CLOUD_SPEC_FILE; +import static io.airbyte.config.specs.GcsBucketSpecFetcher.DEFAULT_SPEC_FILE; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertTrue; import static org.mockito.Mockito.mock; @@ -13,8 +15,8 @@ import com.google.cloud.storage.Storage; import com.google.common.collect.ImmutableMap; import io.airbyte.commons.json.Jsons; +import io.airbyte.config.Configs.DeploymentMode; import io.airbyte.protocol.models.ConnectorSpecification; -import java.io.IOException; import java.nio.charset.StandardCharsets; import java.nio.file.Path; import java.util.Optional; @@ -27,36 +29,43 @@ class 
GcsBucketSpecFetcherTest { private static final String DOCKER_REPOSITORY = "image"; private static final String DOCKER_IMAGE_TAG = "0.1.0"; private static final String DOCKER_IMAGE = DOCKER_REPOSITORY + ":" + DOCKER_IMAGE_TAG; - private static final String SPEC_PATH = Path.of("specs").resolve(DOCKER_REPOSITORY).resolve(DOCKER_IMAGE_TAG).resolve("spec.json").toString(); + private static final String DEFAULT_SPEC_PATH = Path.of("specs") + .resolve(DOCKER_REPOSITORY).resolve(DOCKER_IMAGE_TAG).resolve(DEFAULT_SPEC_FILE).toString(); + private static final String CLOUD_SPEC_PATH = Path.of("specs") + .resolve(DOCKER_REPOSITORY).resolve(DOCKER_IMAGE_TAG).resolve(CLOUD_SPEC_FILE).toString(); private Storage storage; - private Blob specBlob; - private final ConnectorSpecification spec = new ConnectorSpecification().withConnectionSpecification(Jsons.jsonNode(ImmutableMap.of("foo", "bar"))); + private Blob defaultSpecBlob; + private Blob cloudSpecBlob; + private final ConnectorSpecification defaultSpec = new ConnectorSpecification() + .withConnectionSpecification(Jsons.jsonNode(ImmutableMap.of("foo", "bar", "mode", "oss"))); + private final ConnectorSpecification cloudSpec = new ConnectorSpecification() + .withConnectionSpecification(Jsons.jsonNode(ImmutableMap.of("foo", "bar", "mode", "cloud"))); - @SuppressWarnings("unchecked") @BeforeEach - void setup() throws IOException { + void setup() { storage = mock(Storage.class); - final byte[] specBytes = Jsons.toBytes(Jsons.jsonNode(spec)); - specBlob = mock(Blob.class); - when(specBlob.getContent()).thenReturn(specBytes); + defaultSpecBlob = mock(Blob.class); + when(defaultSpecBlob.getContent()).thenReturn(Jsons.toBytes(Jsons.jsonNode(defaultSpec))); + cloudSpecBlob = mock(Blob.class); + when(cloudSpecBlob.getContent()).thenReturn(Jsons.toBytes(Jsons.jsonNode(cloudSpec))); } @Test - void testGetsSpecIfPresent() throws IOException { - when(storage.get(BUCKET_NAME, SPEC_PATH)).thenReturn(specBlob); + void testGetsSpecIfPresent() { 
+ when(storage.get(BUCKET_NAME, DEFAULT_SPEC_PATH)).thenReturn(defaultSpecBlob); final GcsBucketSpecFetcher bucketSpecFetcher = new GcsBucketSpecFetcher(storage, BUCKET_NAME); final Optional returnedSpec = bucketSpecFetcher.attemptFetch(DOCKER_IMAGE); assertTrue(returnedSpec.isPresent()); - assertEquals(spec, returnedSpec.get()); + assertEquals(defaultSpec, returnedSpec.get()); } @Test - void testReturnsEmptyIfNotPresent() throws IOException { - when(storage.get(BUCKET_NAME, SPEC_PATH)).thenReturn(null); + void testReturnsEmptyIfNotPresent() { + when(storage.get(BUCKET_NAME, DEFAULT_SPEC_PATH)).thenReturn(null); final GcsBucketSpecFetcher bucketSpecFetcher = new GcsBucketSpecFetcher(storage, BUCKET_NAME); final Optional returnedSpec = bucketSpecFetcher.attemptFetch(DOCKER_IMAGE); @@ -65,10 +74,10 @@ void testReturnsEmptyIfNotPresent() throws IOException { } @Test - void testReturnsEmptyIfInvalidSpec() throws IOException { + void testReturnsEmptyIfInvalidSpec() { final Blob invalidSpecBlob = mock(Blob.class); when(invalidSpecBlob.getContent()).thenReturn("{\"notASpec\": true}".getBytes(StandardCharsets.UTF_8)); - when(storage.get(BUCKET_NAME, SPEC_PATH)).thenReturn(invalidSpecBlob); + when(storage.get(BUCKET_NAME, DEFAULT_SPEC_PATH)).thenReturn(invalidSpecBlob); final GcsBucketSpecFetcher bucketSpecFetcher = new GcsBucketSpecFetcher(storage, BUCKET_NAME); final Optional returnedSpec = bucketSpecFetcher.attemptFetch(DOCKER_IMAGE); @@ -76,4 +85,42 @@ void testReturnsEmptyIfInvalidSpec() throws IOException { assertTrue(returnedSpec.isEmpty()); } + /** + * Test {@link GcsBucketSpecFetcher#getSpecAsBlob(String, String)}. 
+ */ + @Test + void testDynamicGetSpecAsBlob() { + when(storage.get(BUCKET_NAME, DEFAULT_SPEC_PATH)).thenReturn(defaultSpecBlob); + when(storage.get(BUCKET_NAME, CLOUD_SPEC_PATH)).thenReturn(cloudSpecBlob); + + // under deploy deployment mode, cloud spec file will be ignored even when it exists + final GcsBucketSpecFetcher defaultBucketSpecFetcher = new GcsBucketSpecFetcher(storage, BUCKET_NAME); + assertEquals(Optional.of(defaultSpecBlob), + defaultBucketSpecFetcher.getSpecAsBlob(DOCKER_REPOSITORY, DOCKER_IMAGE_TAG)); + + // under OSS deployment mode, cloud spec file will be ignored even when it exists + final GcsBucketSpecFetcher ossBucketSpecFetcher = new GcsBucketSpecFetcher(storage, BUCKET_NAME, DeploymentMode.OSS); + assertEquals(Optional.of(defaultSpecBlob), + ossBucketSpecFetcher.getSpecAsBlob(DOCKER_REPOSITORY, DOCKER_IMAGE_TAG)); + + final GcsBucketSpecFetcher cloudBucketSpecFetcher = new GcsBucketSpecFetcher(storage, BUCKET_NAME, DeploymentMode.CLOUD); + assertEquals(Optional.of(cloudSpecBlob), + cloudBucketSpecFetcher.getSpecAsBlob(DOCKER_REPOSITORY, DOCKER_IMAGE_TAG)); + } + + /** + * Test {@link GcsBucketSpecFetcher#getSpecAsBlob(String, String, String, DeploymentMode)}. 
+ */ + @Test + void testBasicGetSpecAsBlob() { + when(storage.get(BUCKET_NAME, DEFAULT_SPEC_PATH)).thenReturn(defaultSpecBlob); + when(storage.get(BUCKET_NAME, CLOUD_SPEC_PATH)).thenReturn(cloudSpecBlob); + + final GcsBucketSpecFetcher bucketSpecFetcher = new GcsBucketSpecFetcher(storage, BUCKET_NAME); + assertEquals(Optional.of(defaultSpecBlob), + bucketSpecFetcher.getSpecAsBlob(DOCKER_REPOSITORY, DOCKER_IMAGE_TAG, DEFAULT_SPEC_FILE, DeploymentMode.OSS)); + assertEquals(Optional.of(cloudSpecBlob), + bucketSpecFetcher.getSpecAsBlob(DOCKER_REPOSITORY, DOCKER_IMAGE_TAG, CLOUD_SPEC_FILE, DeploymentMode.OSS)); + } + } diff --git a/airbyte-container-orchestrator/Dockerfile b/airbyte-container-orchestrator/Dockerfile index 2af74adcaf39..d5f788129e45 100644 --- a/airbyte-container-orchestrator/Dockerfile +++ b/airbyte-container-orchestrator/Dockerfile @@ -28,7 +28,7 @@ RUN echo "deb [signed-by=/usr/share/keyrings/kubernetes-archive-keyring.gpg] htt RUN apt-get update && DEBIAN_FRONTEND=noninteractive apt-get install -y kubectl # Don't change this manually. 
Bump version expects to make moves based on this string -ARG VERSION=0.39.17-alpha +ARG VERSION=0.39.28-alpha ENV APPLICATION airbyte-container-orchestrator ENV VERSION=${VERSION} diff --git a/airbyte-container-orchestrator/build.gradle b/airbyte-container-orchestrator/build.gradle index ef6b2fe48cde..d694f6466ecd 100644 --- a/airbyte-container-orchestrator/build.gradle +++ b/airbyte-container-orchestrator/build.gradle @@ -21,8 +21,8 @@ dependencies { testImplementation 'org.mockito:mockito-inline:2.13.0' testImplementation libs.postgresql - testImplementation libs.testcontainers - testImplementation libs.testcontainers.postgresql + testImplementation libs.platform.testcontainers + testImplementation libs.platform.testcontainers.postgresql testImplementation project(':airbyte-commons-docker') } diff --git a/airbyte-container-orchestrator/src/main/java/io/airbyte/container_orchestrator/ContainerOrchestratorApp.java b/airbyte-container-orchestrator/src/main/java/io/airbyte/container_orchestrator/ContainerOrchestratorApp.java index 3fa174a57622..1a9a95403760 100644 --- a/airbyte-container-orchestrator/src/main/java/io/airbyte/container_orchestrator/ContainerOrchestratorApp.java +++ b/airbyte-container-orchestrator/src/main/java/io/airbyte/container_orchestrator/ContainerOrchestratorApp.java @@ -4,6 +4,8 @@ package io.airbyte.container_orchestrator; +import io.airbyte.commons.features.EnvVariableFeatureFlags; +import io.airbyte.commons.features.FeatureFlags; import io.airbyte.commons.logging.LoggingHelper; import io.airbyte.commons.logging.MdcScope; import io.airbyte.config.Configs; @@ -60,17 +62,20 @@ public class ContainerOrchestratorApp { private final JobRunConfig jobRunConfig; private final KubePodInfo kubePodInfo; private final Configs configs; + private final FeatureFlags featureFlags; public ContainerOrchestratorApp( final String application, final Map envMap, final JobRunConfig jobRunConfig, - final KubePodInfo kubePodInfo) { + final KubePodInfo kubePodInfo, + 
final FeatureFlags featureFlags) { this.application = application; this.envMap = envMap; this.jobRunConfig = jobRunConfig; this.kubePodInfo = kubePodInfo; this.configs = new EnvConfigs(envMap); + this.featureFlags = featureFlags; } private void configureLogging() { @@ -102,7 +107,7 @@ private void runInternal(final DefaultAsyncStateManager asyncStateManager) { final WorkerConfigs workerConfigs = new WorkerConfigs(configs); final ProcessFactory processFactory = getProcessBuilderFactory(configs, workerConfigs); - final JobOrchestrator jobOrchestrator = getJobOrchestrator(configs, workerConfigs, processFactory, application); + final JobOrchestrator jobOrchestrator = getJobOrchestrator(configs, workerConfigs, processFactory, application, featureFlags); if (jobOrchestrator == null) { throw new IllegalStateException("Could not find job orchestrator for application: " + application); @@ -174,8 +179,9 @@ public static void main(final String[] args) { final var envMap = JobOrchestrator.readEnvMap(); final var jobRunConfig = JobOrchestrator.readJobRunConfig(); final var kubePodInfo = JobOrchestrator.readKubePodInfo(); + final FeatureFlags featureFlags = new EnvVariableFeatureFlags(); - final var app = new ContainerOrchestratorApp(applicationName, envMap, jobRunConfig, kubePodInfo); + final var app = new ContainerOrchestratorApp(applicationName, envMap, jobRunConfig, kubePodInfo, featureFlags); app.run(); } catch (final Throwable t) { log.error("Orchestrator failed...", t); @@ -187,10 +193,11 @@ public static void main(final String[] args) { private static JobOrchestrator getJobOrchestrator(final Configs configs, final WorkerConfigs workerConfigs, final ProcessFactory processFactory, - final String application) { + final String application, + final FeatureFlags featureFlags) { return switch (application) { - case ReplicationLauncherWorker.REPLICATION -> new ReplicationJobOrchestrator(configs, workerConfigs, processFactory); + case ReplicationLauncherWorker.REPLICATION -> new 
ReplicationJobOrchestrator(configs, workerConfigs, processFactory, featureFlags); case NormalizationLauncherWorker.NORMALIZATION -> new NormalizationJobOrchestrator(configs, workerConfigs, processFactory); case DbtLauncherWorker.DBT -> new DbtJobOrchestrator(configs, workerConfigs, processFactory); case AsyncOrchestratorPodProcess.NO_OP -> new NoOpOrchestrator(); diff --git a/airbyte-container-orchestrator/src/main/java/io/airbyte/container_orchestrator/ReplicationJobOrchestrator.java b/airbyte-container-orchestrator/src/main/java/io/airbyte/container_orchestrator/ReplicationJobOrchestrator.java index 9f153e7e87d3..fd7017bf82ba 100644 --- a/airbyte-container-orchestrator/src/main/java/io/airbyte/container_orchestrator/ReplicationJobOrchestrator.java +++ b/airbyte-container-orchestrator/src/main/java/io/airbyte/container_orchestrator/ReplicationJobOrchestrator.java @@ -4,6 +4,7 @@ package io.airbyte.container_orchestrator; +import io.airbyte.commons.features.FeatureFlags; import io.airbyte.commons.json.Jsons; import io.airbyte.config.Configs; import io.airbyte.config.ReplicationOutput; @@ -37,11 +38,16 @@ public class ReplicationJobOrchestrator implements JobOrchestrator runJob() throws Exception { log.info("Setting up source..."); // reset jobs use an empty source to induce resetting all data in destination. final AirbyteSource airbyteSource = - sourceLauncherConfig.getDockerImage().equals(WorkerConstants.RESET_JOB_SOURCE_DOCKER_IMAGE_STUB) ? new EmptyAirbyteSource() + sourceLauncherConfig.getDockerImage().equals(WorkerConstants.RESET_JOB_SOURCE_DOCKER_IMAGE_STUB) ? 
new EmptyAirbyteSource( + featureFlags.useStreamCapableState()) : new DefaultAirbyteSource(workerConfigs, sourceLauncher); log.info("Setting up replication worker..."); diff --git a/airbyte-db/db-lib/README.md b/airbyte-db/db-lib/README.md index 087a051920e4..22d3dca69642 100644 --- a/airbyte-db/db-lib/README.md +++ b/airbyte-db/db-lib/README.md @@ -21,15 +21,15 @@ Check `io.airbyte.db.instance.configs` for example. # How to Write a Migration - Run the `newMigration` command to create a new migration file in `io.airbyte.db.instance..migrations`. - - Configs database: `./gradlew :airbyte-db:lib:newConfigsMigration`. - - Jobs database: `./gradlew :airbyte-db:lib:newJobsMigration`. + - Configs database: `./gradlew :airbyte-db:db-lib:newConfigsMigration`. + - Jobs database: `./gradlew :airbyte-db:db-lib:newJobsMigration`. - Write the migration using [`jOOQ`](https://www.jooq.org/). - Use the `runMigration` command to apply your newly written migration if you want to test it. - - Configs database: `./gradlew :airbyte-db:lib:runConfigsMigration`. - - Jobs database: `./gradlew :airbyte-db:lib:runJobsMigration`. + - Configs database: `./gradlew :airbyte-db:db-lib:runConfigsMigration`. + - Jobs database: `./gradlew :airbyte-db:db-lib:runJobsMigration`. - Run the `dumpSchema` command to update the database schema. - - Configs database: `./gradlew :airbyte-db:lib:dumpConfigsSchema` - - Jobs database: `./gradlew :airbyte-db:lib:dumpJobsSchema` + - Configs database: `./gradlew :airbyte-db:db-lib:dumpConfigsSchema` + - Jobs database: `./gradlew :airbyte-db:db-lib:dumpJobsSchema` ## Migration Filename - The name of the file should follow this pattern: `V(version)__(migration_description_in_snake_case).java`. 
diff --git a/airbyte-db/db-lib/build.gradle b/airbyte-db/db-lib/build.gradle index 136f41452985..80b1fc5bca94 100644 --- a/airbyte-db/db-lib/build.gradle +++ b/airbyte-db/db-lib/build.gradle @@ -12,7 +12,10 @@ dependencies { implementation project(':airbyte-json-validation') implementation project(':airbyte-config:config-models') implementation libs.flyway.core - implementation libs.testcontainers.postgresql + + // Mark as compile only to avoid leaking transitively to connectors + compileOnly libs.platform.testcontainers.postgresql + // These are required because gradle might be using lower version of Jna from other // library transitive dependency. Can be removed if we can figure out which library is the cause. // Refer: https://github.com/testcontainers/testcontainers-java/issues/3834#issuecomment-825409079 @@ -21,6 +24,7 @@ dependencies { testImplementation project(':airbyte-test-utils') testImplementation 'org.apache.commons:commons-lang3:3.11' + testImplementation libs.platform.testcontainers.postgresql // Big Query implementation('com.google.cloud:google-cloud-bigquery:1.133.1') diff --git a/airbyte-db/db-lib/src/main/java/io/airbyte/db/DataTypeUtils.java b/airbyte-db/db-lib/src/main/java/io/airbyte/db/DataTypeUtils.java index d63f91d4700f..707946df2c6b 100644 --- a/airbyte-db/db-lib/src/main/java/io/airbyte/db/DataTypeUtils.java +++ b/airbyte-db/db-lib/src/main/java/io/airbyte/db/DataTypeUtils.java @@ -25,6 +25,11 @@ public class DataTypeUtils { public static final String DATE_FORMAT_WITH_MILLISECONDS_PATTERN = "yyyy-MM-dd'T'HH:mm:ss.SSS'Z'"; + public static final DateTimeFormatter TIME_FORMATTER = DateTimeFormatter.ofPattern("HH:mm:ss.SSSSSS"); + public static final DateTimeFormatter TIMESTAMP_FORMATTER = DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm:ss.SSSSSS"); + public static final DateTimeFormatter TIMETZ_FORMATTER = DateTimeFormatter.ofPattern("HH:mm:ss.SSSSSSXXX"); + public static final DateTimeFormatter TIMESTAMPTZ_FORMATTER = 
DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm:ss.SSSSSSXXX"); + // wrap SimpleDateFormat in a function because SimpleDateFormat is not threadsafe as a static final. public static DateFormat getDateFormat() { return new SimpleDateFormat(DATE_FORMAT_PATTERN); // Quoted "Z" to indicate UTC, no timezone offset; diff --git a/airbyte-db/db-lib/src/main/java/io/airbyte/db/bigquery/BigQuerySourceOperations.java b/airbyte-db/db-lib/src/main/java/io/airbyte/db/bigquery/BigQuerySourceOperations.java index f71bad6ff012..8581885528b2 100644 --- a/airbyte-db/db-lib/src/main/java/io/airbyte/db/bigquery/BigQuerySourceOperations.java +++ b/airbyte-db/db-lib/src/main/java/io/airbyte/db/bigquery/BigQuerySourceOperations.java @@ -9,6 +9,7 @@ import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.node.ArrayNode; +import com.fasterxml.jackson.databind.node.ContainerNode; import com.fasterxml.jackson.databind.node.ObjectNode; import com.google.cloud.bigquery.Field; import com.google.cloud.bigquery.FieldList; @@ -19,6 +20,7 @@ import io.airbyte.commons.json.Jsons; import io.airbyte.db.DataTypeUtils; import io.airbyte.db.SourceOperations; +import io.airbyte.db.util.JsonUtil; import io.airbyte.protocol.models.JsonSchemaType; import java.text.DateFormat; import java.text.ParseException; @@ -43,20 +45,19 @@ public JsonNode rowToJson(final BigQueryResultSet bigQueryResultSet) { return jsonNode; } - private void fillObjectNode(final String fieldName, final StandardSQLTypeName fieldType, final FieldValue fieldValue, final ObjectNode node) { + private void fillObjectNode(final String fieldName, final StandardSQLTypeName fieldType, final FieldValue fieldValue, final ContainerNode node) { switch (fieldType) { - case BOOL -> node.put(fieldName, fieldValue.getBooleanValue()); - case INT64 -> node.put(fieldName, fieldValue.getLongValue()); - case FLOAT64 -> node.put(fieldName, fieldValue.getDoubleValue()); - case NUMERIC -> node.put(fieldName, 
fieldValue.getNumericValue()); - case BIGNUMERIC -> node.put(fieldName, returnNullIfInvalid(fieldValue::getNumericValue)); - case STRING -> node.put(fieldName, fieldValue.getStringValue()); - case BYTES -> node.put(fieldName, fieldValue.getBytesValue()); - case DATE -> node.put(fieldName, toISO8601String(getDateValue(fieldValue, BIG_QUERY_DATE_FORMAT))); - case DATETIME -> node.put(fieldName, toISO8601String(getDateValue(fieldValue, BIG_QUERY_DATETIME_FORMAT))); - case TIMESTAMP -> node.put(fieldName, toISO8601String(fieldValue.getTimestampValue() / 1000)); - case TIME -> node.put(fieldName, fieldValue.getStringValue()); - default -> node.put(fieldName, fieldValue.getStringValue()); + case BOOL -> JsonUtil.putBooleanValueIntoJson(node, fieldValue.getBooleanValue(), fieldName); + case INT64 -> JsonUtil.putLongValueIntoJson(node, fieldValue.getLongValue(), fieldName); + case FLOAT64 -> JsonUtil.putDoubleValueIntoJson(node, fieldValue.getDoubleValue(), fieldName); + case NUMERIC -> JsonUtil.putBigDecimalValueIntoJson(node, fieldValue.getNumericValue(), fieldName); + case BIGNUMERIC -> JsonUtil.putBigDecimalValueIntoJson(node, returnNullIfInvalid(fieldValue::getNumericValue), fieldName); + case STRING, TIME -> JsonUtil.putStringValueIntoJson(node, fieldValue.getStringValue(), fieldName); + case BYTES -> JsonUtil.putBytesValueIntoJson(node, fieldValue.getBytesValue(), fieldName); + case DATE -> JsonUtil.putStringValueIntoJson(node, toISO8601String(getDateValue(fieldValue, BIG_QUERY_DATE_FORMAT)), fieldName); + case DATETIME -> JsonUtil.putStringValueIntoJson(node, toISO8601String(getDateValue(fieldValue, BIG_QUERY_DATETIME_FORMAT)), fieldName); + case TIMESTAMP -> JsonUtil.putStringValueIntoJson(node, toISO8601String(fieldValue.getTimestampValue() / 1000), fieldName); + default -> JsonUtil.putStringValueIntoJson(node, fieldValue.getStringValue(), fieldName); } } @@ -74,7 +75,7 @@ private void setJsonField(final Field field, final FieldValue fieldValue, final final 
FieldList subFields = field.getSubFields(); // Array of primitive if (subFields == null || subFields.isEmpty()) { - fieldValue.getRepeatedValue().forEach(arrayFieldValue -> fillObjectNode(fieldName, fieldType, arrayFieldValue, arrayNode.addObject())); + fieldValue.getRepeatedValue().forEach(arrayFieldValue -> fillObjectNode(fieldName, fieldType, arrayFieldValue, arrayNode)); // Array of records } else { for (final FieldValue arrayFieldValue : fieldValue.getRepeatedValue()) { diff --git a/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/migrations/V0_39_17_001__AddStreamDescriptorsToStateTable.java b/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/migrations/V0_39_17_001__AddStreamDescriptorsToStateTable.java new file mode 100644 index 000000000000..5505378858c0 --- /dev/null +++ b/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/migrations/V0_39_17_001__AddStreamDescriptorsToStateTable.java @@ -0,0 +1,99 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.db.instance.configs.migrations; + +import com.google.common.annotations.VisibleForTesting; +import java.util.Arrays; +import org.flywaydb.core.api.migration.BaseJavaMigration; +import org.flywaydb.core.api.migration.Context; +import org.jooq.Catalog; +import org.jooq.DSLContext; +import org.jooq.EnumType; +import org.jooq.Schema; +import org.jooq.impl.DSL; +import org.jooq.impl.SQLDataType; +import org.jooq.impl.SchemaImpl; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class V0_39_17_001__AddStreamDescriptorsToStateTable extends BaseJavaMigration { + + private static final Logger LOGGER = LoggerFactory.getLogger(V0_39_17_001__AddStreamDescriptorsToStateTable.class); + + @Override + public void migrate(final Context context) throws Exception { + LOGGER.info("Running migration: {}", this.getClass().getSimpleName()); + + // Warning: please do not use any jOOQ generated code to write a migration. 
+ // As database schema changes, the generated jOOQ code can be deprecated. So + // old migration may not compile if there is any generated code. + final DSLContext ctx = DSL.using(context.getConnection()); + + migrate(ctx); + } + + @VisibleForTesting + public static void migrate(final DSLContext ctx) { + createStateTypeEnum(ctx); + addStreamDescriptorFieldsToStateTable(ctx); + } + + private static void createStateTypeEnum(final DSLContext ctx) { + ctx.createType(StateType.NAME) + .asEnum(Arrays.stream(StateType.values()).map(StateType::getLiteral).toList()) + .execute(); + } + + private static void addStreamDescriptorFieldsToStateTable(final DSLContext ctx) { + final String STATE_TABLE = "state"; + + ctx.alterTable(STATE_TABLE) + .add(Arrays.asList( + DSL.field("stream_name", SQLDataType.CLOB.nullable(true)), + DSL.field("namespace", SQLDataType.CLOB.nullable(true)), + // type defaults to LEGACY to first set the expected type of all existing states + DSL.field("type", SQLDataType.VARCHAR.asEnumDataType(StateType.class).nullable(false).defaultValue(StateType.LEGACY)), + DSL.constraint("state__connection_id__stream_name__namespace__uq") + .unique(DSL.field("connection_id"), DSL.field("stream_name"), DSL.field("namespace")))) + .execute(); + } + + public enum StateType implements EnumType { + + GLOBAL("GLOBAL"), + STREAM("STREAM"), + LEGACY("LEGACY"); + + public static final String NAME = "state_type"; + + StateType(String literal) { + this.literal = literal; + } + + @Override + public String getLiteral() { + return literal; + } + + @Override + public Catalog getCatalog() { + return getSchema().getCatalog(); + } + + @Override + public Schema getSchema() { + return new SchemaImpl(DSL.name("public")); + } + + @Override + public String getName() { + return NAME; + } + + private final String literal; + + } + +} diff --git a/airbyte-db/db-lib/src/main/java/io/airbyte/db/jdbc/AbstractJdbcCompatibleSourceOperations.java 
b/airbyte-db/db-lib/src/main/java/io/airbyte/db/jdbc/AbstractJdbcCompatibleSourceOperations.java index abe9115b75c4..ea4910c16518 100644 --- a/airbyte-db/db-lib/src/main/java/io/airbyte/db/jdbc/AbstractJdbcCompatibleSourceOperations.java +++ b/airbyte-db/db-lib/src/main/java/io/airbyte/db/jdbc/AbstractJdbcCompatibleSourceOperations.java @@ -4,6 +4,9 @@ package io.airbyte.db.jdbc; +import static io.airbyte.db.DataTypeUtils.TIMESTAMPTZ_FORMATTER; +import static io.airbyte.db.DataTypeUtils.TIMETZ_FORMATTER; + import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.node.ArrayNode; @@ -256,13 +259,13 @@ protected DateTime getDateTimeObject(ResultSet resultSet, int index, protected void putTimeWithTimezone(ObjectNode node, String columnName, ResultSet resultSet, int index) throws SQLException { OffsetTime timetz = getDateTimeObject(resultSet, index, OffsetTime.class); - node.put(columnName, timetz.toString()); + node.put(columnName, timetz.format(TIMETZ_FORMATTER)); } protected void putTimestampWithTimezone(ObjectNode node, String columnName, ResultSet resultSet, int index) throws SQLException { OffsetDateTime timestamptz = getDateTimeObject(resultSet, index, OffsetDateTime.class); LocalDate localDate = timestamptz.toLocalDate(); - node.put(columnName, resolveEra(localDate, timestamptz.toString())); + node.put(columnName, resolveEra(localDate, timestamptz.format(TIMESTAMPTZ_FORMATTER))); } protected String resolveEra(LocalDate date, String value) { diff --git a/airbyte-db/db-lib/src/main/java/io/airbyte/db/util/JsonUtil.java b/airbyte-db/db-lib/src/main/java/io/airbyte/db/util/JsonUtil.java new file mode 100644 index 000000000000..7dbb1254222d --- /dev/null +++ b/airbyte-db/db-lib/src/main/java/io/airbyte/db/util/JsonUtil.java @@ -0,0 +1,74 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+ */ + +package io.airbyte.db.util; + +import com.fasterxml.jackson.databind.node.ArrayNode; +import com.fasterxml.jackson.databind.node.ContainerNode; +import com.fasterxml.jackson.databind.node.ObjectNode; +import java.math.BigDecimal; + +public class JsonUtil { + + public static void putBooleanValueIntoJson(final ContainerNode node, final boolean value, final String fieldName) { + if (node instanceof ArrayNode) { + ((ArrayNode) node).add(value); + } else if (node instanceof ObjectNode) { + ((ObjectNode) node).put(fieldName, value); + } else { + throw new RuntimeException("Can't populate the node type : " + node.getClass().getName()); + } + } + + public static void putLongValueIntoJson(final ContainerNode node, final long value, final String fieldName) { + if (node instanceof ArrayNode) { + ((ArrayNode) node).add(value); + } else if (node instanceof ObjectNode) { + ((ObjectNode) node).put(fieldName, value); + } else { + throw new RuntimeException("Can't populate the node type : " + node.getClass().getName()); + } + } + + public static void putDoubleValueIntoJson(final ContainerNode node, final double value, final String fieldName) { + if (node instanceof ArrayNode) { + ((ArrayNode) node).add(value); + } else if (node instanceof ObjectNode) { + ((ObjectNode) node).put(fieldName, value); + } else { + throw new RuntimeException("Can't populate the node type : " + node.getClass().getName()); + } + } + + public static void putBigDecimalValueIntoJson(final ContainerNode node, final BigDecimal value, final String fieldName) { + if (node instanceof ArrayNode) { + ((ArrayNode) node).add(value); + } else if (node instanceof ObjectNode) { + ((ObjectNode) node).put(fieldName, value); + } else { + throw new RuntimeException("Can't populate the node type : " + node.getClass().getName()); + } + } + + public static void putStringValueIntoJson(final ContainerNode node, final String value, final String fieldName) { + if (node instanceof ArrayNode) { + ((ArrayNode) 
node).add(value); + } else if (node instanceof ObjectNode) { + ((ObjectNode) node).put(fieldName, value); + } else { + throw new RuntimeException("Can't populate the node type : " + node.getClass().getName()); + } + } + + public static void putBytesValueIntoJson(final ContainerNode node, final byte[] value, final String fieldName) { + if (node instanceof ArrayNode) { + ((ArrayNode) node).add(value); + } else if (node instanceof ObjectNode) { + ((ObjectNode) node).put(fieldName, value); + } else { + throw new RuntimeException("Can't populate the node type : " + node.getClass().getName()); + } + } + +} diff --git a/airbyte-db/db-lib/src/main/resources/configs_database/schema_dump.txt b/airbyte-db/db-lib/src/main/resources/configs_database/schema_dump.txt index fc109b9dd411..97a509d15966 100644 --- a/airbyte-db/db-lib/src/main/resources/configs_database/schema_dump.txt +++ b/airbyte-db/db-lib/src/main/resources/configs_database/schema_dump.txt @@ -138,6 +138,9 @@ create table "public"."state"( "state" jsonb null, "created_at" timestamptz(35) not null default null, "updated_at" timestamptz(35) not null default null, + "stream_name" text null, + "namespace" text null, + "type" state_type not null default null, constraint "state_pkey" primary key ( "id", @@ -276,6 +279,11 @@ create unique index "connection_operation_pkey" on "public"."connection_operatio "operation_id" asc ); create unique index "operation_pkey" on "public"."operation"("id" asc); +create unique index "state__connection_id__stream_name__namespace__uq" on "public"."state"( + "connection_id" asc, + "stream_name" asc, + "namespace" asc +); create unique index "state_pkey" on "public"."state"( "id" asc, "connection_id" asc diff --git a/airbyte-db/db-lib/src/test/java/io/airbyte/db/instance/configs/migrations/V0_39_17_001__AddStreamDescriptorsToStateTableTest.java b/airbyte-db/db-lib/src/test/java/io/airbyte/db/instance/configs/migrations/V0_39_17_001__AddStreamDescriptorsToStateTableTest.java new file mode 
100644 index 000000000000..901fedacda7b --- /dev/null +++ b/airbyte-db/db-lib/src/test/java/io/airbyte/db/instance/configs/migrations/V0_39_17_001__AddStreamDescriptorsToStateTableTest.java @@ -0,0 +1,226 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.db.instance.configs.migrations; + +import io.airbyte.db.factory.FlywayFactory; +import io.airbyte.db.instance.configs.AbstractConfigsDatabaseTest; +import io.airbyte.db.instance.configs.ConfigsDatabaseMigrator; +import io.airbyte.db.instance.configs.migrations.V0_32_8_001__AirbyteConfigDatabaseDenormalization.ActorType; +import io.airbyte.db.instance.configs.migrations.V0_32_8_001__AirbyteConfigDatabaseDenormalization.NamespaceDefinitionType; +import io.airbyte.db.instance.configs.migrations.V0_39_17_001__AddStreamDescriptorsToStateTable.StateType; +import io.airbyte.db.instance.development.DevDatabaseMigrator; +import java.util.UUID; +import org.flywaydb.core.Flyway; +import org.jooq.DSLContext; +import org.jooq.JSONB; +import org.jooq.exception.DataAccessException; +import org.jooq.impl.DSL; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +public class V0_39_17_001__AddStreamDescriptorsToStateTableTest extends AbstractConfigsDatabaseTest { + + private final String STATE_TABLE = "State"; + + private UUID connection1; + private UUID connection2; + + @Test + public void testSimpleMigration() { + final DSLContext context = getDslContext(); + + // Adding a couple of states + context.insertInto(DSL.table(STATE_TABLE)) + .columns( + DSL.field("id"), + DSL.field("connection_id")) + .values(UUID.randomUUID(), connection1) + .values(UUID.randomUUID(), connection2) + .execute(); + + // Preconditions check: we should have one row in state + Assertions.assertEquals(2, context.select().from(STATE_TABLE).execute()); + + // Applying the migration + 
devConfigsDbMigrator.migrate(); + + final UUID newState = UUID.randomUUID(); + context.insertInto(DSL.table(STATE_TABLE)) + .columns( + DSL.field("id"), + DSL.field("connection_id"), + DSL.field("stream_name")) + .values(newState, connection1, "new_stream") + .execute(); + + System.out.println(context.selectFrom("connection").fetch()); + System.out.println(context.selectFrom(STATE_TABLE).fetch()); + + // Our two initial rows and the new row should be LEGACY + Assertions.assertEquals(3, + context.select() + .from(STATE_TABLE) + .where(DSL.field("type").equal(StateType.LEGACY)) + .execute()); + + // There should be no STREAM or GLOBAL + Assertions.assertEquals(0, + context.select() + .from(STATE_TABLE) + .where(DSL.field("type").in(StateType.GLOBAL, StateType.STREAM)) + .execute()); + } + + @Test + public void testUniquenessConstraint() { + devConfigsDbMigrator.migrate(); + + final DSLContext context = getDslContext(); + context.insertInto(DSL.table(STATE_TABLE)) + .columns( + DSL.field("id"), + DSL.field("connection_id"), + DSL.field("type"), + DSL.field("stream_name"), + DSL.field("namespace")) + .values(UUID.randomUUID(), connection1, StateType.GLOBAL, "stream1", "ns2") + .execute(); + + context.insertInto(DSL.table(STATE_TABLE)) + .columns( + DSL.field("id"), + DSL.field("connection_id"), + DSL.field("type"), + DSL.field("stream_name"), + DSL.field("namespace")) + .values(UUID.randomUUID(), connection1, StateType.GLOBAL, "stream1", "ns1") + .execute(); + + context.insertInto(DSL.table(STATE_TABLE)) + .columns( + DSL.field("id"), + DSL.field("connection_id"), + DSL.field("type"), + DSL.field("stream_name"), + DSL.field("namespace")) + .values(UUID.randomUUID(), connection1, StateType.GLOBAL, "stream2", "ns2") + .execute(); + + Assertions.assertThrows(DataAccessException.class, () -> { + context.insertInto(DSL.table(STATE_TABLE)) + .columns( + DSL.field("id"), + DSL.field("connection_id"), + DSL.field("type"), + DSL.field("stream_name"), + DSL.field("namespace")) + 
.values(UUID.randomUUID(), connection1, StateType.GLOBAL, "stream1", "ns2") + .execute(); + }); + } + + @BeforeEach + public void beforeEach() { + Flyway flyway = FlywayFactory.create(dataSource, "V0_39_17_001__AddStreamDescriptorsToStateTableTest", ConfigsDatabaseMigrator.DB_IDENTIFIER, + ConfigsDatabaseMigrator.MIGRATION_FILE_LOCATION); + ConfigsDatabaseMigrator configsDbMigrator = new ConfigsDatabaseMigrator(database, flyway); + devConfigsDbMigrator = new DevDatabaseMigrator(configsDbMigrator); + + devConfigsDbMigrator.createBaseline(); + injectMockData(); + } + + @AfterEach + public void afterEach() { + // Making sure we reset between tests + dslContext.dropSchemaIfExists("public").cascade().execute(); + dslContext.createSchema("public").execute(); + dslContext.setSchema("public").execute(); + } + + private void injectMockData() { + final DSLContext context = getDslContext(); + + UUID workspaceId = UUID.randomUUID(); + UUID actorId = UUID.randomUUID(); + UUID actorDefinitionId = UUID.randomUUID(); + connection1 = UUID.randomUUID(); + connection2 = UUID.randomUUID(); + + context.insertInto(DSL.table("workspace")) + .columns( + DSL.field("id"), + DSL.field("name"), + DSL.field("slug"), + DSL.field("initial_setup_complete")) + .values( + workspaceId, + "base workspace", + "base_workspace", + true) + .execute(); + context.insertInto(DSL.table("actor_definition")) + .columns( + DSL.field("id"), + DSL.field("name"), + DSL.field("docker_repository"), + DSL.field("docker_image_tag"), + DSL.field("actor_type"), + DSL.field("spec")) + .values( + actorDefinitionId, + "Jenkins", + "farosai/airbyte-jenkins-source", + "0.1.23", + ActorType.source, + JSONB.valueOf("{}")) + .execute(); + context.insertInto(DSL.table("actor")) + .columns( + DSL.field("id"), + DSL.field("workspace_id"), + DSL.field("actor_definition_id"), + DSL.field("name"), + DSL.field("configuration"), + DSL.field("actor_type")) + .values( + actorId, + workspaceId, + actorDefinitionId, + "ActorName", + 
JSONB.valueOf("{}"), + ActorType.source) + .execute(); + + insertConnection(context, connection1, actorId); + insertConnection(context, connection2, actorId); + } + + private void insertConnection(final DSLContext context, final UUID connectionId, final UUID actorId) { + context.insertInto(DSL.table("connection")) + .columns( + DSL.field("id"), + DSL.field("namespace_definition"), + DSL.field("source_id"), + DSL.field("destination_id"), + DSL.field("name"), + DSL.field("catalog"), + DSL.field("manual")) + .values( + connectionId, + NamespaceDefinitionType.source, + actorId, + actorId, + "Connection" + connectionId.toString(), + JSONB.valueOf("{}"), + true) + .execute(); + } + + private DevDatabaseMigrator devConfigsDbMigrator; + +} diff --git a/airbyte-db/jooq/build.gradle b/airbyte-db/jooq/build.gradle index 08b2751ae0e2..4ac3a2821cb4 100644 --- a/airbyte-db/jooq/build.gradle +++ b/airbyte-db/jooq/build.gradle @@ -12,7 +12,7 @@ dependencies { // jOOQ code generation implementation libs.jooq.codegen - implementation libs.testcontainers.postgresql + implementation libs.platform.testcontainers.postgresql // These are required because gradle might be using lower version of Jna from other // library transitive dependency. Can be removed if we can figure out which library is the cause. 
// Refer: https://github.com/testcontainers/testcontainers-java/issues/3834#issuecomment-825409079 @@ -21,6 +21,7 @@ dependencies { // The jOOQ code generator only has access to classes added to the jooqGenerator configuration jooqGenerator project(':airbyte-db:db-lib') + jooqGenerator libs.platform.testcontainers.postgresql } jooq { diff --git a/airbyte-integrations/bases/base-java/build.gradle b/airbyte-integrations/bases/base-java/build.gradle index c1e95c7476b1..3664cd4ce7c0 100644 --- a/airbyte-integrations/bases/base-java/build.gradle +++ b/airbyte-integrations/bases/base-java/build.gradle @@ -11,6 +11,7 @@ dependencies { api 'io.sentry:sentry:5.6.0' implementation 'commons-cli:commons-cli:1.4' + implementation 'net.i2p.crypto:eddsa:0.3.0' implementation 'org.apache.sshd:sshd-mina:2.8.0' // bouncycastle is pinned to version-match the transitive dependency from kubernetes client-java // because a version conflict causes "parameter object not a ECParameterSpec" on ssh tunnel initiation @@ -18,8 +19,8 @@ dependencies { implementation 'org.bouncycastle:bcpkix-jdk15on:1.66' implementation 'org.bouncycastle:bctls-jdk15on:1.66' - implementation libs.testcontainers - implementation libs.testcontainers.jdbc + implementation libs.connectors.testcontainers + implementation libs.connectors.testcontainers.jdbc implementation files(project(':airbyte-integrations:bases:base').airbyteDocker.outputs) diff --git a/airbyte-integrations/bases/base-java/src/main/java/io/airbyte/integrations/base/AirbyteTraceMessageUtility.java b/airbyte-integrations/bases/base-java/src/main/java/io/airbyte/integrations/base/AirbyteTraceMessageUtility.java index 765920ed2421..885a307b3eb3 100644 --- a/airbyte-integrations/bases/base-java/src/main/java/io/airbyte/integrations/base/AirbyteTraceMessageUtility.java +++ b/airbyte-integrations/bases/base-java/src/main/java/io/airbyte/integrations/base/AirbyteTraceMessageUtility.java @@ -9,8 +9,8 @@ import io.airbyte.protocol.models.AirbyteMessage; 
import io.airbyte.protocol.models.AirbyteMessage.Type; import io.airbyte.protocol.models.AirbyteTraceMessage; -import java.util.Arrays; import java.util.function.Consumer; +import org.apache.commons.lang3.exception.ExceptionUtils; public final class AirbyteTraceMessageUtility { @@ -53,7 +53,7 @@ private static AirbyteMessage makeErrorTraceAirbyteMessage( .withFailureType(failureType) .withMessage(displayMessage) .withInternalMessage(e.toString()) - .withStackTrace(Arrays.toString(e.getStackTrace())))); + .withStackTrace(ExceptionUtils.getStackTrace(e)))); } private static AirbyteMessage makeAirbyteMessageFromTraceMessage(AirbyteTraceMessage airbyteTraceMessage) { diff --git a/airbyte-integrations/bases/base-java/src/main/java/io/airbyte/integrations/base/adaptive/AdaptiveDestinationRunner.java b/airbyte-integrations/bases/base-java/src/main/java/io/airbyte/integrations/base/adaptive/AdaptiveDestinationRunner.java new file mode 100644 index 000000000000..f5a5197e55d3 --- /dev/null +++ b/airbyte-integrations/bases/base-java/src/main/java/io/airbyte/integrations/base/adaptive/AdaptiveDestinationRunner.java @@ -0,0 +1,93 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.integrations.base.adaptive; + +import io.airbyte.integrations.base.Destination; +import io.airbyte.integrations.base.IntegrationRunner; +import java.util.function.Supplier; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * This class launches different variants of a destination connector based on where Airbyte is + * deployed. 
+ */ +public class AdaptiveDestinationRunner { + + private static final Logger LOGGER = LoggerFactory.getLogger(AdaptiveDestinationRunner.class); + + private static final String DEPLOYMENT_MODE_KEY = "DEPLOYMENT_MODE"; + private static final String COULD_MODE = "CLOUD"; + + public static OssDestinationBuilder baseOnEnv() { + final String mode = System.getenv(DEPLOYMENT_MODE_KEY); + return new OssDestinationBuilder(mode); + } + + public static final class OssDestinationBuilder { + + private final String deploymentMode; + + private OssDestinationBuilder(final String deploymentMode) { + this.deploymentMode = deploymentMode; + } + + public CloudDestinationBuilder withOssDestination(final Supplier ossDestinationSupplier) { + return new CloudDestinationBuilder<>(deploymentMode, ossDestinationSupplier); + } + + } + + public static final class CloudDestinationBuilder { + + private final String deploymentMode; + private final Supplier ossDestinationSupplier; + + public CloudDestinationBuilder(final String deploymentMode, final Supplier ossDestinationSupplier) { + this.deploymentMode = deploymentMode; + this.ossDestinationSupplier = ossDestinationSupplier; + } + + public Runner withCloudDestination(final Supplier cloudDestinationSupplier) { + return new Runner<>(deploymentMode, ossDestinationSupplier, cloudDestinationSupplier); + } + + } + + public static final class Runner { + + private final String deploymentMode; + private final Supplier ossDestinationSupplier; + private final Supplier cloudDestinationSupplier; + + public Runner(final String deploymentMode, + final Supplier ossDestinationSupplier, + final Supplier cloudDestinationSupplier) { + this.deploymentMode = deploymentMode; + this.ossDestinationSupplier = ossDestinationSupplier; + this.cloudDestinationSupplier = cloudDestinationSupplier; + } + + private Destination getDestination() { + LOGGER.info("Running destination under deployment mode: {}", deploymentMode); + if (deploymentMode != null && 
deploymentMode.equals(COULD_MODE)) { + return cloudDestinationSupplier.get(); + } + if (deploymentMode == null) { + LOGGER.warn("Deployment mode is null, default to OSS mode"); + } + return ossDestinationSupplier.get(); + } + + public void run(final String[] args) throws Exception { + final Destination destination = getDestination(); + LOGGER.info("Starting destination: {}", destination.getClass().getName()); + new IntegrationRunner(destination).run(args); + LOGGER.info("Completed destination: {}", destination.getClass().getName()); + } + + } + +} diff --git a/airbyte-integrations/bases/base-java/src/main/java/io/airbyte/integrations/base/adaptive/AdaptiveSourceRunner.java b/airbyte-integrations/bases/base-java/src/main/java/io/airbyte/integrations/base/adaptive/AdaptiveSourceRunner.java new file mode 100644 index 000000000000..a914f9f08bec --- /dev/null +++ b/airbyte-integrations/bases/base-java/src/main/java/io/airbyte/integrations/base/adaptive/AdaptiveSourceRunner.java @@ -0,0 +1,92 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.integrations.base.adaptive; + +import io.airbyte.integrations.base.IntegrationRunner; +import io.airbyte.integrations.base.Source; +import java.util.function.Supplier; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * This class launches different variants of a source connector based on where Airbyte is deployed. 
+ */ +public class AdaptiveSourceRunner { + + private static final Logger LOGGER = LoggerFactory.getLogger(AdaptiveSourceRunner.class); + + private static final String DEPLOYMENT_MODE_KEY = "DEPLOYMENT_MODE"; + private static final String COULD_MODE = "CLOUD"; + + public static OssSourceBuilder baseOnEnv() { + final String mode = System.getenv(DEPLOYMENT_MODE_KEY); + return new OssSourceBuilder(mode); + } + + public static final class OssSourceBuilder { + + private final String deploymentMode; + + private OssSourceBuilder(final String deploymentMode) { + this.deploymentMode = deploymentMode; + } + + public CloudSourceBuilder withOssSource(final Supplier ossSourceSupplier) { + return new CloudSourceBuilder<>(deploymentMode, ossSourceSupplier); + } + + } + + public static final class CloudSourceBuilder { + + private final String deploymentMode; + private final Supplier ossSourceSupplier; + + public CloudSourceBuilder(final String deploymentMode, final Supplier ossSourceSupplier) { + this.deploymentMode = deploymentMode; + this.ossSourceSupplier = ossSourceSupplier; + } + + public Runner withCloudSource(final Supplier cloudSourceSupplier) { + return new Runner<>(deploymentMode, ossSourceSupplier, cloudSourceSupplier); + } + + } + + public static final class Runner { + + private final String deploymentMode; + private final Supplier ossSourceSupplier; + private final Supplier cloudSourceSupplier; + + public Runner(final String deploymentMode, + final Supplier ossSourceSupplier, + final Supplier cloudSourceSupplier) { + this.deploymentMode = deploymentMode; + this.ossSourceSupplier = ossSourceSupplier; + this.cloudSourceSupplier = cloudSourceSupplier; + } + + private Source getSource() { + LOGGER.info("Running source under deployment mode: {}", deploymentMode); + if (deploymentMode != null && deploymentMode.equals(COULD_MODE)) { + return cloudSourceSupplier.get(); + } + if (deploymentMode == null) { + LOGGER.warn("Deployment mode is null, default to OSS mode"); + } + 
return ossSourceSupplier.get(); + } + + public void run(final String[] args) throws Exception { + final Source source = getSource(); + LOGGER.info("Starting source: {}", source.getClass().getName()); + new IntegrationRunner(source).run(args); + LOGGER.info("Completed source: {}", source.getClass().getName()); + } + + } + +} diff --git a/airbyte-integrations/bases/base-java/src/main/java/io/airbyte/integrations/base/ssh/SshBastionContainer.java b/airbyte-integrations/bases/base-java/src/main/java/io/airbyte/integrations/base/ssh/SshBastionContainer.java index 7b6032061ec7..9fba0d56785a 100644 --- a/airbyte-integrations/bases/base-java/src/main/java/io/airbyte/integrations/base/ssh/SshBastionContainer.java +++ b/airbyte-integrations/bases/base-java/src/main/java/io/airbyte/integrations/base/ssh/SshBastionContainer.java @@ -22,11 +22,9 @@ public class SshBastionContainer { private static final String SSH_USER = "sshuser"; private static final String SSH_PASSWORD = "secret"; - private Network network; private GenericContainer bastion; - public void initAndStartBastion() { - network = Network.newNetwork(); + public void initAndStartBastion(Network network) { bastion = new GenericContainer( new ImageFromDockerfile("bastion-test") .withFileFromClasspath("Dockerfile", "bastion/Dockerfile")) @@ -43,8 +41,7 @@ public JsonNode getTunnelConfig(final SshTunnel.TunnelMethod tunnelMethod, final .put("tunnel_host", Objects.requireNonNull(bastion.getContainerInfo().getNetworkSettings() .getNetworks() - .get(((Network.NetworkImpl) network).getName()) - .getIpAddress())) + .entrySet().stream().findFirst().get().getValue().getIpAddress())) .put("tunnel_method", tunnelMethod) .put("tunnel_port", bastion.getExposedPorts().get(0)) .put("tunnel_user", SSH_USER) @@ -66,8 +63,7 @@ public ImmutableMap.Builder getBasicDbConfigBuider(final JdbcDat return ImmutableMap.builder() .put("host", Objects.requireNonNull(db.getContainerInfo().getNetworkSettings() .getNetworks() - 
.get(((Network.NetworkImpl) getNetWork()).getName()) - .getIpAddress())) + .entrySet().stream().findFirst().get().getValue().getIpAddress())) .put("username", db.getUsername()) .put("password", db.getPassword()) .put("port", db.getExposedPorts().get(0)) @@ -75,16 +71,11 @@ public ImmutableMap.Builder getBasicDbConfigBuider(final JdbcDat .put("ssl", false); } - public Network getNetWork() { - return this.network; - } - public void stopAndCloseContainers(final JdbcDatabaseContainer db) { - db.stop(); - db.close(); bastion.stop(); bastion.close(); - network.close(); + db.stop(); + db.close(); } } diff --git a/airbyte-integrations/bases/base-java/src/main/java/io/airbyte/integrations/destination/buffered_stream_consumer/BufferedStreamConsumer.java b/airbyte-integrations/bases/base-java/src/main/java/io/airbyte/integrations/destination/buffered_stream_consumer/BufferedStreamConsumer.java index c7ae54a0ed81..e5ce77e303e3 100644 --- a/airbyte-integrations/bases/base-java/src/main/java/io/airbyte/integrations/destination/buffered_stream_consumer/BufferedStreamConsumer.java +++ b/airbyte-integrations/bases/base-java/src/main/java/io/airbyte/integrations/destination/buffered_stream_consumer/BufferedStreamConsumer.java @@ -13,6 +13,8 @@ import io.airbyte.integrations.base.AirbyteMessageConsumer; import io.airbyte.integrations.base.AirbyteStreamNameNamespacePair; import io.airbyte.integrations.base.FailureTrackingAirbyteMessageConsumer; +import io.airbyte.integrations.destination.dest_state_lifecycle_manager.DefaultDestStateLifecycleManager; +import io.airbyte.integrations.destination.dest_state_lifecycle_manager.DestStateLifecycleManager; import io.airbyte.integrations.destination.record_buffer.BufferingStrategy; import io.airbyte.protocol.models.AirbyteMessage; import io.airbyte.protocol.models.AirbyteMessage.Type; @@ -80,13 +82,11 @@ public class BufferedStreamConsumer extends FailureTrackingAirbyteMessageConsume private final Map streamToIgnoredRecordCount; private final 
Consumer outputRecordCollector; private final BufferingStrategy bufferingStrategy; + private final DestStateLifecycleManager stateManager; private boolean hasStarted; private boolean hasClosed; - private AirbyteMessage lastFlushedState; - private AirbyteMessage pendingState; - public BufferedStreamConsumer(final Consumer outputRecordCollector, final VoidCallable onStart, final BufferingStrategy bufferingStrategy, @@ -103,7 +103,7 @@ public BufferedStreamConsumer(final Consumer outputRecordCollect this.isValidRecord = isValidRecord; this.streamToIgnoredRecordCount = new HashMap<>(); this.bufferingStrategy = bufferingStrategy; - bufferingStrategy.registerFlushAllEventHook(this::flushQueueToDestination); + this.stateManager = new DefaultDestStateLifecycleManager(); } @Override @@ -134,23 +134,24 @@ protected void acceptTracked(final AirbyteMessage message) throws Exception { return; } - bufferingStrategy.addRecord(stream, message); + // if the buffer flushes, update the states appropriately. + if (bufferingStrategy.addRecord(stream, message)) { + markStatesAsFlushedToTmpDestination(); + } + } else if (message.getType() == Type.STATE) { - pendingState = message; + stateManager.addState(message); } else { LOGGER.warn("Unexpected message: " + message.getType()); } } - private void flushQueueToDestination() { - if (pendingState != null) { - lastFlushedState = pendingState; - pendingState = null; - } + private void markStatesAsFlushedToTmpDestination() { + stateManager.markPendingAsFlushed(); } - private void throwUnrecognizedStream(final ConfiguredAirbyteCatalog catalog, final AirbyteMessage message) { + private static void throwUnrecognizedStream(final ConfiguredAirbyteCatalog catalog, final AirbyteMessage message) { throw new IllegalArgumentException( String.format("Message contained record from a stream that was not in the catalog. 
\ncatalog: %s , \nmessage: %s", Jsons.serialize(catalog), Jsons.serialize(message))); @@ -169,24 +170,31 @@ protected void close(final boolean hasFailed) throws Exception { } else { LOGGER.info("executing on success close procedure."); bufferingStrategy.flushAll(); + markStatesAsFlushedToTmpDestination(); } bufferingStrategy.close(); try { - // if no state was emitted (i.e. full refresh), if there were still no failures, then we can - // still succeed. - if (lastFlushedState == null) { + // flushed is empty in 2 cases: + // 1. either it is full refresh (no state is emitted necessarily). + // 2. it is stream but no states were flushed. + // in both of these cases, if there was a failure, we should not bother committing. otherwise, + // attempt to commit. + if (stateManager.listFlushed().isEmpty()) { onClose.accept(hasFailed); } else { - // if any state message flushed that means we can still go for at least a partial success. + /* + * if any state message was flushed that means we should try to commit what we have. if + * hasFailed=false, then it could be full success. if hasFailed=true, then going for partial + * success. + */ onClose.accept(false); } // if onClose succeeds without exception then we can emit the state record because it means its // records were not only flushed, but committed. 
- if (lastFlushedState != null) { - outputRecordCollector.accept(lastFlushedState); - } + stateManager.markFlushedAsCommitted(); + stateManager.listCommitted().forEach(outputRecordCollector); } catch (final Exception e) { LOGGER.error("Close failed.", e); throw e; diff --git a/airbyte-integrations/bases/base-java/src/main/java/io/airbyte/integrations/destination/dest_state_lifecycle_manager/DefaultDestStateLifecycleManager.java b/airbyte-integrations/bases/base-java/src/main/java/io/airbyte/integrations/destination/dest_state_lifecycle_manager/DefaultDestStateLifecycleManager.java new file mode 100644 index 000000000000..0482e63ebd76 --- /dev/null +++ b/airbyte-integrations/bases/base-java/src/main/java/io/airbyte/integrations/destination/dest_state_lifecycle_manager/DefaultDestStateLifecycleManager.java @@ -0,0 +1,114 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.integrations.destination.dest_state_lifecycle_manager; + +import com.google.common.annotations.VisibleForTesting; +import com.google.common.base.Preconditions; +import io.airbyte.protocol.models.AirbyteMessage; +import io.airbyte.protocol.models.AirbyteMessage.Type; +import io.airbyte.protocol.models.AirbyteStateMessage.AirbyteStateType; +import java.util.Queue; +import java.util.function.Supplier; + +/** + * Detects the type of the state being received by anchoring on the first state type it sees. Fail + * if receives states of multiple types--each instance of this class can only support state messages + * of one type. The protocol specifies that a source should emit state messages of a single type + * during a sync, so a single instance of this manager is sufficient for a destination to track + * state during a sync. + * + * Strategy: Delegates state messages of each type to a StateManager that is appropriate to that + * state type. + * + * Per the protocol, if state type is not set, assumes the LEGACY state type. 
+ */ +public class DefaultDestStateLifecycleManager implements DestStateLifecycleManager { + + private AirbyteStateType stateType; + private final Supplier internalStateManagerSupplier; + + public DefaultDestStateLifecycleManager() { + this(new DestSingleStateLifecycleManager(), new DestStreamStateLifecycleManager()); + } + + @VisibleForTesting + DefaultDestStateLifecycleManager(final DestStateLifecycleManager singleStateManager, final DestStateLifecycleManager streamStateManager) { + stateType = null; + // allows us to delegate calls to the appropriate underlying state manager. + internalStateManagerSupplier = () -> { + if (stateType == AirbyteStateType.GLOBAL || stateType == AirbyteStateType.LEGACY || stateType == null) { + return singleStateManager; + } else if (stateType == AirbyteStateType.STREAM) { + return streamStateManager; + } else { + throw new IllegalArgumentException("unrecognized state type"); + } + }; + } + + @Override + public void addState(final AirbyteMessage message) { + Preconditions.checkArgument(message.getType() == Type.STATE, "Messages passed to State Manager must be of type STATE."); + Preconditions.checkArgument(isStateTypeCompatible(stateType, message.getState().getType())); + + setManagerStateTypeIfNotSet(message); + + internalStateManagerSupplier.get().addState(message); + } + + /** + * Given the type of previously recorded state by the state manager, determines if a newly added + * state message's type is compatible. Based on the previously set state type, determines if a new + * one is compatible. If the previous state is null, any new state is compatible. If new state type + * is null, it should be treated as LEGACY. Thus, previousStateType == LEGACY and newStateType == + * null IS compatible. All other state types are compatible based on equality. 
+ * + * @param previousStateType - state type previously recorded by the state manager + * @param newStateType - state message of a newly added message + * @return true if compatible, otherwise false + */ + private static boolean isStateTypeCompatible(final AirbyteStateType previousStateType, final AirbyteStateType newStateType) { + return previousStateType == null || previousStateType == AirbyteStateType.LEGACY && newStateType == null || previousStateType == newStateType; + } + + /** + * If the state type for the manager is not set, sets it using the state type from the message. If + * the type on the message is null, we assume it is LEGACY. After the first, state message is added + * to the manager, the state type is set and is immutable. + * + * @param message - state message whose state will be used if internal state type is not set + */ + private void setManagerStateTypeIfNotSet(final AirbyteMessage message) { + // detect and set state type. + if (stateType == null) { + if (message.getState().getType() == null) { + stateType = AirbyteStateType.LEGACY; + } else { + stateType = message.getState().getType(); + } + } + } + + @Override + public void markPendingAsFlushed() { + internalStateManagerSupplier.get().markPendingAsFlushed(); + } + + @Override + public Queue listFlushed() { + return internalStateManagerSupplier.get().listFlushed(); + } + + @Override + public void markFlushedAsCommitted() { + internalStateManagerSupplier.get().markFlushedAsCommitted(); + } + + @Override + public Queue listCommitted() { + return internalStateManagerSupplier.get().listCommitted(); + } + +} diff --git a/airbyte-integrations/bases/base-java/src/main/java/io/airbyte/integrations/destination/dest_state_lifecycle_manager/DestSingleStateLifecycleManager.java b/airbyte-integrations/bases/base-java/src/main/java/io/airbyte/integrations/destination/dest_state_lifecycle_manager/DestSingleStateLifecycleManager.java new file mode 100644 index 000000000000..79096c009f23 --- /dev/null +++ 
b/airbyte-integrations/bases/base-java/src/main/java/io/airbyte/integrations/destination/dest_state_lifecycle_manager/DestSingleStateLifecycleManager.java @@ -0,0 +1,68 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.integrations.destination.dest_state_lifecycle_manager; + +import com.google.common.annotations.VisibleForTesting; +import io.airbyte.protocol.models.AirbyteMessage; +import java.util.Collections; +import java.util.LinkedList; +import java.util.List; +import java.util.Queue; + +/** + * This {@link DestStateLifecycleManager} handles any state where there is a guarantee that any + * single state message represents the state for the ENTIRE connection. At the time of writing, + * GLOBAL and LEGACY state types are the state type that match this pattern. + * + * Does NOT store duplicates. Because each state message represents the entire state for the + * connection, it only stores (and emits) the LAST state it received at each phase. + */ +public class DestSingleStateLifecycleManager implements DestStateLifecycleManager { + + private AirbyteMessage lastPendingState; + private AirbyteMessage lastFlushedState; + private AirbyteMessage lastCommittedState; + + @Override + public void addState(final AirbyteMessage message) { + lastPendingState = message; + } + + @VisibleForTesting + Queue listPending() { + return stateMessageToQueue(lastPendingState); + } + + @Override + public void markPendingAsFlushed() { + if (lastPendingState != null) { + lastFlushedState = lastPendingState; + lastPendingState = null; + } + } + + @Override + public Queue listFlushed() { + return stateMessageToQueue(lastFlushedState); + } + + @Override + public void markFlushedAsCommitted() { + if (lastFlushedState != null) { + lastCommittedState = lastFlushedState; + lastFlushedState = null; + } + } + + @Override + public Queue listCommitted() { + return stateMessageToQueue(lastCommittedState); + } + + private static Queue stateMessageToQueue(final 
AirbyteMessage stateMessage) { + return new LinkedList<>(stateMessage == null ? Collections.emptyList() : List.of(stateMessage)); + } + +} diff --git a/airbyte-integrations/bases/base-java/src/main/java/io/airbyte/integrations/destination/dest_state_lifecycle_manager/DestStateLifecycleManager.java b/airbyte-integrations/bases/base-java/src/main/java/io/airbyte/integrations/destination/dest_state_lifecycle_manager/DestStateLifecycleManager.java new file mode 100644 index 000000000000..8db820c3dbe8 --- /dev/null +++ b/airbyte-integrations/bases/base-java/src/main/java/io/airbyte/integrations/destination/dest_state_lifecycle_manager/DestStateLifecycleManager.java @@ -0,0 +1,53 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.integrations.destination.dest_state_lifecycle_manager; + +import io.airbyte.protocol.models.AirbyteMessage; +import java.util.Queue; + +/** + * This class manages the lifecycle of state message. It tracks state messages that are in 3 states: + *
    + *
  1. pending - associated records have been accepted by the connector but has NOT been pushed to + * the destination
  2. + *
  3. flushed - associated records have been flushed to tmp storage in the destination but have NOT + * been committed
  4. + *
  5. committed - associated records have been committed
  6. + *
+ */ +public interface DestStateLifecycleManager { + + /** + * Accepts a state into the manager. The state starts in a pending state. + * + * @param message - airbyte message of type state + */ + void addState(AirbyteMessage message); + + /** + * Moves any tracked state messages that are currently pending to flushed. + */ + void markPendingAsFlushed(); + + /** + * List all tracked state messages that are flushed. + * + * @return list of state messages + */ + Queue listFlushed(); + + /** + * Moves any tracked state messages that are currently flushed to committed. + */ + void markFlushedAsCommitted(); + + /** + * List all tracked state messages that are committed. + * + * @return list of state messages + */ + Queue listCommitted(); + +} diff --git a/airbyte-integrations/bases/base-java/src/main/java/io/airbyte/integrations/destination/dest_state_lifecycle_manager/DestStreamStateLifecycleManager.java b/airbyte-integrations/bases/base-java/src/main/java/io/airbyte/integrations/destination/dest_state_lifecycle_manager/DestStreamStateLifecycleManager.java new file mode 100644 index 000000000000..8311e0adcb2e --- /dev/null +++ b/airbyte-integrations/bases/base-java/src/main/java/io/airbyte/integrations/destination/dest_state_lifecycle_manager/DestStreamStateLifecycleManager.java @@ -0,0 +1,110 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+ */ + +package io.airbyte.integrations.destination.dest_state_lifecycle_manager; + +import com.google.common.annotations.VisibleForTesting; +import com.google.common.base.Preconditions; +import io.airbyte.protocol.models.AirbyteMessage; +import io.airbyte.protocol.models.AirbyteStateMessage.AirbyteStateType; +import io.airbyte.protocol.models.StreamDescriptor; +import java.util.Comparator; +import java.util.HashMap; +import java.util.LinkedList; +import java.util.Map; +import java.util.Map.Entry; +import java.util.Queue; +import java.util.stream.Collectors; + +/** + * This {@link DestStateLifecycleManager} handles any state where the state messages are scoped by + * stream. In these cases, at each state of the process, it tracks the LAST state message for EACH + * stream (no duplicates!). + * + * Guaranteed to output state messages in order relative to other messages of the SAME state. Does + * NOT guarantee that state messages of different streams will be output in the order in which they + * were received. State messages across streams will be emitted in alphabetical order (primary sort + * on namespace, secondary on name). 
+ */ +public class DestStreamStateLifecycleManager implements DestStateLifecycleManager { + + private final Map streamToLastPendingState; + private final Map streamToLastFlushedState; + private final Map streamToLastCommittedState; + + public DestStreamStateLifecycleManager() { + streamToLastPendingState = new HashMap<>(); + streamToLastFlushedState = new HashMap<>(); + streamToLastCommittedState = new HashMap<>(); + } + + @Override + public void addState(final AirbyteMessage message) { + Preconditions.checkArgument(message.getState().getType() == AirbyteStateType.STREAM); + streamToLastPendingState.put(message.getState().getStream().getStreamDescriptor(), message); + } + + @VisibleForTesting + Queue listPending() { + return listStatesInOrder(streamToLastPendingState); + } + + @Override + public void markPendingAsFlushed() { + moveToNextPhase(streamToLastPendingState, streamToLastFlushedState); + } + + @Override + public Queue listFlushed() { + return listStatesInOrder(streamToLastFlushedState); + } + + @Override + public void markFlushedAsCommitted() { + moveToNextPhase(streamToLastFlushedState, streamToLastCommittedState); + } + + @Override + public Queue listCommitted() { + return listStatesInOrder(streamToLastCommittedState); + } + + /** + * Lists out the states in the stream to state maps. Guarantees a deterministic sort order, which is + * handy because we are going from a map (unsorted) to a queue. The sort order primary sort on + * namespace (with null at the top) followed by secondary sort on name. This maps onto the pretty + * common order that we list streams elsewhere. + * + * @param streamToState - map of stream descriptor to its last state + * @return queue with the states ordered per the sort mentioned above + */ + private static Queue listStatesInOrder(final Map streamToState) { + return streamToState + .entrySet() + .stream() + // typically, we support by namespace and then stream name, so we retain that pattern here. 
+ .sorted(Comparator + ., String>comparing( + entry -> entry.getKey().getNamespace(), + Comparator.nullsFirst(Comparator.naturalOrder())) // namespace is allowed to be null + .thenComparing(entry -> entry.getKey().getName())) + .map(Entry::getValue) + .collect(Collectors.toCollection(LinkedList::new)); + } + + /** + * Moves all state messages from previous phase into next phase. + * + * @param prevPhase - map of stream to state messages for previous phase that will be moved to next + * phase. when this method returns this map will be empty. + * @param nextPhase - map into which state messages from prevPhase will be added. + */ + private static void moveToNextPhase(final Map prevPhase, final Map nextPhase) { + if (!prevPhase.isEmpty()) { + nextPhase.putAll(prevPhase); + prevPhase.clear(); + } + } + +} diff --git a/airbyte-integrations/bases/base-java/src/main/java/io/airbyte/integrations/destination/record_buffer/BufferingStrategy.java b/airbyte-integrations/bases/base-java/src/main/java/io/airbyte/integrations/destination/record_buffer/BufferingStrategy.java index 85f0d0022763..b63890666628 100644 --- a/airbyte-integrations/bases/base-java/src/main/java/io/airbyte/integrations/destination/record_buffer/BufferingStrategy.java +++ b/airbyte-integrations/bases/base-java/src/main/java/io/airbyte/integrations/destination/record_buffer/BufferingStrategy.java @@ -4,7 +4,6 @@ package io.airbyte.integrations.destination.record_buffer; -import io.airbyte.commons.concurrency.VoidCallable; import io.airbyte.integrations.base.AirbyteStreamNameNamespacePair; import io.airbyte.protocol.models.AirbyteMessage; @@ -22,8 +21,13 @@ public interface BufferingStrategy extends AutoCloseable { /** * Add a new message to the buffer while consuming streams + * + * @param stream - stream associated with record + * @param message - message to buffer + * @return true if this record cause ALL records in the buffer to flush, otherwise false. 
+ * @throws Exception throw on failure */ - void addRecord(AirbyteStreamNameNamespacePair stream, AirbyteMessage message) throws Exception; + boolean addRecord(AirbyteStreamNameNamespacePair stream, AirbyteMessage message) throws Exception; /** * Flush buffered messages in a writer from a particular stream @@ -40,12 +44,4 @@ public interface BufferingStrategy extends AutoCloseable { */ void clear() throws Exception; - /** - * When all buffers are being flushed, we can signal some parent function of this event for further - * processing. - * - * THis install such a hook to be triggered when that happens. - */ - void registerFlushAllEventHook(VoidCallable onFlushAllEventHook); - } diff --git a/airbyte-integrations/bases/base-java/src/main/java/io/airbyte/integrations/destination/record_buffer/InMemoryRecordBufferingStrategy.java b/airbyte-integrations/bases/base-java/src/main/java/io/airbyte/integrations/destination/record_buffer/InMemoryRecordBufferingStrategy.java index d01454b500ee..50f01ceece6b 100644 --- a/airbyte-integrations/bases/base-java/src/main/java/io/airbyte/integrations/destination/record_buffer/InMemoryRecordBufferingStrategy.java +++ b/airbyte-integrations/bases/base-java/src/main/java/io/airbyte/integrations/destination/record_buffer/InMemoryRecordBufferingStrategy.java @@ -4,7 +4,6 @@ package io.airbyte.integrations.destination.record_buffer; -import io.airbyte.commons.concurrency.VoidCallable; import io.airbyte.integrations.base.AirbyteStreamNameNamespacePair; import io.airbyte.integrations.base.sentry.AirbyteSentry; import io.airbyte.integrations.destination.buffered_stream_consumer.CheckAndRemoveRecordWriter; @@ -39,7 +38,6 @@ public class InMemoryRecordBufferingStrategy implements BufferingStrategy { private final RecordSizeEstimator recordSizeEstimator; private final long maxQueueSizeInBytes; private long bufferSizeInBytes; - private VoidCallable onFlushAllEventHook; public InMemoryRecordBufferingStrategy(final RecordWriter recordWriter, final 
long maxQueueSizeInBytes) { @@ -55,20 +53,24 @@ public InMemoryRecordBufferingStrategy(final RecordWriter this.maxQueueSizeInBytes = maxQueueSizeInBytes; this.bufferSizeInBytes = 0; this.recordSizeEstimator = new RecordSizeEstimator(); - this.onFlushAllEventHook = null; } @Override - public void addRecord(final AirbyteStreamNameNamespacePair stream, final AirbyteMessage message) throws Exception { + public boolean addRecord(final AirbyteStreamNameNamespacePair stream, final AirbyteMessage message) throws Exception { + boolean didFlush = false; + final long messageSizeInBytes = recordSizeEstimator.getEstimatedByteSize(message.getRecord()); if (bufferSizeInBytes + messageSizeInBytes > maxQueueSizeInBytes) { flushAll(); + didFlush = true; bufferSizeInBytes = 0; } final List bufferedRecords = streamBuffer.computeIfAbsent(stream, k -> new ArrayList<>()); bufferedRecords.add(message.getRecord()); bufferSizeInBytes += messageSizeInBytes; + + return didFlush; } @Override @@ -91,10 +93,6 @@ public void flushAll() throws Exception { }, Map.of("bufferSizeInBytes", bufferSizeInBytes)); close(); clear(); - - if (onFlushAllEventHook != null) { - onFlushAllEventHook.call(); - } } @Override @@ -102,11 +100,6 @@ public void clear() { streamBuffer = new HashMap<>(); } - @Override - public void registerFlushAllEventHook(final VoidCallable onFlushAllEventHook) { - this.onFlushAllEventHook = onFlushAllEventHook; - } - @Override public void close() throws Exception {} diff --git a/airbyte-integrations/bases/base-java/src/main/java/io/airbyte/integrations/destination/record_buffer/SerializedBufferingStrategy.java b/airbyte-integrations/bases/base-java/src/main/java/io/airbyte/integrations/destination/record_buffer/SerializedBufferingStrategy.java index ee4b5b441750..4ae15e7bdb59 100644 --- a/airbyte-integrations/bases/base-java/src/main/java/io/airbyte/integrations/destination/record_buffer/SerializedBufferingStrategy.java +++ 
b/airbyte-integrations/bases/base-java/src/main/java/io/airbyte/integrations/destination/record_buffer/SerializedBufferingStrategy.java @@ -4,7 +4,6 @@ package io.airbyte.integrations.destination.record_buffer; -import io.airbyte.commons.concurrency.VoidCallable; import io.airbyte.commons.functional.CheckedBiConsumer; import io.airbyte.commons.functional.CheckedBiFunction; import io.airbyte.commons.string.Strings; @@ -27,7 +26,6 @@ public class SerializedBufferingStrategy implements BufferingStrategy { private final CheckedBiFunction onCreateBuffer; private final CheckedBiConsumer onStreamFlush; - private VoidCallable onFlushAllEventHook; private Map allBuffers = new HashMap<>(); private long totalBufferSizeInBytes; @@ -40,16 +38,11 @@ public SerializedBufferingStrategy(final CheckedBiFunction { LOGGER.info("Starting a new buffer for stream {} (current state: {} in {} buffers)", @@ -71,10 +64,28 @@ public void addRecord(final AirbyteStreamNameNamespacePair stream, final Airbyte if (totalBufferSizeInBytes >= streamBuffer.getMaxTotalBufferSizeInBytes() || allBuffers.size() >= streamBuffer.getMaxConcurrentStreamsInBuffer()) { flushAll(); + didFlush = true; totalBufferSizeInBytes = 0; } else if (streamBuffer.getByteCount() >= streamBuffer.getMaxPerStreamBufferSizeInBytes()) { flushWriter(stream, streamBuffer); + /* + * Note: We intentionally do not mark didFlush as true in the branch of this conditional. Because + * this branch flushes individual streams, there is no guarantee that it will flush records in the + * same order that state messages were received. The outcome here is that records get flushed but + * our updating of which state messages have been flushed falls behind. + * + * This is not ideal from a checkpoint point of view, because it means in the case where there is a + * failure, we will not be able to report that those records that were flushed and committed were + * committed because their corresponding state messages weren't marked as flushed.
Thus, it weakens + * checkpointing, but it does not cause a correctness issue. + * + * In non-failure cases, using this conditional branch relies on the state messages getting flushed + * by some other means. That can be caused by the previous branch in this conditional. It is + * guaranteed by the fact that we always flush all state messages at the end of a sync. + */ } + + return didFlush; } @Override @@ -99,9 +110,6 @@ public void flushAll() throws Exception { clear(); }, Map.of("bufferSizeInBytes", totalBufferSizeInBytes)); - if (onFlushAllEventHook != null) { - onFlushAllEventHook.call(); - } totalBufferSizeInBytes = 0; } diff --git a/airbyte-integrations/bases/base-java/src/test/java/io/airbyte/integrations/base/AirbyteTraceMessageUtilityTest.java b/airbyte-integrations/bases/base-java/src/test/java/io/airbyte/integrations/base/AirbyteTraceMessageUtilityTest.java index 19146e15b37b..0c5351552899 100644 --- a/airbyte-integrations/bases/base-java/src/test/java/io/airbyte/integrations/base/AirbyteTraceMessageUtilityTest.java +++ b/airbyte-integrations/bases/base-java/src/test/java/io/airbyte/integrations/base/AirbyteTraceMessageUtilityTest.java @@ -36,13 +36,17 @@ private void assertJsonNodeIsTraceMessage(JsonNode jsonNode) { @Test void testEmitSystemErrorTrace() { AirbyteTraceMessageUtility.emitSystemErrorTrace(Mockito.mock(RuntimeException.class), "this is a system error"); - assertJsonNodeIsTraceMessage(Jsons.deserialize(outContent.toString(StandardCharsets.UTF_8))); + JsonNode outJson = Jsons.deserialize(outContent.toString(StandardCharsets.UTF_8)); + assertJsonNodeIsTraceMessage(outJson); + Assertions.assertEquals("system_error", outJson.get("trace").get("error").get("failure_type").asText()); } @Test void testEmitConfigErrorTrace() { AirbyteTraceMessageUtility.emitConfigErrorTrace(Mockito.mock(RuntimeException.class), "this is a config error"); - assertJsonNodeIsTraceMessage(Jsons.deserialize(outContent.toString(StandardCharsets.UTF_8))); + JsonNode 
outJson = Jsons.deserialize(outContent.toString(StandardCharsets.UTF_8)); + assertJsonNodeIsTraceMessage(outJson); + Assertions.assertEquals("config_error", outJson.get("trace").get("error").get("failure_type").asText()); } @Test @@ -51,6 +55,17 @@ void testEmitErrorTrace() { assertJsonNodeIsTraceMessage(Jsons.deserialize(outContent.toString(StandardCharsets.UTF_8))); } + @Test + void testCorrectStacktraceFormat() { + try { + int x = 1 / 0; + } catch (Exception e) { + AirbyteTraceMessageUtility.emitSystemErrorTrace(e, "you exploded the universe"); + } + JsonNode outJson = Jsons.deserialize(outContent.toString(StandardCharsets.UTF_8)); + Assertions.assertTrue(outJson.get("trace").get("error").get("stack_trace").asText().contains("\n\tat")); + } + @AfterEach public void revertOut() { System.setOut(originalOut); diff --git a/airbyte-integrations/bases/base-java/src/test/java/io/airbyte/integrations/base/ssh/SshTunnelTest.java b/airbyte-integrations/bases/base-java/src/test/java/io/airbyte/integrations/base/ssh/SshTunnelTest.java new file mode 100644 index 000000000000..f223104b98a1 --- /dev/null +++ b/airbyte-integrations/bases/base-java/src/test/java/io/airbyte/integrations/base/ssh/SshTunnelTest.java @@ -0,0 +1,57 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.integrations.base.ssh; + +import static org.junit.jupiter.api.Assertions.assertTrue; + +import java.nio.charset.StandardCharsets; +import java.security.KeyPair; +import java.security.PrivateKey; +import java.security.PublicKey; +import org.apache.sshd.common.util.security.SecurityUtils; +import org.apache.sshd.common.util.security.eddsa.EdDSASecurityProviderRegistrar; +import org.junit.jupiter.api.Test; + +class SshTunnelTest { + + /** + * This test verifies that 'net.i2p.crypto:eddsa' is present and EdDSA is supported. 
If + * net.i2p.crypto:eddsa will be removed from project, then will be thrown: generator not correctly + * initialized + * + * @throws Exception + */ + @Test + public void edDsaIsSupported() throws Exception { + var keygen = SecurityUtils.getKeyPairGenerator("EdDSA"); + final String message = "hello world"; + KeyPair keyPair = keygen.generateKeyPair(); + + byte[] signedMessage = sign(keyPair.getPrivate(), message); + + assertTrue(new EdDSASecurityProviderRegistrar().isSupported()); + assertTrue(verify(keyPair.getPublic(), signedMessage, message)); + } + + private byte[] sign(final PrivateKey privateKey, final String message) throws Exception { + var signature = SecurityUtils.getSignature("NONEwithEdDSA"); + signature.initSign(privateKey); + + signature.update(message.getBytes(StandardCharsets.UTF_8)); + + return signature.sign(); + } + + private boolean verify(final PublicKey publicKey, byte[] signed, final String message) + throws Exception { + var signature = SecurityUtils.getSignature("NONEwithEdDSA"); + signature.initVerify(publicKey); + + signature.update(message.getBytes(StandardCharsets.UTF_8)); + + return signature.verify(signed); + } + +} diff --git a/airbyte-integrations/bases/base-java/src/test/java/io/airbyte/integrations/destination/dest_state_lifecycle_manager/DefaultDestStateLifecycleManagerTest.java b/airbyte-integrations/bases/base-java/src/test/java/io/airbyte/integrations/destination/dest_state_lifecycle_manager/DefaultDestStateLifecycleManagerTest.java new file mode 100644 index 000000000000..f92ee2828045 --- /dev/null +++ b/airbyte-integrations/bases/base-java/src/test/java/io/airbyte/integrations/destination/dest_state_lifecycle_manager/DefaultDestStateLifecycleManagerTest.java @@ -0,0 +1,124 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+ */ + +package io.airbyte.integrations.destination.dest_state_lifecycle_manager; + +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.verify; + +import io.airbyte.protocol.models.AirbyteMessage; +import io.airbyte.protocol.models.AirbyteMessage.Type; +import io.airbyte.protocol.models.AirbyteStateMessage; +import io.airbyte.protocol.models.AirbyteStateMessage.AirbyteStateType; +import io.airbyte.protocol.models.AirbyteStreamState; +import io.airbyte.protocol.models.StreamDescriptor; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +class DefaultDestStateLifecycleManagerTest { + + private static final AirbyteMessage UNSET_TYPE_MESSAGE = new AirbyteMessage() + .withType(Type.STATE) + .withState(new AirbyteStateMessage()); + private static final AirbyteMessage LEGACY_MESSAGE = new AirbyteMessage() + .withType(Type.STATE) + .withState(new AirbyteStateMessage().withType(AirbyteStateType.LEGACY)); + private static final AirbyteMessage GLOBAL_MESSAGE = new AirbyteMessage() + .withType(Type.STATE) + .withState(new AirbyteStateMessage().withType(AirbyteStateType.GLOBAL)); + private static final AirbyteMessage STREAM_MESSAGE = new AirbyteMessage() + .withType(Type.STATE) + .withState(new AirbyteStateMessage() + .withType(AirbyteStateType.STREAM) + .withStream(new AirbyteStreamState().withStreamDescriptor(new StreamDescriptor().withName("users")))); + + private DestStateLifecycleManager mgr1; + private DestStateLifecycleManager singleStateMgr; + private DestStateLifecycleManager streamMgr; + + @BeforeEach + void setup() { + singleStateMgr = mock(DestStateLifecycleManager.class); + streamMgr = mock(DestStateLifecycleManager.class); + mgr1 = new DefaultDestStateLifecycleManager(singleStateMgr, streamMgr); + } + + @Test + void testFailsOnIncompatibleStates() { + final DefaultDestStateLifecycleManager manager1 = new 
DefaultDestStateLifecycleManager(singleStateMgr, streamMgr); + manager1.addState(UNSET_TYPE_MESSAGE); + manager1.addState(UNSET_TYPE_MESSAGE); + manager1.addState(LEGACY_MESSAGE); + assertThrows(IllegalArgumentException.class, () -> manager1.addState(GLOBAL_MESSAGE)); + assertThrows(IllegalArgumentException.class, () -> manager1.addState(STREAM_MESSAGE)); + + final DefaultDestStateLifecycleManager manager2 = new DefaultDestStateLifecycleManager(singleStateMgr, streamMgr); + manager2.addState(LEGACY_MESSAGE); + manager2.addState(LEGACY_MESSAGE); + manager2.addState(UNSET_TYPE_MESSAGE); + assertThrows(IllegalArgumentException.class, () -> manager2.addState(GLOBAL_MESSAGE)); + assertThrows(IllegalArgumentException.class, () -> manager2.addState(STREAM_MESSAGE)); + + final DefaultDestStateLifecycleManager manager3 = new DefaultDestStateLifecycleManager(singleStateMgr, streamMgr); + manager3.addState(GLOBAL_MESSAGE); + manager3.addState(GLOBAL_MESSAGE); + assertThrows(IllegalArgumentException.class, () -> manager3.addState(UNSET_TYPE_MESSAGE)); + assertThrows(IllegalArgumentException.class, () -> manager3.addState(LEGACY_MESSAGE)); + assertThrows(IllegalArgumentException.class, () -> manager3.addState(STREAM_MESSAGE)); + + final DefaultDestStateLifecycleManager manager4 = new DefaultDestStateLifecycleManager(singleStateMgr, streamMgr); + manager4.addState(STREAM_MESSAGE); + manager4.addState(STREAM_MESSAGE); + assertThrows(IllegalArgumentException.class, () -> manager4.addState(UNSET_TYPE_MESSAGE)); + assertThrows(IllegalArgumentException.class, () -> manager4.addState(LEGACY_MESSAGE)); + assertThrows(IllegalArgumentException.class, () -> manager4.addState(GLOBAL_MESSAGE)); + } + + @Test + void testDelegatesLegacyMessages() { + mgr1.addState(UNSET_TYPE_MESSAGE); + mgr1.addState(LEGACY_MESSAGE); + mgr1.markPendingAsFlushed(); + mgr1.markFlushedAsCommitted(); + mgr1.listFlushed(); + mgr1.listCommitted(); + verify(singleStateMgr).addState(UNSET_TYPE_MESSAGE); + 
verify(singleStateMgr).addState(LEGACY_MESSAGE); + verify(singleStateMgr).markPendingAsFlushed(); + verify(singleStateMgr).markFlushedAsCommitted(); + verify(singleStateMgr).listFlushed(); + verify(singleStateMgr).listCommitted(); + } + + @Test + void testDelegatesGlobalMessages() { + mgr1.addState(GLOBAL_MESSAGE); + mgr1.markPendingAsFlushed(); + mgr1.markFlushedAsCommitted(); + mgr1.listFlushed(); + mgr1.listCommitted(); + verify(singleStateMgr).addState(GLOBAL_MESSAGE); + verify(singleStateMgr).markPendingAsFlushed(); + verify(singleStateMgr).markFlushedAsCommitted(); + verify(singleStateMgr).listFlushed(); + verify(singleStateMgr).listCommitted(); + } + + @Test + void testDelegatesStreamMessages() { + mgr1.addState(STREAM_MESSAGE); + mgr1.markPendingAsFlushed(); + mgr1.markFlushedAsCommitted(); + mgr1.listFlushed(); + mgr1.listCommitted(); + + verify(streamMgr).addState(STREAM_MESSAGE); + verify(streamMgr).markPendingAsFlushed(); + verify(streamMgr).markFlushedAsCommitted(); + verify(streamMgr).listFlushed(); + verify(streamMgr).listCommitted(); + } + +} diff --git a/airbyte-integrations/bases/base-java/src/test/java/io/airbyte/integrations/destination/dest_state_lifecycle_manager/DestSingleStateLifecycleManagerTest.java b/airbyte-integrations/bases/base-java/src/test/java/io/airbyte/integrations/destination/dest_state_lifecycle_manager/DestSingleStateLifecycleManagerTest.java new file mode 100644 index 000000000000..c027aa8da483 --- /dev/null +++ b/airbyte-integrations/bases/base-java/src/test/java/io/airbyte/integrations/destination/dest_state_lifecycle_manager/DestSingleStateLifecycleManagerTest.java @@ -0,0 +1,122 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+ */ + +package io.airbyte.integrations.destination.dest_state_lifecycle_manager; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; + +import io.airbyte.commons.json.Jsons; +import io.airbyte.protocol.models.AirbyteMessage; +import io.airbyte.protocol.models.AirbyteMessage.Type; +import io.airbyte.protocol.models.AirbyteStateMessage; +import io.airbyte.protocol.models.AirbyteStateMessage.AirbyteStateType; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +class DestSingleStateLifecycleManagerTest { + + private static final AirbyteMessage MESSAGE1 = new AirbyteMessage() + .withType(Type.STATE) + .withState(new AirbyteStateMessage().withType(AirbyteStateType.GLOBAL).withData(Jsons.jsonNode("a"))); + private static final AirbyteMessage MESSAGE2 = new AirbyteMessage() + .withType(Type.STATE) + .withState(new AirbyteStateMessage().withType(AirbyteStateType.GLOBAL).withData(Jsons.jsonNode("b"))); + + private DestSingleStateLifecycleManager mgr; + + @BeforeEach + void setup() { + mgr = new DestSingleStateLifecycleManager(); + } + + /** + * Demonstrates expected lifecycle of a state object for documentation purposes. Subsequent tests get + * into the details. + */ + @Test + void testBasicLifeCycle() { + // starts with no state. + assertTrue(mgr.listPending().isEmpty()); + assertTrue(mgr.listFlushed().isEmpty()); + assertTrue(mgr.listCommitted().isEmpty()); + + mgr.addState(MESSAGE1); + // new state supersedes previous ones. we should only see MESSAGE2 from here on out. + mgr.addState(MESSAGE2); + + // after adding a state, it is in pending only. + assertEquals(MESSAGE2, mgr.listPending().poll()); + assertTrue(mgr.listFlushed().isEmpty()); + assertTrue(mgr.listCommitted().isEmpty()); + + mgr.markPendingAsFlushed(); + + // after flushing the state it is in flushed only.
+ assertTrue(mgr.listPending().isEmpty()); + assertEquals(MESSAGE2, mgr.listFlushed().poll()); + assertTrue(mgr.listCommitted().isEmpty()); + + // after committing the state it is in committed only. + mgr.markFlushedAsCommitted(); + + assertTrue(mgr.listPending().isEmpty()); + assertTrue(mgr.listFlushed().isEmpty()); + assertEquals(MESSAGE2, mgr.listCommitted().poll()); + } + + @Test + void testPending() { + mgr.addState(MESSAGE1); + mgr.addState(MESSAGE2); + + // verify the LAST message is returned. + assertEquals(MESSAGE2, mgr.listPending().poll()); + assertTrue(mgr.listFlushed().isEmpty()); + assertTrue(mgr.listCommitted().isEmpty()); + } + + @Test + void testFlushed() { + mgr.addState(MESSAGE1); + mgr.addState(MESSAGE2); + mgr.markPendingAsFlushed(); + + assertTrue(mgr.listPending().isEmpty()); + assertEquals(MESSAGE2, mgr.listFlushed().poll()); + assertTrue(mgr.listCommitted().isEmpty()); + + // verify that multiple calls to markPendingAsFlushed overwrite old states + mgr.addState(MESSAGE1); + mgr.markPendingAsFlushed(); + mgr.markPendingAsFlushed(); + + assertTrue(mgr.listPending().isEmpty()); + assertEquals(MESSAGE1, mgr.listFlushed().poll()); + assertTrue(mgr.listCommitted().isEmpty()); + } + + @Test + void testCommitted() { + mgr.addState(MESSAGE1); + mgr.addState(MESSAGE2); + mgr.markPendingAsFlushed(); + mgr.markFlushedAsCommitted(); + + assertTrue(mgr.listPending().isEmpty()); + assertTrue(mgr.listFlushed().isEmpty()); + assertEquals(MESSAGE2, mgr.listCommitted().poll()); + + // verify that multiple calls to markFlushedAsCommitted overwrite old states + mgr.addState(MESSAGE1); + mgr.markPendingAsFlushed(); + mgr.markFlushedAsCommitted(); + mgr.markFlushedAsCommitted(); + + assertTrue(mgr.listPending().isEmpty()); + assertTrue(mgr.listFlushed().isEmpty()); + assertEquals(MESSAGE1, mgr.listCommitted().poll()); + } + +} diff --git 
a/airbyte-integrations/bases/base-java/src/test/java/io/airbyte/integrations/destination/dest_state_lifecycle_manager/DestStreamStateLifecycleManagerTest.java b/airbyte-integrations/bases/base-java/src/test/java/io/airbyte/integrations/destination/dest_state_lifecycle_manager/DestStreamStateLifecycleManagerTest.java new file mode 100644 index 000000000000..8894cb133437 --- /dev/null +++ b/airbyte-integrations/bases/base-java/src/test/java/io/airbyte/integrations/destination/dest_state_lifecycle_manager/DestStreamStateLifecycleManagerTest.java @@ -0,0 +1,140 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.integrations.destination.dest_state_lifecycle_manager; + +import static org.junit.jupiter.api.Assertions.*; + +import io.airbyte.commons.json.Jsons; +import io.airbyte.protocol.models.AirbyteMessage; +import io.airbyte.protocol.models.AirbyteMessage.Type; +import io.airbyte.protocol.models.AirbyteStateMessage; +import io.airbyte.protocol.models.AirbyteStateMessage.AirbyteStateType; +import io.airbyte.protocol.models.AirbyteStreamState; +import io.airbyte.protocol.models.StreamDescriptor; +import java.util.LinkedList; +import java.util.List; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +class DestStreamStateLifecycleManagerTest { + + private static final AirbyteMessage STREAM1_MESSAGE1 = new AirbyteMessage() + .withType(Type.STATE) + .withState(new AirbyteStateMessage() + .withType(AirbyteStateType.STREAM) + .withStream(new AirbyteStreamState().withStreamDescriptor(new StreamDescriptor().withName("apples")).withStreamState(Jsons.jsonNode("a")))); + private static final AirbyteMessage STREAM1_MESSAGE2 = new AirbyteMessage() + .withType(Type.STATE) + .withState(new AirbyteStateMessage() + .withType(AirbyteStateType.STREAM) + .withStream(new AirbyteStreamState().withStreamDescriptor(new StreamDescriptor().withName("apples")).withStreamState(Jsons.jsonNode("b")))); + private static final 
AirbyteMessage STREAM2_MESSAGE1 = new AirbyteMessage() + .withType(Type.STATE) + .withState(new AirbyteStateMessage() + .withType(AirbyteStateType.STREAM) + .withStream( + new AirbyteStreamState().withStreamDescriptor(new StreamDescriptor().withName("bananas")).withStreamState(Jsons.jsonNode("10")))); + + private DestStreamStateLifecycleManager mgr; + + @BeforeEach + void setup() { + mgr = new DestStreamStateLifecycleManager(); + } + + /** + * Demonstrates expected lifecycle of a state object for documentation purposes. Subsequent tests get + * into the details. + */ + @Test + void testBasicLifeCycle() { + // starts with no state. + assertTrue(mgr.listPending().isEmpty()); + assertTrue(mgr.listFlushed().isEmpty()); + assertTrue(mgr.listCommitted().isEmpty()); + + mgr.addState(STREAM1_MESSAGE1); + // new state supersedes previous ones. we should only see MESSAGE2 for STREAM1 from here on out. + mgr.addState(STREAM1_MESSAGE2); + // different stream, thus does not interact with messages from STREAM1. + mgr.addState(STREAM2_MESSAGE1); + + // after adding a state, it is in pending only. + assertEquals(new LinkedList<>(List.of(STREAM1_MESSAGE2, STREAM2_MESSAGE1)), mgr.listPending()); + assertTrue(mgr.listFlushed().isEmpty()); + assertTrue(mgr.listCommitted().isEmpty()); + + mgr.markPendingAsFlushed(); + + // after flushing the state it is in flushed only. + assertTrue(mgr.listPending().isEmpty()); + assertEquals(new LinkedList<>(List.of(STREAM1_MESSAGE2, STREAM2_MESSAGE1)), mgr.listFlushed()); + assertTrue(mgr.listCommitted().isEmpty()); + + // after committing the state it is in committed only.
+ mgr.markFlushedAsCommitted(); + + assertTrue(mgr.listPending().isEmpty()); + assertTrue(mgr.listFlushed().isEmpty()); + assertEquals(new LinkedList<>(List.of(STREAM1_MESSAGE2, STREAM2_MESSAGE1)), mgr.listCommitted()); + } + + @Test + void testPending() { + mgr.addState(STREAM1_MESSAGE1); + mgr.addState(STREAM1_MESSAGE2); + mgr.addState(STREAM2_MESSAGE1); + + // verify the LAST message is returned. + assertEquals(new LinkedList<>(List.of(STREAM1_MESSAGE2, STREAM2_MESSAGE1)), mgr.listPending()); + assertTrue(mgr.listFlushed().isEmpty()); + assertTrue(mgr.listCommitted().isEmpty()); + } + + @Test + void testFlushed() { + mgr.addState(STREAM1_MESSAGE1); + mgr.addState(STREAM1_MESSAGE2); + mgr.addState(STREAM2_MESSAGE1); + mgr.markPendingAsFlushed(); + + assertTrue(mgr.listPending().isEmpty()); + assertEquals(new LinkedList<>(List.of(STREAM1_MESSAGE2, STREAM2_MESSAGE1)), mgr.listFlushed()); + assertTrue(mgr.listCommitted().isEmpty()); + + // verify that multiple calls to markPendingAsFlushed overwrite old states + mgr.addState(STREAM1_MESSAGE1); + mgr.markPendingAsFlushed(); + mgr.markPendingAsFlushed(); + + assertTrue(mgr.listPending().isEmpty()); + assertEquals(new LinkedList<>(List.of(STREAM1_MESSAGE1, STREAM2_MESSAGE1)), mgr.listFlushed()); + assertTrue(mgr.listCommitted().isEmpty()); + } + + @Test + void testCommitted() { + mgr.addState(STREAM1_MESSAGE1); + mgr.addState(STREAM1_MESSAGE2); + mgr.addState(STREAM2_MESSAGE1); + mgr.markPendingAsFlushed(); + mgr.markFlushedAsCommitted(); + + assertTrue(mgr.listPending().isEmpty()); + assertTrue(mgr.listFlushed().isEmpty()); + assertEquals(new LinkedList<>(List.of(STREAM1_MESSAGE2, STREAM2_MESSAGE1)), mgr.listCommitted()); + + // verify that multiple calls to markFlushedAsCommitted overwrite old states + mgr.addState(STREAM1_MESSAGE1); + mgr.markPendingAsFlushed(); + mgr.markFlushedAsCommitted(); + mgr.markFlushedAsCommitted(); + + assertTrue(mgr.listPending().isEmpty()); + assertTrue(mgr.listFlushed().isEmpty()); + 
assertEquals(new LinkedList<>(List.of(STREAM1_MESSAGE1, STREAM2_MESSAGE1)), mgr.listCommitted()); + } + +} diff --git a/airbyte-integrations/bases/base-java/src/test/java/io/airbyte/integrations/destination/record_buffer/InMemoryRecordBufferingStrategyTest.java b/airbyte-integrations/bases/base-java/src/test/java/io/airbyte/integrations/destination/record_buffer/InMemoryRecordBufferingStrategyTest.java index bc1029f95293..330b3c998e11 100644 --- a/airbyte-integrations/bases/base-java/src/test/java/io/airbyte/integrations/destination/record_buffer/InMemoryRecordBufferingStrategyTest.java +++ b/airbyte-integrations/bases/base-java/src/test/java/io/airbyte/integrations/destination/record_buffer/InMemoryRecordBufferingStrategyTest.java @@ -4,12 +4,13 @@ package io.airbyte.integrations.destination.record_buffer; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; import com.fasterxml.jackson.databind.JsonNode; -import io.airbyte.commons.concurrency.VoidCallable; import io.airbyte.commons.json.Jsons; import io.airbyte.integrations.base.AirbyteStreamNameNamespacePair; import io.airbyte.integrations.destination.buffered_stream_consumer.RecordWriter; @@ -25,6 +26,7 @@ public class InMemoryRecordBufferingStrategyTest { // instances private static final int MAX_QUEUE_SIZE_IN_BYTES = 130; + @SuppressWarnings("unchecked") private final RecordWriter recordWriter = mock(RecordWriter.class); @Test @@ -36,17 +38,12 @@ public void testBuffering() throws Exception { final AirbyteMessage message2 = generateMessage(stream2); final AirbyteMessage message3 = generateMessage(stream2); final AirbyteMessage message4 = generateMessage(stream2); - final VoidCallable hook = mock(VoidCallable.class); - buffering.registerFlushAllEventHook(hook); - buffering.addRecord(stream1, message1); - 
buffering.addRecord(stream2, message2); + assertFalse(buffering.addRecord(stream1, message1)); + assertFalse(buffering.addRecord(stream2, message2)); // Buffer still has room - verify(hook, times(0)).call(); - - buffering.addRecord(stream2, message3); + assertTrue(buffering.addRecord(stream2, message3)); // Buffer limit reach, flushing all messages so far before adding the new incoming one - verify(hook, times(1)).call(); verify(recordWriter, times(1)).accept(stream1, List.of(message1.getRecord())); verify(recordWriter, times(1)).accept(stream2, List.of(message2.getRecord())); @@ -54,7 +51,6 @@ public void testBuffering() throws Exception { // force flush to terminate test buffering.flushAll(); - verify(hook, times(2)).call(); verify(recordWriter, times(1)).accept(stream2, List.of(message3.getRecord(), message4.getRecord())); } diff --git a/airbyte-integrations/bases/base-java/src/test/java/io/airbyte/integrations/destination/record_buffer/SerializedBufferingStrategyTest.java b/airbyte-integrations/bases/base-java/src/test/java/io/airbyte/integrations/destination/record_buffer/SerializedBufferingStrategyTest.java index 397d09e97dad..2de320114ebe 100644 --- a/airbyte-integrations/bases/base-java/src/test/java/io/airbyte/integrations/destination/record_buffer/SerializedBufferingStrategyTest.java +++ b/airbyte-integrations/bases/base-java/src/test/java/io/airbyte/integrations/destination/record_buffer/SerializedBufferingStrategyTest.java @@ -4,7 +4,9 @@ package io.airbyte.integrations.destination.record_buffer; +import static org.junit.jupiter.api.Assertions.assertFalse; import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; import static org.mockito.ArgumentMatchers.any; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.times; @@ -12,7 +14,6 @@ import static org.mockito.Mockito.when; import com.fasterxml.jackson.databind.JsonNode; -import 
io.airbyte.commons.concurrency.VoidCallable; import io.airbyte.commons.functional.CheckedBiConsumer; import io.airbyte.commons.functional.CheckedBiFunction; import io.airbyte.commons.json.Jsons; @@ -37,9 +38,9 @@ public class SerializedBufferingStrategyTest { private static final long MAX_PER_STREAM_BUFFER_SIZE_BYTES = 21L; private final ConfiguredAirbyteCatalog catalog = mock(ConfiguredAirbyteCatalog.class); + @SuppressWarnings("unchecked") private final CheckedBiConsumer perStreamFlushHook = mock(CheckedBiConsumer.class); - private final VoidCallable flushAllHook = mock(VoidCallable.class); private final SerializableBuffer recordWriter1 = mock(SerializableBuffer.class); private final SerializableBuffer recordWriter2 = mock(SerializableBuffer.class); @@ -73,34 +74,30 @@ public void testPerStreamThresholdFlush() throws Exception { final AirbyteMessage message3 = generateMessage(stream2); final AirbyteMessage message4 = generateMessage(stream2); final AirbyteMessage message5 = generateMessage(stream2); - buffering.registerFlushAllEventHook(flushAllHook); when(recordWriter1.getByteCount()).thenReturn(10L); // one record in recordWriter1 - buffering.addRecord(stream1, message1); + assertFalse(buffering.addRecord(stream1, message1)); when(recordWriter2.getByteCount()).thenReturn(10L); // one record in recordWriter2 - buffering.addRecord(stream2, message2); + assertFalse(buffering.addRecord(stream2, message2)); // Total and per stream Buffers still have room - verify(flushAllHook, times(0)).call(); verify(perStreamFlushHook, times(0)).accept(stream1, recordWriter1); verify(perStreamFlushHook, times(0)).accept(stream2, recordWriter2); when(recordWriter2.getByteCount()).thenReturn(20L); // second record in recordWriter2 - buffering.addRecord(stream2, message3); + assertFalse(buffering.addRecord(stream2, message3)); when(recordWriter2.getByteCount()).thenReturn(30L); // third record in recordWriter2 - buffering.addRecord(stream2, message4); + 
assertFalse(buffering.addRecord(stream2, message4)); // The buffer limit is now reached for stream2, flushing that single stream only - verify(flushAllHook, times(0)).call(); verify(perStreamFlushHook, times(0)).accept(stream1, recordWriter1); verify(perStreamFlushHook, times(1)).accept(stream2, recordWriter2); when(recordWriter2.getByteCount()).thenReturn(10L); // back to one record in recordWriter2 - buffering.addRecord(stream2, message5); + assertFalse(buffering.addRecord(stream2, message5)); // force flush to terminate test buffering.flushAll(); - verify(flushAllHook, times(1)).call(); verify(perStreamFlushHook, times(1)).accept(stream1, recordWriter1); verify(perStreamFlushHook, times(2)).accept(stream2, recordWriter2); } @@ -119,31 +116,27 @@ public void testTotalStreamThresholdFlush() throws Exception { final AirbyteMessage message4 = generateMessage(stream1); final AirbyteMessage message5 = generateMessage(stream2); final AirbyteMessage message6 = generateMessage(stream3); - buffering.registerFlushAllEventHook(flushAllHook); - buffering.addRecord(stream1, message1); - buffering.addRecord(stream2, message2); + assertFalse(buffering.addRecord(stream1, message1)); + assertFalse(buffering.addRecord(stream2, message2)); // Total and per stream Buffers still have room - verify(flushAllHook, times(0)).call(); verify(perStreamFlushHook, times(0)).accept(stream1, recordWriter1); verify(perStreamFlushHook, times(0)).accept(stream2, recordWriter2); verify(perStreamFlushHook, times(0)).accept(stream3, recordWriter3); - buffering.addRecord(stream3, message3); + assertFalse(buffering.addRecord(stream3, message3)); when(recordWriter1.getByteCount()).thenReturn(20L); // second record in recordWriter1 - buffering.addRecord(stream1, message4); + assertFalse(buffering.addRecord(stream1, message4)); when(recordWriter2.getByteCount()).thenReturn(20L); // second record in recordWriter2 - buffering.addRecord(stream2, message5); + assertTrue(buffering.addRecord(stream2, 
message5)); // Buffer limit reached for total streams, flushing all streams - verify(flushAllHook, times(1)).call(); verify(perStreamFlushHook, times(1)).accept(stream1, recordWriter1); verify(perStreamFlushHook, times(1)).accept(stream2, recordWriter2); verify(perStreamFlushHook, times(1)).accept(stream3, recordWriter3); - buffering.addRecord(stream3, message6); + assertFalse(buffering.addRecord(stream3, message6)); // force flush to terminate test buffering.flushAll(); - verify(flushAllHook, times(2)).call(); verify(perStreamFlushHook, times(1)).accept(stream1, recordWriter1); verify(perStreamFlushHook, times(1)).accept(stream2, recordWriter2); verify(perStreamFlushHook, times(2)).accept(stream3, recordWriter3); @@ -162,29 +155,25 @@ public void testConcurrentStreamThresholdFlush() throws Exception { final AirbyteMessage message3 = generateMessage(stream3); final AirbyteMessage message4 = generateMessage(stream4); final AirbyteMessage message5 = generateMessage(stream1); - buffering.registerFlushAllEventHook(flushAllHook); - buffering.addRecord(stream1, message1); - buffering.addRecord(stream2, message2); - buffering.addRecord(stream3, message3); + assertFalse(buffering.addRecord(stream1, message1)); + assertFalse(buffering.addRecord(stream2, message2)); + assertFalse(buffering.addRecord(stream3, message3)); // Total and per stream Buffers still have room - verify(flushAllHook, times(0)).call(); verify(perStreamFlushHook, times(0)).accept(stream1, recordWriter1); verify(perStreamFlushHook, times(0)).accept(stream2, recordWriter2); verify(perStreamFlushHook, times(0)).accept(stream3, recordWriter3); - buffering.addRecord(stream4, message4); + assertTrue(buffering.addRecord(stream4, message4)); // Buffer limit reached for concurrent streams, flushing all streams - verify(flushAllHook, times(1)).call(); verify(perStreamFlushHook, times(1)).accept(stream1, recordWriter1); verify(perStreamFlushHook, times(1)).accept(stream2, recordWriter2); verify(perStreamFlushHook, 
times(1)).accept(stream3, recordWriter3); verify(perStreamFlushHook, times(1)).accept(stream4, recordWriter4); - buffering.addRecord(stream1, message5); + assertFalse(buffering.addRecord(stream1, message5)); // force flush to terminate test buffering.flushAll(); - verify(flushAllHook, times(2)).call(); verify(perStreamFlushHook, times(2)).accept(stream1, recordWriter1); verify(perStreamFlushHook, times(1)).accept(stream2, recordWriter2); verify(perStreamFlushHook, times(1)).accept(stream3, recordWriter3); diff --git a/airbyte-integrations/bases/base-normalization/Dockerfile b/airbyte-integrations/bases/base-normalization/Dockerfile index 7485fdd1bed5..a0d6d3a80faf 100644 --- a/airbyte-integrations/bases/base-normalization/Dockerfile +++ b/airbyte-integrations/bases/base-normalization/Dockerfile @@ -28,5 +28,5 @@ WORKDIR /airbyte ENV AIRBYTE_ENTRYPOINT "/airbyte/entrypoint.sh" ENTRYPOINT ["/airbyte/entrypoint.sh"] -LABEL io.airbyte.version=0.2.1 +LABEL io.airbyte.version=0.2.6 LABEL io.airbyte.name=airbyte/normalization diff --git a/airbyte-integrations/bases/base-normalization/README.md b/airbyte-integrations/bases/base-normalization/README.md index 4dfa621ca0a8..bfa9ada93db4 100644 --- a/airbyte-integrations/bases/base-normalization/README.md +++ b/airbyte-integrations/bases/base-normalization/README.md @@ -1,11 +1,170 @@ # Normalization +* [Normalization](#normalization) + * [Under the hood](#under-the-hood) + * [Incremental updates with dedup-history sync mode](#incremental-updates-with-dedup-history-sync-mode) + * [Developer workflow](#developer-workflow) + * [Setting up your environment](#setting-up-your-environment) + * [Running dbt](#running-dbt) + * [Testing normalization](#testing-normalization) + * [Build & Activate Virtual Environment and install dependencies](#build--activate-virtual-environment-and-install-dependencies) + * [Unit Tests](#unit-tests) + * [test_transform_config.py:](#test_transform_configpy) + * [test_stream_processor.py and 
test_table_name_registry.py:](#test_stream_processorpy-and-test_table_name_registrypy) + * [test_destination_name_transformer.py:](#test_destination_name_transformerpy) + * [Integration Tests](#integration-tests) + * [Integration Tests Definitions for test_ephemeral.py:](#integration-tests-definitions-for-test_ephemeralpy) + * [Integration Tests Definitions for test_normalization.py:](#integration-tests-definitions-for-test_normalizationpy) + * [README.md:](#readmemd) + * [Integration Test Data Input:](#integration-test-data-input) + * [data_input/catalog.json:](#data_inputcatalogjson) + * [data_input/messages.txt:](#data_inputmessagestxt) + * [data_input/replace_identifiers.json:](#data_inputreplace_identifiersjson) + * [Integration Test Execution Flow:](#integration-test-execution-flow) + * [Integration Test Checks:](#integration-test-checks) + * [dbt schema tests:](#dbt-schema-tests) + * [dbt data tests:](#dbt-data-tests) + * [Notes using dbt seeds:](#notes-using-dbt-seeds) + * [Debug dbt operations with local database](#debug-dbt-operations-with-local-database) + * [Standard Destination Tests](#standard-destination-tests) + * [Acceptance Tests](#acceptance-tests) + Related documentation on normalization is available here: -- [architecture / Basic Normalization](../../../docs/understanding-airbyte/basic-normalization.md) +* [architecture / Basic Normalization](../../../docs/understanding-airbyte/basic-normalization.md) * [tutorials / Custom dbt normalization](../../../docs/operator-guides/transformation-and-normalization/transformations-with-dbt.md) -# Testing normalization +## Under the hood + +Normalization has two Python modules: +* `transform_config` parses the destination connector config and generates a profile.yml file, + which configures how dbt will connect to the destination database. 
+* `transform_catalog` parses the connection's catalog and generates a dbt_project.yml file, + which configures the models that dbt will run and how they should be materialized. + +`entrypoint.sh` (the entrypoint to normalization's Docker image) invokes these two modules, then calls `dbt run` on their output. + +### Incremental updates with dedup-history sync mode + +When generating the final table, we need to pull data from the SCD model. +A naive implementation would require reading the entire SCD table and completely regenerating the final table on each run. +This is obviously inefficient, so we instead use dbt's [incremental materialization mode](https://docs.getdbt.com/docs/building-a-dbt-project/building-models/configuring-incremental-models). +At each stage of the dbt pipeline, normalization will query the target table for the newest `_airbyte_emitted_at` value. +Then we only need to find records from the source table with `_airbyte_emitted_at` greater than or equal to that value +(equal to is necessary in case a previous normalization run was interrupted). + +This handles the two error scenarios quite cleanly: +* If a sync fails but succeeds after a retry, such that the first attempt commits some records and the retry commits a superset + of those records, then normalization will see that the SCD table has none of those records. The SCD model has a deduping stage, + which removes the records which were synced multiple times. +* If normalization fails partway through, such that (for example) the SCD model is updated but the final table is not, and then the sync + is retried, then the source will not re-emit any old records (because the destination will have emitted a state message ack-ing + all of the records). If the retry emits some new records, then normalization will append them to the SCD table as usual + (because, from the SCD's point of view, this is just a normal sync). 
Then the final table's latest `_airbyte_emitted_at` + will be older than the original attempt, so it will pull both the new records _and_ the first attempt's records from the SCD table. + +## Developer workflow + +At a high level, this is the recommended workflow for updating base-normalization: +1. Manually edit the models in `integration_tests/normalization_test_output/postgres/test_simple_streams/models/generated`. + Run `dbt compile` and manually execute the SQL queries. This requires manual setup and validation, but allows you to quickly experiment + with different inputs. + 1. You can substitute your preferred database/warehouse. This document will use Postgres because it's easy to set up. +1. Run `dbt run` and verify that it generates the data correctly. +1. Once `dbt run` succeeds, edit `stream_processor.py` until it generates the models you hand-wrote in step 1. +1. Run the `test_normalization[DestinationType.POSTGRES-test_simple_streams]` integration test case. +1. Run the full [integration test suite](#integration-tests). +1. Commit the changes in `integration_tests/normalization_test_output`. + +### Setting up your environment + +If you have a fully-featured Python dev environment, you can just set a breakpoint at [this line]([integration_tests/test_normalization.py#L105](https://github.com/airbytehq/airbyte/blob/17ee3ad44ff71164765b97ff439c7ffd51bf9bfe/airbyte-integrations/bases/base-normalization/integration_tests/test_normalization.py#L108)) +and run the `test_normalization[DestinationType.POSTGRES-test_simple_streams]` test case. You can terminate the run after it hits the +breakpoint. This will start Postgres in a Docker container with some prepopulated data and configure profiles.yml to match the container.
+ +Otherwise, you can run this command: +```shell +docker run \ + --rm \ + --name "normalization_dev_postgres" \ + -e "integration-tests" \ + -e "integration-tests" \ + -p "9001:5432" \ + -d \ + marcosmarxm/postgres-ssl:dev \ + -c ssl=on \ + -c ssl_cert_file=/var/lib/postgresql/server.crt \ + -c ssl_key_file=/var/lib/postgresql/server.key \ +``` + +Then you'll need to edit `integration_tests/normalization_test_output/postgres/test_simple_streams/profiles.yml` and set the port to 9001. + +If you manually start an external Postgres instance (or whatever destination you're working on), you can set the [`NORMALIZATION_TEST_POSTGRES_DB_PORT`](https://github.com/airbytehq/airbyte/blob/8ed3fb5379bf5a93d011a78a3be435cf9de8ab74/airbyte-integrations/bases/base-normalization/integration_tests/dbt_integration_test.py#L26) +variable to run tests against that instance. + +### Running dbt + +Once you have a database available, you can run dbt commands. We recommend running dbt from inside the `airbyte/normalization:dev` image. +This saves you the effort of installing dbt and reconfiguring dbt_project.yml. You should build the image locally with `./gradlew :airbyte-integrations:bases:base-normalization:airbyteDocker`. + +First, `cd integration_tests/normalization_test_output/postgres/test_simple_streams`. 
Then install dbt's dependencies: +```shell +docker run \ + --rm \ + --init \ + -v $(pwd):/workspace \ + -v $(pwd)/build:/build \ + -v $(pwd)/logs:/logs \ + -v $(pwd)/build/dbt_packages/:/dbt \ + --entrypoint /usr/local/bin/dbt \ + --network host \ + -i airbyte/normalization:dev \ + deps \ + --profiles-dir=/workspace \ + --project-dir=/workspace +``` + +You should be able to run `dbt compile` now: +```shell +docker run \ + --rm \ + --init \ + -v $(pwd):/workspace \ + -v $(pwd)/build:/build \ + -v $(pwd)/logs:/logs \ + -v $(pwd)/build/dbt_packages/:/dbt \ + --entrypoint /usr/local/bin/dbt \ + --network host \ + -i airbyte/normalization:dev \ + compile \ + --profiles-dir=/workspace \ + --project-dir=/workspace +``` + +This will modify the files in `build/compiled/airbyte_utils/models/generated`. +For example, if you edit `models/generated/airbyte_incremental/scd/test_normalization/dedup_cdc_excluded_scd.sql`, then after compiling, +you can see the results in `build/compiled/airbyte_utils/models/generated/airbyte_incremental/scd/test_normalization/dedup_cdc_excluded_scd.sql`. + +You can also use `dbt run` to have dbt actually execute your models: +```shell +docker run \ + --rm \ + --init \ + -v $(pwd):/workspace \ + -v $(pwd)/build:/build \ + -v $(pwd)/logs:/logs \ + -v $(pwd)/build/dbt_packages/:/dbt \ + --entrypoint /usr/local/bin/dbt \ + --network host \ + -i airbyte/normalization:dev \ + run \ + --profiles-dir=/workspace \ + --project-dir=/workspace +``` +Like `dbt compile`, this will modify the files in `build/compiled/airbyte_utils/models/generated`. It will also modify the files in +`build/run/airbyte_utils/models/generated`. + +## Testing normalization Below are short descriptions of the kind of tests that may be affected by changes to the normalization code. 
@@ -28,7 +187,7 @@ used for editable installs (`pip install -e`) to pull in Python dependencies fro If this is mumbo jumbo to you, don't worry about it, just put your deps in `setup.py` but install using `pip install -r requirements.txt` and everything should work as you expect. -## Unit Tests +### Unit Tests Unit tests are automatically included when building the normalization project. But you could invoke them explicitly by running the following commands for example: @@ -69,22 +228,22 @@ These Unit tests checks implementation of specific rules of SQL identifier namin The specifications rules of each destinations are detailed in the corresponding docs, especially on the allowed characters, if quotes are needed or not, and the length limitations: -- [bigquery](../../../docs/integrations/destinations/bigquery.md) -- [postgres](../../../docs/integrations/destinations/postgres.md) -- [redshift](../../../docs/integrations/destinations/redshift.md) -- [snowflake](../../../docs/integrations/destinations/snowflake.md) -- [mysql](../../../docs/integrations/destinations/mysql.md) -- [oracle](../../../docs/integrations/destinations/oracle.md) -- [mssql](../../../docs/integrations/destinations/mssql.md) +* [bigquery](../../../docs/integrations/destinations/bigquery.md) +* [postgres](../../../docs/integrations/destinations/postgres.md) +* [redshift](../../../docs/integrations/destinations/redshift.md) +* [snowflake](../../../docs/integrations/destinations/snowflake.md) +* [mysql](../../../docs/integrations/destinations/mysql.md) +* [oracle](../../../docs/integrations/destinations/oracle.md) +* [mssql](../../../docs/integrations/destinations/mssql.md) Rules about truncations, for example for both of these strings which are too long for the postgres 64 limit: -- `Aaaa_Bbbb_Cccc_Dddd_Eeee_Ffff_Gggg_Hhhh_Iiii` -- `Aaaa_Bbbb_Cccc_Dddd_a_very_long_name_Ffff_Gggg_Hhhh_Iiii` +* `Aaaa_Bbbb_Cccc_Dddd_Eeee_Ffff_Gggg_Hhhh_Iiii` +* `Aaaa_Bbbb_Cccc_Dddd_a_very_long_name_Ffff_Gggg_Hhhh_Iiii` 
Deciding on how to truncate (in the middle) are being verified in these tests. In this instance, both strings ends up as: -- `Aaaa_Bbbb_Cccc_Dddd___e_Ffff_Gggg_Hhhh_Iiii` +* `Aaaa_Bbbb_Cccc_Dddd___e_Ffff_Gggg_Hhhh_Iiii` The truncate operation gets rid of characters in the middle of the string to preserve the start and end characters as it may contain more useful information in table naming. However the final @@ -94,7 +253,7 @@ Note that dealing with such collisions is not part of `destination_name_transfor `stream_processor` since one is focused on destination conventions and the other on putting together identifier names from streams and catalogs. -## Integration Tests +### Integration Tests Look at the `./setup/*.md` (e.g. `./setup/snowflake.md`) for how to set up integration environments. @@ -123,25 +282,25 @@ Note that these tests are connecting and processing data on top of real data war Therefore, valid credentials files are expected to be injected in the `secrets/` folder in order to run (not included in git repository). -This is usually automatically done by the CI thanks to the `tools/bin/ci_credentials.sh` script or you can +This is usually automatically done by the CI thanks to the `tools/bin/ci_credentials.sh` script or you can re-use the `destination_config.json` passed to destination connectors. -As normalization supports more and more destinations, tests are relying on an increasing number of destinations. -As a result, it is possible that the docker garbage collector is triggered to wipe "unused" docker images while the -integration tests for normalization are running. Thus, if you encounter errors about a connector's docker image not being +As normalization supports more and more destinations, tests are relying on an increasing number of destinations. +As a result, it is possible that the docker garbage collector is triggered to wipe "unused" docker images while the +integration tests for normalization are running. 
Thus, if you encounter errors about a connector's docker image not being present locally (even though it was built beforehand), make sure to increase the docker image storage size of your docker engine ("defaultKeepStorage" for mac for example). -### Integration Tests Definitions for test_ephemeral.py: +#### Integration Tests Definitions for test_ephemeral.py: The test here focus on benchmarking the "ephemeral" materialization mode of dbt. Depending on the number of columns in a catalog, this may throw exceptions and fail. This test ensures that we support reasonable number of columns in destination tables. For example, known limitations that are now supported were: -- Ephemeral materialization with some generated models break with more than 490 columns with "maximum recursion depth exceeded", we now automatically switch to a little more scalable mode when generating dbt models by using views materialization. -- The tests are currently checking that at least a reasonably large number (1500) of columns can complete successfully. +* Ephemeral materialization with some generated models break with more than 490 columns with "maximum recursion depth exceeded", we now automatically switch to a little more scalable mode when generating dbt models by using views materialization. +* The tests are currently checking that at least a reasonably large number (1500) of columns can complete successfully. However, limits on the destination still exists and can break for higher number of columns... -### Integration Tests Definitions for test_normalization.py: +#### Integration Tests Definitions for test_normalization.py: Some test suites can be selected to be versioned control in Airbyte git repository (or not). 
This is useful to see direct impacts of code changes on downstream files generated or compiled @@ -175,33 +334,36 @@ For example, below, we would have 2 different tests "suites" with this hierarchy ā”œā”€ā”€ dbt_schema_tests/ ā””ā”€ā”€ README.md -#### README.md: +##### README.md: Each test suite should have an optional `README.md` to include further details and descriptions of what the test is trying to verify and how it is specifically built. -### Integration Test Data Input: +#### Integration Test Data Input: -#### data_input/catalog.json: +##### data_input/catalog.json: The `catalog.json` is the main input for normalization from which the dbt models files are being generated from as it describes in JSON Schema format what the data structure is. -#### data_input/messages.txt: +##### data_input/messages.txt: The `messages.txt` are serialized Airbyte JSON records that should be sent to the destination as if they were transmitted by a source. In this integration test, the files is read and "cat" through to the docker image of each destination connectors to populate `_airbyte_raw_tables`. These tables are finally used as input data for dbt to run from. -#### data_input/replace_identifiers.json: +Note that `test_simple_streams` has additional message files, each representing a separate sync +(`messages_incremental.txt` and `messages_schema_change.txt`). + +##### data_input/replace_identifiers.json: The `replace_identifiers.json` contains maps of string patterns and values to replace in the `dbt_schema_tests` and `dbt_data_tests` files to handle cross database compatibility. Note that an additional step is added before replacing identifiers to change capitalization of identifiers in those tests files. (to uppercase on snowflake and lowercase on redshift). -### Integration Test Execution Flow: +#### Integration Test Execution Flow: These integration tests are run against all destinations that dbt can be executed on. 
So, for each target destination, the steps run by the tests are: @@ -212,61 +374,63 @@ So, for each target destination, the steps run by the tests are: `messages.txt` file as data input. 4. Run Normalization step to generate dbt models files from `catalog.json` input file. 5. Execute dbt cli command: `dbt run` from the test workspace folder to compile generated models files - - from `models/generated/` folder - - into `../build/(compiled|run)/airbyte_utils/models/generated/` folder - - The final "run" SQL files are also copied (for archiving) to `final/` folder by the test script. + * from `models/generated/` folder + * into `../build/(compiled|run)/airbyte_utils/models/generated/` folder + * The final "run" SQL files are also copied (for archiving) to `final/` folder by the test script. 6. Deploy the `schema_tests` and `data_tests` files into the test workspace folder. 7. Execute dbt cli command: `dbt tests` from the test workspace folder to run verifications and checks with dbt. 8. Optional checks (nothing for the moment) Note that the tests are using the normalization code from the python files directly, so it is not necessary to rebuild the docker images -in between when iterating on the code base. However, dbt cli and destination connectors are invoked thanks to the dev docker images. +in between when iterating on the code base. However, dbt cli and destination connectors are invoked via the dev docker images. +This means that if your `airbyte/normalization:dev` image doesn't have a working dbt installation, tests _will_ fail. +Similarly, if your `destination-xyz:dev` image doesn't work, then the base-normalization integration tests will fail. -### Integration Test Checks: +#### Integration Test Checks: -#### dbt schema tests: +##### dbt schema tests: dbt allows out of the box to configure some tests as properties for an existing model (or source, seed, or snapshot). 
This can be done in yaml format as described in the following documentation pages: -- [dbt schema-tests](https://docs.getdbt.com/docs/building-a-dbt-project/tests#schema-tests) -- [custom schema test](https://docs.getdbt.com/docs/guides/writing-custom-schema-tests) -- [dbt expectations](https://github.com/calogica/dbt-expectations) +* [dbt schema-tests](https://docs.getdbt.com/docs/building-a-dbt-project/tests#schema-tests) +* [custom schema test](https://docs.getdbt.com/docs/guides/writing-custom-schema-tests) +* [dbt expectations](https://github.com/calogica/dbt-expectations) We are leveraging these capabilities in these integration tests to verify some relationships in our generated tables on the destinations. -#### dbt data tests: +##### dbt data tests: Additionally, dbt also supports "data tests" which are specified as SQL queries. A data test is a select statement that returns 0 records when the test is successful. -- [dbt data-tests](https://docs.getdbt.com/docs/building-a-dbt-project/tests#data-tests) +* [dbt data-tests](https://docs.getdbt.com/docs/building-a-dbt-project/tests#data-tests) -#### Notes using dbt seeds: +##### Notes using dbt seeds: Because some functionalities are not stable enough on dbt side, it is difficult to properly use `dbt seed` commands to populate a set of expected data tables at the moment. Hopefully, this can be more easily be done in the future... Related issues to watch on dbt progress to improve this aspects: -- https://github.com/fishtown-analytics/dbt/issues/2959#issuecomment-747509782 -- https://medium.com/hashmapinc/unit-testing-on-dbt-models-using-a-static-test-dataset-in-snowflake-dfd35549b5e2 +* <https://github.com/fishtown-analytics/dbt/issues/2959#issuecomment-747509782> +* <https://medium.com/hashmapinc/unit-testing-on-dbt-models-using-a-static-test-dataset-in-snowflake-dfd35549b5e2> A nice improvement would be to add csv/json seed files as expected output data from tables. The integration tests would verify that the content of such tables in the destination would match these seed files or fail.
-### Debug dbt operations with local database +#### Debug dbt operations with local database This only works for testing databases launched in local containers (e.g. postgres and mysql). -- In `dbt_integration_test.py`, comment out the `tear_down_db` method so that the relevant database container is not deleted. -- Find the name of the database container in the logs (e.g. by searching `Executing`). -- Connect to the container by running `docker exec -it bash` in the commandline. -- Connect to the database inside the container (e.g. `mysql -u root` for mysql). -- Test the generated dbt operations directly in the database. +* In `dbt_integration_test.py`, comment out the `tear_down_db` method so that the relevant database container is not deleted. +* Find the name of the database container in the logs (e.g. by searching `Executing`). +* Connect to the container by running `docker exec -it bash` in the commandline. +* Connect to the database inside the container (e.g. `mysql -u root` for mysql). +* Test the generated dbt operations directly in the database. -## Standard Destination Tests +### Standard Destination Tests Generally, to invoke standard destination tests, you run with gradle using: @@ -274,6 +438,6 @@ Generally, to invoke standard destination tests, you run with gradle using: For more details and options, you can also refer to the [testing connectors docs](../../../docs/connector-development/testing-connectors/README.md). -## Acceptance Tests +### Acceptance Tests Please refer to the [developing docs](../../../docs/contributing-to-airbyte/developing-locally.md) on how to run Acceptance Tests. 
diff --git a/airbyte-integrations/bases/base-normalization/dbt-project-template-mysql/dbt_project.yml b/airbyte-integrations/bases/base-normalization/dbt-project-template-mysql/dbt_project.yml index db791a568a0b..7116e6dc63d2 100755 --- a/airbyte-integrations/bases/base-normalization/dbt-project-template-mysql/dbt_project.yml +++ b/airbyte-integrations/bases/base-normalization/dbt-project-template-mysql/dbt_project.yml @@ -2,7 +2,7 @@ # the content will be overwritten by the transform function # Name your package! Package names should contain only lowercase characters -# and underscores. A good package name should reflect your organization's +# and underscores. A good package name should reflect your organization's name or the intended use of these models name: "airbyte_utils" version: "1.0" @@ -13,18 +13,18 @@ config-version: 2 profile: "normalize" # These configurations specify where dbt should look for different types of files. -# The `source-paths` config, for example, states that source models can be found -# in the "models/" directory. You probably won't need to change these! -source-paths: ["models"] +# The `model-paths` config, for example, states that source models can be found +# in the "models/" directory. You probably won't need to change these! +model-paths: ["models"] docs-paths: ["docs"] analysis-paths: ["analysis"] test-paths: ["tests"] -data-paths: ["data"] +seed-paths: ["data"] macro-paths: ["macros"] target-path: "../build" # directory which will store compiled SQL files log-path: "../logs" # directory which will store DBT logs -modules-path: "/dbt" # directory which will store external DBT dependencies +packages-install-path: "/dbt" # directory which will store external DBT dependencies clean-targets: # directories to be removed by `dbt clean` - "build" @@ -37,7 +37,7 @@ quoting: schema: false identifier: true -# You can define configurations for models in the `source-paths` directory here.
+# You can define configurations for models in the `model-paths` directory here. # Using these configurations, you can enable or disable models, change how they # are materialized, and more! models: diff --git a/airbyte-integrations/bases/base-normalization/dbt-project-template-mysql/packages.yml b/airbyte-integrations/bases/base-normalization/dbt-project-template-mysql/packages.yml index 13d4e69a45cb..33b4edd58c8c 100755 --- a/airbyte-integrations/bases/base-normalization/dbt-project-template-mysql/packages.yml +++ b/airbyte-integrations/bases/base-normalization/dbt-project-template-mysql/packages.yml @@ -2,4 +2,4 @@ packages: - git: "https://github.com/fishtown-analytics/dbt-utils.git" - revision: 0.6.4 + revision: 0.8.2 diff --git a/airbyte-integrations/bases/base-normalization/dbt-project-template/macros/clean_tmp_tables.sql b/airbyte-integrations/bases/base-normalization/dbt-project-template/macros/clean_tmp_tables.sql new file mode 100644 index 000000000000..46e2328745f1 --- /dev/null +++ b/airbyte-integrations/bases/base-normalization/dbt-project-template/macros/clean_tmp_tables.sql @@ -0,0 +1,19 @@ +{% macro clean_tmp_tables(schemas) -%} + {{ adapter.dispatch('clean_tmp_tables')(schemas) }} +{%- endmacro %} + +-- default +{% macro default__clean_tmp_tables(schemas) -%} + {% do exceptions.warn("\tINFO: CLEANING TEST LEFTOVERS IS NOT IMPLEMENTED FOR THIS DESTINATION. 
CONSIDER TO REMOVE TEST TABLES MANUALY.\n") %} +{%- endmacro %} + +-- for redshift +{% macro redshift__clean_tmp_tables(schemas) %} + {%- for tmp_schema in schemas -%} + {% do log("\tDROP SCHEMA IF EXISTS " ~ tmp_schema, info=True) %} + {%- set drop_query -%} + drop schema if exists {{ tmp_schema }} cascade; + {%- endset -%} + {%- do run_query(drop_query) -%} + {%- endfor -%} +{% endmacro %} \ No newline at end of file diff --git a/airbyte-integrations/bases/base-normalization/dbt-project-template/macros/cross_db_utils/concat.sql b/airbyte-integrations/bases/base-normalization/dbt-project-template/macros/cross_db_utils/concat.sql index 536cebc7b23c..ebd5d265985c 100644 --- a/airbyte-integrations/bases/base-normalization/dbt-project-template/macros/cross_db_utils/concat.sql +++ b/airbyte-integrations/bases/base-normalization/dbt-project-template/macros/cross_db_utils/concat.sql @@ -12,6 +12,11 @@ concat({{ fields|join(', ') }}) {%- endmacro %} +{% macro mysql__concat(fields) -%} + {#-- MySQL doesn't support the '||' operator as concatenation by default --#} + concat({{ fields|join(', ') }}) +{%- endmacro %} + {% macro sqlserver__concat(fields) -%} {#-- CONCAT() in SQL SERVER accepts from 2 to 254 arguments, we use batches for the main concat, to overcome the limit. 
--#} {% set concat_chunks = [] %} diff --git a/airbyte-integrations/bases/base-normalization/dbt-project-template/macros/cross_db_utils/json_operations.sql b/airbyte-integrations/bases/base-normalization/dbt-project-template/macros/cross_db_utils/json_operations.sql index e1e54439e657..29554485d330 100644 --- a/airbyte-integrations/bases/base-normalization/dbt-project-template/macros/cross_db_utils/json_operations.sql +++ b/airbyte-integrations/bases/base-normalization/dbt-project-template/macros/cross_db_utils/json_operations.sql @@ -236,3 +236,18 @@ {% macro clickhouse__json_extract_array(json_column, json_path_list, normalized_json_path) -%} JSONExtractArrayRaw(assumeNotNull({{ json_column }}), {{ format_json_path(json_path_list) }}) {%- endmacro %} + +{# json_extract_string_array ------------------------------------------------- #} + +{% macro json_extract_string_array(json_column, json_path_list, normalized_json_path) -%} + {{ adapter.dispatch('json_extract_string_array')(json_column, json_path_list, normalized_json_path) }} +{%- endmacro %} + +{% macro default__json_extract_string_array(json_column, json_path_list, normalized_json_path) -%} + {{ json_extract_array(json_column, json_path_list, normalized_json_path) }} +{%- endmacro %} + +# https://cloud.google.com/bigquery/docs/reference/standard-sql/json_functions#json_extract_string_array +{% macro bigquery__json_extract_string_array(json_column, json_path_list, normalized_json_path) -%} + json_extract_string_array({{ json_column }}, {{ format_json_path(normalized_json_path) }}) +{%- endmacro %} diff --git a/airbyte-integrations/bases/base-normalization/dbt-project-template/macros/incremental.sql b/airbyte-integrations/bases/base-normalization/dbt-project-template/macros/incremental.sql index f70b4798075c..86750a85ebcb 100644 --- a/airbyte-integrations/bases/base-normalization/dbt-project-template/macros/incremental.sql +++ 
b/airbyte-integrations/bases/base-normalization/dbt-project-template/macros/incremental.sql @@ -4,14 +4,14 @@ - incremental_clause controls the predicate to filter on new data to process incrementally #} -{% macro incremental_clause(col_emitted_at) -%} - {{ adapter.dispatch('incremental_clause')(col_emitted_at) }} +{% macro incremental_clause(col_emitted_at, tablename) -%} + {{ adapter.dispatch('incremental_clause')(col_emitted_at, tablename) }} {%- endmacro %} -{%- macro default__incremental_clause(col_emitted_at) -%} +{%- macro default__incremental_clause(col_emitted_at, tablename) -%} {% if is_incremental() %} and coalesce( - cast({{ col_emitted_at }} as {{ type_timestamp_with_timezone() }}) >= (select max(cast({{ col_emitted_at }} as {{ type_timestamp_with_timezone() }})) from {{ this }}), + cast({{ col_emitted_at }} as {{ type_timestamp_with_timezone() }}) >= (select max(cast({{ col_emitted_at }} as {{ type_timestamp_with_timezone() }})) from {{ tablename }}), {# -- if {{ col_emitted_at }} is NULL in either table, the previous comparison would evaluate to NULL, #} {# -- so we coalesce and make sure the row is always returned for incremental processing instead #} true) @@ -19,28 +19,28 @@ and coalesce( {%- endmacro -%} {# -- see https://on-systems.tech/113-beware-dbt-incremental-updates-against-snowflake-external-tables/ #} -{%- macro snowflake__incremental_clause(col_emitted_at) -%} +{%- macro snowflake__incremental_clause(col_emitted_at, tablename) -%} {% if is_incremental() %} - {% if get_max_normalized_cursor(col_emitted_at) %} + {% if get_max_normalized_cursor(col_emitted_at, tablename) %} and cast({{ col_emitted_at }} as {{ type_timestamp_with_timezone() }}) >= - cast('{{ get_max_normalized_cursor(col_emitted_at) }}' as {{ type_timestamp_with_timezone() }}) + cast('{{ get_max_normalized_cursor(col_emitted_at, tablename) }}' as {{ type_timestamp_with_timezone() }}) {% endif %} {% endif %} {%- endmacro -%} -{%- macro 
sqlserver__incremental_clause(col_emitted_at) -%} +{%- macro sqlserver__incremental_clause(col_emitted_at, tablename) -%} {% if is_incremental() %} -and ((select max(cast({{ col_emitted_at }} as {{ type_timestamp_with_timezone() }})) from {{ this }}) is null +and ((select max(cast({{ col_emitted_at }} as {{ type_timestamp_with_timezone() }})) from {{ tablename }}) is null or cast({{ col_emitted_at }} as {{ type_timestamp_with_timezone() }}) >= - (select max(cast({{ col_emitted_at }} as {{ type_timestamp_with_timezone() }})) from {{ this }})) + (select max(cast({{ col_emitted_at }} as {{ type_timestamp_with_timezone() }})) from {{ tablename }})) {% endif %} {%- endmacro -%} -{% macro get_max_normalized_cursor(col_emitted_at) %} +{% macro get_max_normalized_cursor(col_emitted_at, tablename) %} {% if execute and is_incremental() %} {% if env_var('INCREMENTAL_CURSOR', 'UNSET') == 'UNSET' %} {% set query %} - select max(cast({{ col_emitted_at }} as {{ type_timestamp_with_timezone() }})) from {{ this }} + select max(cast({{ col_emitted_at }} as {{ type_timestamp_with_timezone() }})) from {{ tablename }} {% endset %} {% set max_cursor = run_query(query).columns[0][0] %} {% do return(max_cursor) %} diff --git a/airbyte-integrations/bases/base-normalization/entrypoint.sh b/airbyte-integrations/bases/base-normalization/entrypoint.sh index 8cd64420e132..733a6fbe0db7 100755 --- a/airbyte-integrations/bases/base-normalization/entrypoint.sh +++ b/airbyte-integrations/bases/base-normalization/entrypoint.sh @@ -120,6 +120,7 @@ function main() { openssh "${PROJECT_DIR}/ssh.json" trap 'closessh' EXIT + set +e # allow script to continue running even if next commands fail to run properly # We don't run dbt 1.0.x on all destinations (because their plugins don't support it yet) # So we need to only pass `--event-buffer-size` if it's supported by DBT. 
check_dbt_event_buffer_size @@ -130,7 +131,6 @@ function main() { dbt_additional_args="" fi - set +e # allow script to continue running even if next commands fail to run properly # Run dbt to compile and execute the generated normalization models dbt ${dbt_additional_args} run --profiles-dir "${PROJECT_DIR}" --project-dir "${PROJECT_DIR}" DBT_EXIT_CODE=$? diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/dbt_integration_test.py b/airbyte-integrations/bases/base-normalization/integration_tests/dbt_integration_test.py index 844f41ece940..ead7e2ad0d0d 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/dbt_integration_test.py +++ b/airbyte-integrations/bases/base-normalization/integration_tests/dbt_integration_test.py @@ -5,6 +5,7 @@ import json import os +import pathlib import random import re import socket @@ -14,8 +15,9 @@ import threading import time from copy import copy -from typing import Any, Callable, Dict, List +from typing import Any, Callable, Dict, List, Union +import yaml from normalization.destination_type import DestinationType from normalization.transform_catalog.transform import read_yaml_config, write_yaml_config from normalization.transform_config.transform import TransformConfig @@ -132,10 +134,12 @@ def setup_mysql_db(self): "MYSQL_INITDB_SKIP_TZINFO=yes", "-e", f"MYSQL_DATABASE={config['database']}", + "-e", + "MYSQL_ROOT_HOST=%", "-p", f"{config['port']}:3306", "-d", - "mysql", + "mysql/mysql-server", ] print("Executing: ", " ".join(commands)) subprocess.call(commands) @@ -365,7 +369,8 @@ def writer(): line = input_data.readline() if not line: break - process.stdin.write(line) + if not line.startswith(b"#"): + process.stdin.write(line) process.stdin.close() thread = threading.Thread(target=writer) @@ -411,17 +416,23 @@ def dbt_run(self, destination_type: DestinationType, test_root_dir: str, force_f # Compile dbt models files into destination sql dialect, then run the transformation queries 
assert self.run_check_dbt_command(normalization_image, "run", test_root_dir, force_full_refresh) - @staticmethod - def run_check_dbt_command(normalization_image: str, command: str, cwd: str, force_full_refresh: bool = False) -> bool: + def dbt_run_macro(self, destination_type: DestinationType, test_root_dir: str, macro: str, macro_args: str = None): + """ + Run the dbt CLI to perform transformations on the test raw data in the destination, using independent macro. + """ + normalization_image: str = self.get_normalization_image(destination_type) + # Compile dbt models files into destination sql dialect, then run the transformation queries + assert self.run_dbt_run_operation(normalization_image, test_root_dir, macro, macro_args) + + def run_check_dbt_command(self, normalization_image: str, command: str, cwd: str, force_full_refresh: bool = False) -> bool: """ Run dbt subprocess while checking and counting for "ERROR", "FAIL" or "WARNING" printed in its outputs """ - if normalization_image.startswith("airbyte/normalization-oracle") or normalization_image.startswith("airbyte/normalization-mysql"): + if normalization_image.startswith("airbyte/normalization-oracle"): dbtAdditionalArgs = [] else: dbtAdditionalArgs = ["--event-buffer-size=10000"] - error_count = 0 commands = ( [ "docker", @@ -455,6 +466,45 @@ def run_check_dbt_command(normalization_image: str, command: str, cwd: str, forc command = f"{command} --full-refresh" print("Executing: ", " ".join(commands)) print(f"Equivalent to: dbt {command} --profiles-dir={cwd} --project-dir={cwd}") + return self.run_check_dbt_subprocess(commands, cwd) + + def run_dbt_run_operation(self, normalization_image: str, cwd: str, macro: str, macro_args: str = None) -> bool: + """ + Run dbt subprocess while checking and counting for "ERROR", "FAIL" or "WARNING" printed in its outputs + """ + args = ["--args", macro_args] if macro_args else [] + commands = ( + [ + "docker", + "run", + "--rm", + "--init", + "-v", + f"{cwd}:/workspace", + 
"-v", + f"{cwd}/build:/build", + "-v", + f"{cwd}/logs:/logs", + "-v", + f"{cwd}/build/dbt_packages:/dbt", + "--network", + "host", + "--entrypoint", + "/usr/local/bin/dbt", + "-i", + normalization_image, + ] + + ["run-operation", macro] + + args + + ["--profiles-dir=/workspace", "--project-dir=/workspace"] + ) + + print("Executing: ", " ".join(commands)) + print(f"Equivalent to: dbt run-operation {macro} --args {macro_args} --profiles-dir={cwd} --project-dir={cwd}") + return self.run_check_dbt_subprocess(commands, cwd) + + def run_check_dbt_subprocess(self, commands: list, cwd: str): + error_count = 0 with open(os.path.join(cwd, "dbt_output.log"), "ab") as f: process = subprocess.Popen(commands, cwd=cwd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, env=os.environ) for line in iter(lambda: process.stdout.readline(), b""): @@ -549,3 +599,94 @@ def update_yaml_file(filename: str, callback: Callable): updated, config = callback(config) if updated: write_yaml_config(config, filename) + + def clean_tmp_tables( + self, + destination_type: Union[DestinationType, List[DestinationType]], + test_type: str, + tmp_folders: list = None, + git_versioned_tests: list = None, + ): + """ + Cleans-up all temporary schemas created during the test session. + It parses the provided tmp_folders: List[str] or uses `git_versioned_tests` to find sources.yml files generated for the tests. + It gets target schemas created by the tests and removes them using custom scenario specified in + `dbt-project-template/macros/clean_tmp_tables.sql` macro. + + REQUIREMENTS: + 1) Idealy, the schemas should have unique names like: test_normalization_ to avoid conflicts. + 2) The `clean_tmp_tables.sql` macro should have the specific macro for target destination to proceed. + + INPUT ARGUMENTS: + :: destination_type : either single destination or list of destinations + :: test_type: either "ephemeral" or "normalization" should be supplied. 
+ :: tmp_folders: should be supplied if test_type = "ephemeral", to get schemas from /build/normalization_test_output folders + :: git_versioned_tests: should be supplied if test_type = "normalization", to get schemas from integration_tests/normalization_test_output folders + + EXAMPLE: + clean_up_args = { + "destination_type": [ DestinationType.REDSHIFT, DestinationType.POSTGRES, ... ] + "test_type": "normalization", + "git_versioned_tests": git_versioned_tests, + } + """ + + path_to_sources: str = "/models/generated/sources.yml" + test_folders: dict = {} + source_files: dict = {} + schemas_to_remove: dict = {} + + # collecting information about tmp_tables created for the test for each destination + for destination in destination_type: + test_folders[destination.value] = [] + source_files[destination.value] = [] + schemas_to_remove[destination.value] = [] + + # based on test_type select path to source files + if test_type == "ephemeral": + if not tmp_folders: + raise TypeError("`tmp_folders` arg is not provided.") + for folder in tmp_folders: + if destination.value in folder: + test_folders[destination.value].append(folder) + source_files[destination.value].append(f"{folder}{path_to_sources}") + elif test_type == "normalization": + if not git_versioned_tests: + raise TypeError("`git_versioned_tests` arg is not provided.") + base_path = f"{pathlib.Path().absolute()}/integration_tests/normalization_test_output" + for test in git_versioned_tests: + test_root_dir: str = f"{base_path}/{destination.value}/{test}" + test_folders[destination.value].append(test_root_dir) + source_files[destination.value].append(f"{test_root_dir}{path_to_sources}") + else: + raise TypeError(f"\n`test_type`: {test_type} is not a registered, use `ephemeral` or `normalization` instead.\n") + + # parse source.yml files from test folders to get schemas and table names created for the tests + for file in source_files[destination.value]: + source_yml = {} + try: + with open(file, "r") as 
source_file: + source_yml = yaml.safe_load(source_file) + except FileNotFoundError: + print(f"\n{destination.value}: {file} doesn't exist, consider to remove any temp_tables and schemas manually!\n") + pass + test_sources: list = source_yml.get("sources", []) if source_yml else [] + + for source in test_sources: + target_schema: str = source.get("name") + if target_schema not in schemas_to_remove[destination.value]: + schemas_to_remove[destination.value].append(target_schema) + # adding _airbyte_* tmp schemas to be removed + schemas_to_remove[destination.value].append(f"_airbyte_{target_schema}") + + # cleaning up tmp_tables generated by the tests + for destination in destination_type: + if not schemas_to_remove[destination.value]: + print(f"\n\t{destination.value.upper()} DESTINATION: SKIP CLEANING, NOTHING TO REMOVE.\n") + else: + print(f"\n\t{destination.value.upper()} DESTINATION: CLEANING LEFTOVERS...\n") + print(f"\t{schemas_to_remove[destination.value]}\n") + test_root_folder = test_folders[destination.value][0] + args = json.dumps({"schemas": schemas_to_remove[destination.value]}) + self.dbt_check(destination, test_root_folder) + self.dbt_run_macro(destination, test_root_folder, "clean_tmp_tables", args) diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/bigquery/test_nested_streams/dbt_project.yml b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/bigquery/test_nested_streams/dbt_project.yml index 7631ef356dc9..68ca41b91d53 100755 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/bigquery/test_nested_streams/dbt_project.yml +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/bigquery/test_nested_streams/dbt_project.yml @@ -1,45 +1,29 @@ -# This file is necessary to install dbt-utils with dbt deps -# the content will be overwritten by the transform function - -# Name your 
package! Package names should contain only lowercase characters -# and underscores. A good package name should reflect your organization's -# name or the intended use of these models -name: "airbyte_utils" +name: airbyte_utils version: "1.0" config-version: 2 - -# This setting configures which "profile" dbt uses for this project. Profiles contain -# database connection information, and should be configured in the ~/.dbt/profiles.yml file -profile: "normalize" - -# These configurations specify where dbt should look for different types of files. -# The `model-paths` config, for example, states that source models can be found -# in the "models/" directory. You probably won't need to change these! -model-paths: ["models"] -docs-paths: ["docs"] -analysis-paths: ["analysis"] -test-paths: ["tests"] -seed-paths: ["data"] -macro-paths: ["macros"] - -target-path: "../build" # directory which will store compiled SQL files -log-path: "../logs" # directory which will store DBT logs -packages-install-path: "/dbt" # directory which will store external DBT dependencies - -clean-targets: # directories to be removed by `dbt clean` - - "build" - - "dbt_modules" - +profile: normalize +model-paths: + - models +docs-paths: + - docs +analysis-paths: + - analysis +test-paths: + - tests +seed-paths: + - data +macro-paths: + - macros +target-path: ../build +log-path: ../logs +packages-install-path: /dbt +clean-targets: + - build + - dbt_modules quoting: database: true - # Temporarily disabling the behavior of the ExtendedNameTransformer on table/schema names, see (issue #1785) - # all schemas should be unquoted schema: false identifier: true - -# You can define configurations for models in the `model-paths` directory here. -# Using these configurations, you can enable or disable models, change how they -# are materialized, and more! 
models: airbyte_utils: +materialized: table @@ -57,7 +41,77 @@ models: airbyte_views: +tags: airbyte_internal_views +materialized: view - dispatch: - macro_namespace: dbt_utils - search_order: ["airbyte_utils", "dbt_utils"] + search_order: + - airbyte_utils + - dbt_utils +vars: + json_column: _airbyte_data + models_to_source: + nested_stream_with_complex_columns_resulting_into_long_names_ab1: test_normalization._airbyte_raw_nested_stream_with_complex_columns_resulting_into_long_names + nested_stream_with_complex_columns_resulting_into_long_names_ab2: test_normalization._airbyte_raw_nested_stream_with_complex_columns_resulting_into_long_names + nested_stream_with_complex_columns_resulting_into_long_names_stg: test_normalization._airbyte_raw_nested_stream_with_complex_columns_resulting_into_long_names + nested_stream_with_complex_columns_resulting_into_long_names_scd: test_normalization._airbyte_raw_nested_stream_with_complex_columns_resulting_into_long_names + nested_stream_with_complex_columns_resulting_into_long_names: test_normalization._airbyte_raw_nested_stream_with_complex_columns_resulting_into_long_names + non_nested_stream_without_namespace_resulting_into_long_names_ab1: test_normalization._airbyte_raw_non_nested_stream_without_namespace_resulting_into_long_names + non_nested_stream_without_namespace_resulting_into_long_names_ab2: test_normalization._airbyte_raw_non_nested_stream_without_namespace_resulting_into_long_names + non_nested_stream_without_namespace_resulting_into_long_names_ab3: test_normalization._airbyte_raw_non_nested_stream_without_namespace_resulting_into_long_names + non_nested_stream_without_namespace_resulting_into_long_names: test_normalization._airbyte_raw_non_nested_stream_without_namespace_resulting_into_long_names + some_stream_that_was_empty_ab1: test_normalization._airbyte_raw_some_stream_that_was_empty + some_stream_that_was_empty_ab2: test_normalization._airbyte_raw_some_stream_that_was_empty + some_stream_that_was_empty_stg: 
test_normalization._airbyte_raw_some_stream_that_was_empty + some_stream_that_was_empty_scd: test_normalization._airbyte_raw_some_stream_that_was_empty + some_stream_that_was_empty: test_normalization._airbyte_raw_some_stream_that_was_empty + simple_stream_with_namespace_resulting_into_long_names_ab1: test_normalization_namespace._airbyte_raw_simple_stream_with_namespace_resulting_into_long_names + simple_stream_with_namespace_resulting_into_long_names_ab2: test_normalization_namespace._airbyte_raw_simple_stream_with_namespace_resulting_into_long_names + simple_stream_with_namespace_resulting_into_long_names_ab3: test_normalization_namespace._airbyte_raw_simple_stream_with_namespace_resulting_into_long_names + simple_stream_with_namespace_resulting_into_long_names: test_normalization_namespace._airbyte_raw_simple_stream_with_namespace_resulting_into_long_names + conflict_stream_name_ab1: test_normalization._airbyte_raw_conflict_stream_name + conflict_stream_name_ab2: test_normalization._airbyte_raw_conflict_stream_name + conflict_stream_name_ab3: test_normalization._airbyte_raw_conflict_stream_name + conflict_stream_name: test_normalization._airbyte_raw_conflict_stream_name + conflict_stream_scalar_ab1: test_normalization._airbyte_raw_conflict_stream_scalar + conflict_stream_scalar_ab2: test_normalization._airbyte_raw_conflict_stream_scalar + conflict_stream_scalar_ab3: test_normalization._airbyte_raw_conflict_stream_scalar + conflict_stream_scalar: test_normalization._airbyte_raw_conflict_stream_scalar + conflict_stream_array_ab1: test_normalization._airbyte_raw_conflict_stream_array + conflict_stream_array_ab2: test_normalization._airbyte_raw_conflict_stream_array + conflict_stream_array_ab3: test_normalization._airbyte_raw_conflict_stream_array + conflict_stream_array: test_normalization._airbyte_raw_conflict_stream_array + unnest_alias_ab1: test_normalization._airbyte_raw_unnest_alias + unnest_alias_ab2: test_normalization._airbyte_raw_unnest_alias + 
unnest_alias_ab3: test_normalization._airbyte_raw_unnest_alias + unnest_alias: test_normalization._airbyte_raw_unnest_alias + nested_stream_with_complex_columns_resulting_into_long_names_partition_ab1: test_normalization._airbyte_raw_nested_stream_with_complex_columns_resulting_into_long_names + nested_stream_with_complex_columns_resulting_into_long_names_partition_ab2: test_normalization._airbyte_raw_nested_stream_with_complex_columns_resulting_into_long_names + nested_stream_with_complex_columns_resulting_into_long_names_partition_ab3: test_normalization._airbyte_raw_nested_stream_with_complex_columns_resulting_into_long_names + nested_stream_with_complex_columns_resulting_into_long_names_partition: test_normalization._airbyte_raw_nested_stream_with_complex_columns_resulting_into_long_names + conflict_stream_name_conflict_stream_name_ab1: test_normalization._airbyte_raw_conflict_stream_name + conflict_stream_name_conflict_stream_name_ab2: test_normalization._airbyte_raw_conflict_stream_name + conflict_stream_name_conflict_stream_name_ab3: test_normalization._airbyte_raw_conflict_stream_name + conflict_stream_name_conflict_stream_name: test_normalization._airbyte_raw_conflict_stream_name + unnest_alias_children_ab1: test_normalization._airbyte_raw_unnest_alias + unnest_alias_children_ab2: test_normalization._airbyte_raw_unnest_alias + unnest_alias_children_ab3: test_normalization._airbyte_raw_unnest_alias + unnest_alias_children: test_normalization._airbyte_raw_unnest_alias + nested_stream_with_complex_columns_resulting_into_long_names_partition_double_array_data_ab1: test_normalization._airbyte_raw_nested_stream_with_complex_columns_resulting_into_long_names + nested_stream_with_complex_columns_resulting_into_long_names_partition_double_array_data_ab2: test_normalization._airbyte_raw_nested_stream_with_complex_columns_resulting_into_long_names + nested_stream_with_complex_columns_resulting_into_long_names_partition_double_array_data_ab3: 
test_normalization._airbyte_raw_nested_stream_with_complex_columns_resulting_into_long_names + nested_stream_with_complex_columns_resulting_into_long_names_partition_double_array_data: test_normalization._airbyte_raw_nested_stream_with_complex_columns_resulting_into_long_names + nested_stream_with_complex_columns_resulting_into_long_names_partition_DATA_ab1: test_normalization._airbyte_raw_nested_stream_with_complex_columns_resulting_into_long_names + nested_stream_with_complex_columns_resulting_into_long_names_partition_DATA_ab2: test_normalization._airbyte_raw_nested_stream_with_complex_columns_resulting_into_long_names + nested_stream_with_complex_columns_resulting_into_long_names_partition_DATA_ab3: test_normalization._airbyte_raw_nested_stream_with_complex_columns_resulting_into_long_names + nested_stream_with_complex_columns_resulting_into_long_names_partition_DATA: test_normalization._airbyte_raw_nested_stream_with_complex_columns_resulting_into_long_names + conflict_stream_name_conflict_stream_name_conflict_stream_name_ab1: test_normalization._airbyte_raw_conflict_stream_name + conflict_stream_name_conflict_stream_name_conflict_stream_name_ab2: test_normalization._airbyte_raw_conflict_stream_name + conflict_stream_name_conflict_stream_name_conflict_stream_name_ab3: test_normalization._airbyte_raw_conflict_stream_name + conflict_stream_name_conflict_stream_name_conflict_stream_name: test_normalization._airbyte_raw_conflict_stream_name + unnest_alias_children_owner_ab1: test_normalization._airbyte_raw_unnest_alias + unnest_alias_children_owner_ab2: test_normalization._airbyte_raw_unnest_alias + unnest_alias_children_owner_ab3: test_normalization._airbyte_raw_unnest_alias + unnest_alias_children_owner: test_normalization._airbyte_raw_unnest_alias + unnest_alias_children_owner_column___with__quotes_ab1: test_normalization._airbyte_raw_unnest_alias + unnest_alias_children_owner_column___with__quotes_ab2: test_normalization._airbyte_raw_unnest_alias + 
unnest_alias_children_owner_column___with__quotes_ab3: test_normalization._airbyte_raw_unnest_alias + unnest_alias_children_owner_column___with__quotes: test_normalization._airbyte_raw_unnest_alias diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/bigquery/test_nested_streams/models/generated/airbyte_ctes/test_normalization/nested_stream_with_complex_columns_resulting_into_long_names_ab1.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/bigquery/test_nested_streams/models/generated/airbyte_ctes/test_normalization/nested_stream_with_complex_columns_resulting_into_long_names_ab1.sql index 9f8d6b5f44c5..b988a169ef1f 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/bigquery/test_nested_streams/models/generated/airbyte_ctes/test_normalization/nested_stream_with_complex_columns_resulting_into_long_names_ab1.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/bigquery/test_nested_streams/models/generated/airbyte_ctes/test_normalization/nested_stream_with_complex_columns_resulting_into_long_names_ab1.sql @@ -17,5 +17,5 @@ select from {{ source('test_normalization', '_airbyte_raw_nested_stream_with_complex_columns_resulting_into_long_names') }} as table_alias -- nested_stream_with_complex_columns_resulting_into_long_names where 1 = 1 -{{ incremental_clause('_airbyte_emitted_at') }} +{{ incremental_clause('_airbyte_emitted_at', this) }} diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/bigquery/test_nested_streams/models/generated/airbyte_ctes/test_normalization/nested_stream_with_complex_columns_resulting_into_long_names_ab2.sql 
b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/bigquery/test_nested_streams/models/generated/airbyte_ctes/test_normalization/nested_stream_with_complex_columns_resulting_into_long_names_ab2.sql index 12ad3a51c83d..3c6ed6e761a2 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/bigquery/test_nested_streams/models/generated/airbyte_ctes/test_normalization/nested_stream_with_complex_columns_resulting_into_long_names_ab2.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/bigquery/test_nested_streams/models/generated/airbyte_ctes/test_normalization/nested_stream_with_complex_columns_resulting_into_long_names_ab2.sql @@ -17,5 +17,5 @@ select from {{ ref('nested_stream_with_complex_columns_resulting_into_long_names_ab1') }} -- nested_stream_with_complex_columns_resulting_into_long_names where 1 = 1 -{{ incremental_clause('_airbyte_emitted_at') }} +{{ incremental_clause('_airbyte_emitted_at', this) }} diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/bigquery/test_nested_streams/models/generated/airbyte_ctes/test_normalization/nested_stream_with_complex_columns_resulting_into_long_names_partition_DATA_ab1.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/bigquery/test_nested_streams/models/generated/airbyte_ctes/test_normalization/nested_stream_with_complex_columns_resulting_into_long_names_partition_DATA_ab1.sql index 0dcbf25c475c..3ada03a427fe 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/bigquery/test_nested_streams/models/generated/airbyte_ctes/test_normalization/nested_stream_with_complex_columns_resulting_into_long_names_partition_DATA_ab1.sql +++ 
b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/bigquery/test_nested_streams/models/generated/airbyte_ctes/test_normalization/nested_stream_with_complex_columns_resulting_into_long_names_partition_DATA_ab1.sql @@ -18,5 +18,5 @@ from {{ ref('nested_stream_with_complex_columns_resulting_into_long_names_partit {{ cross_join_unnest('partition', 'DATA') }} where 1 = 1 and DATA is not null -{{ incremental_clause('_airbyte_emitted_at') }} +{{ incremental_clause('_airbyte_emitted_at', this) }} diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/bigquery/test_nested_streams/models/generated/airbyte_ctes/test_normalization/nested_stream_with_complex_columns_resulting_into_long_names_partition_ab1.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/bigquery/test_nested_streams/models/generated/airbyte_ctes/test_normalization/nested_stream_with_complex_columns_resulting_into_long_names_partition_ab1.sql index 7a6fbe78ed1c..0734951e5126 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/bigquery/test_nested_streams/models/generated/airbyte_ctes/test_normalization/nested_stream_with_complex_columns_resulting_into_long_names_partition_ab1.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/bigquery/test_nested_streams/models/generated/airbyte_ctes/test_normalization/nested_stream_with_complex_columns_resulting_into_long_names_partition_ab1.sql @@ -17,5 +17,5 @@ from {{ ref('nested_stream_with_complex_columns_resulting_into_long_names_scd') -- partition at nested_stream_with_complex_columns_resulting_into_long_names/partition where 1 = 1 and {{ adapter.quote('partition') }} is not null -{{ incremental_clause('_airbyte_emitted_at') }} +{{ incremental_clause('_airbyte_emitted_at', this) }} diff --git 
a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/bigquery/test_nested_streams/models/generated/airbyte_ctes/test_normalization/nested_stream_with_complex_columns_resulting_into_long_names_partition_double_array_data_ab1.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/bigquery/test_nested_streams/models/generated/airbyte_ctes/test_normalization/nested_stream_with_complex_columns_resulting_into_long_names_partition_double_array_data_ab1.sql index 50893664fdb4..912073c31727 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/bigquery/test_nested_streams/models/generated/airbyte_ctes/test_normalization/nested_stream_with_complex_columns_resulting_into_long_names_partition_double_array_data_ab1.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/bigquery/test_nested_streams/models/generated/airbyte_ctes/test_normalization/nested_stream_with_complex_columns_resulting_into_long_names_partition_double_array_data_ab1.sql @@ -18,5 +18,5 @@ from {{ ref('nested_stream_with_complex_columns_resulting_into_long_names_partit {{ cross_join_unnest('partition', 'double_array_data') }} where 1 = 1 and double_array_data is not null -{{ incremental_clause('_airbyte_emitted_at') }} +{{ incremental_clause('_airbyte_emitted_at', this) }} diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/bigquery/test_nested_streams/models/generated/airbyte_incremental/scd/test_normalization/nested_stream_with_complex_columns_resulting_into_long_names_scd.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/bigquery/test_nested_streams/models/generated/airbyte_incremental/scd/test_normalization/nested_stream_with_complex_columns_resulting_into_long_names_scd.sql index d814d04ecc61..1df163184ca0 100644 --- 
a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/bigquery/test_nested_streams/models/generated/airbyte_incremental/scd/test_normalization/nested_stream_with_complex_columns_resulting_into_long_names_scd.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/bigquery/test_nested_streams/models/generated/airbyte_incremental/scd/test_normalization/nested_stream_with_complex_columns_resulting_into_long_names_scd.sql @@ -3,7 +3,53 @@ partition_by = {"field": "_airbyte_active_row", "data_type": "int64", "range": {"start": 0, "end": 1, "interval": 1}}, unique_key = "_airbyte_unique_key_scd", schema = "test_normalization", - post_hook = ["drop view _airbyte_test_normalization.nested_stream_with_complex_columns_resulting_into_long_names_stg"], + post_hook = [" + {% + set final_table_relation = adapter.get_relation( + database=this.database, + schema=this.schema, + identifier='nested_stream_with_complex_columns_resulting_into_long_names' + ) + %} + {# + If the final table doesn't exist, then obviously we can't delete anything from it. + Also, after a reset, the final table is created without the _airbyte_unique_key column (this column is created during the first sync) + So skip this deletion if the column doesn't exist. 
(in this case, the table is guaranteed to be empty anyway) + #} + {% + if final_table_relation is not none and '_airbyte_unique_key' in adapter.get_columns_in_relation(final_table_relation)|map(attribute='name') + %} + -- Delete records which are no longer active: + -- This query is equivalent, but the left join version is more performant: + -- delete from final_table where unique_key in ( + -- select unique_key from scd_table where 1 = 1 + -- ) and unique_key not in ( + -- select unique_key from scd_table where active_row = 1 + -- ) + -- We're incremental against normalized_at rather than emitted_at because we need to fetch the SCD + -- entries that were _updated_ recently. This is because a deleted record will have an SCD record + -- which was emitted a long time ago, but recently re-normalized to have active_row = 0. + delete from {{ final_table_relation }} final_table where final_table._airbyte_unique_key in ( + select recent_records.unique_key + from ( + select distinct _airbyte_unique_key as unique_key + from {{ this }} + where 1=1 {{ incremental_clause('_airbyte_normalized_at', this.schema + '.' + adapter.quote('nested_stream_with_complex_columns_resulting_into_long_names')) }} + ) recent_records + left join ( + select _airbyte_unique_key as unique_key, count(_airbyte_unique_key) as active_count + from {{ this }} + where _airbyte_active_row = 1 {{ incremental_clause('_airbyte_normalized_at', this.schema + '.' 
+ adapter.quote('nested_stream_with_complex_columns_resulting_into_long_names')) }} + group by _airbyte_unique_key + ) active_counts + on recent_records.unique_key = active_counts.unique_key + where active_count is null or active_count = 0 + ) + {% else %} + -- We have to have a non-empty query, so just do a noop delete + delete from {{ this }} where 1=0 + {% endif %} + ","drop view _airbyte_test_normalization.nested_stream_with_complex_columns_resulting_into_long_names_stg"], tags = [ "top-level" ] ) }} -- depends_on: ref('nested_stream_with_complex_columns_resulting_into_long_names_stg') @@ -16,7 +62,7 @@ new_data as ( from {{ ref('nested_stream_with_complex_columns_resulting_into_long_names_stg') }} -- nested_stream_with_complex_columns_resulting_into_long_names from {{ source('test_normalization', '_airbyte_raw_nested_stream_with_complex_columns_resulting_into_long_names') }} where 1 = 1 - {{ incremental_clause('_airbyte_emitted_at') }} + {{ incremental_clause('_airbyte_emitted_at', this) }} ), new_data_ids as ( -- build a subset of _airbyte_unique_key from rows that are new diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/bigquery/test_nested_streams/models/generated/airbyte_incremental/test_normalization/nested_stream_with_complex_columns_resulting_into_long_names.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/bigquery/test_nested_streams/models/generated/airbyte_incremental/test_normalization/nested_stream_with_complex_columns_resulting_into_long_names.sql index 5009469d5e7b..c0bd55eeb61d 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/bigquery/test_nested_streams/models/generated/airbyte_incremental/test_normalization/nested_stream_with_complex_columns_resulting_into_long_names.sql +++ 
b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/bigquery/test_nested_streams/models/generated/airbyte_incremental/test_normalization/nested_stream_with_complex_columns_resulting_into_long_names.sql @@ -20,5 +20,5 @@ from {{ ref('nested_stream_with_complex_columns_resulting_into_long_names_scd') -- nested_stream_with_complex_columns_resulting_into_long_names from {{ source('test_normalization', '_airbyte_raw_nested_stream_with_complex_columns_resulting_into_long_names') }} where 1 = 1 and _airbyte_active_row = 1 -{{ incremental_clause('_airbyte_emitted_at') }} +{{ incremental_clause('_airbyte_emitted_at', this) }} diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/bigquery/test_nested_streams/models/generated/airbyte_incremental/test_normalization/nested_stream_with_complex_columns_resulting_into_long_names_partition.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/bigquery/test_nested_streams/models/generated/airbyte_incremental/test_normalization/nested_stream_with_complex_columns_resulting_into_long_names_partition.sql index 955c2a891bba..f8cd174b2a5b 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/bigquery/test_nested_streams/models/generated/airbyte_incremental/test_normalization/nested_stream_with_complex_columns_resulting_into_long_names_partition.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/bigquery/test_nested_streams/models/generated/airbyte_incremental/test_normalization/nested_stream_with_complex_columns_resulting_into_long_names_partition.sql @@ -17,5 +17,5 @@ select from {{ ref('nested_stream_with_complex_columns_resulting_into_long_names_partition_ab3') }} -- partition at nested_stream_with_complex_columns_resulting_into_long_names/partition from {{ 
ref('nested_stream_with_complex_columns_resulting_into_long_names_scd') }} where 1 = 1 -{{ incremental_clause('_airbyte_emitted_at') }} +{{ incremental_clause('_airbyte_emitted_at', this) }} diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/bigquery/test_nested_streams/models/generated/airbyte_incremental/test_normalization/nested_stream_with_complex_columns_resulting_into_long_names_partition_DATA.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/bigquery/test_nested_streams/models/generated/airbyte_incremental/test_normalization/nested_stream_with_complex_columns_resulting_into_long_names_partition_DATA.sql index ac5be7d87262..861e33d4859a 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/bigquery/test_nested_streams/models/generated/airbyte_incremental/test_normalization/nested_stream_with_complex_columns_resulting_into_long_names_partition_DATA.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/bigquery/test_nested_streams/models/generated/airbyte_incremental/test_normalization/nested_stream_with_complex_columns_resulting_into_long_names_partition_DATA.sql @@ -16,5 +16,5 @@ select from {{ ref('nested_stream_with_complex_columns_resulting_into_long_names_partition_DATA_ab3') }} -- DATA at nested_stream_with_complex_columns_resulting_into_long_names/partition/DATA from {{ ref('nested_stream_with_complex_columns_resulting_into_long_names_partition') }} where 1 = 1 -{{ incremental_clause('_airbyte_emitted_at') }} +{{ incremental_clause('_airbyte_emitted_at', this) }} diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/bigquery/test_nested_streams/models/generated/airbyte_incremental/test_normalization/nested_stream_with_complex_columns_resulting_into_long_names_partition_double_array_data.sql 
b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/bigquery/test_nested_streams/models/generated/airbyte_incremental/test_normalization/nested_stream_with_complex_columns_resulting_into_long_names_partition_double_array_data.sql index 91f3e95fddbe..c6b980124a5a 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/bigquery/test_nested_streams/models/generated/airbyte_incremental/test_normalization/nested_stream_with_complex_columns_resulting_into_long_names_partition_double_array_data.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/bigquery/test_nested_streams/models/generated/airbyte_incremental/test_normalization/nested_stream_with_complex_columns_resulting_into_long_names_partition_double_array_data.sql @@ -16,5 +16,5 @@ select from {{ ref('nested_stream_with_complex_columns_resulting_into_long_names_partition_double_array_data_ab3') }} -- double_array_data at nested_stream_with_complex_columns_resulting_into_long_names/partition/double_array_data from {{ ref('nested_stream_with_complex_columns_resulting_into_long_names_partition') }} where 1 = 1 -{{ incremental_clause('_airbyte_emitted_at') }} +{{ incremental_clause('_airbyte_emitted_at', this) }} diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/bigquery/test_simple_streams/dbt_project.yml b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/bigquery/test_simple_streams/dbt_project.yml index 88dde818dd4d..77cd51053747 100755 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/bigquery/test_simple_streams/dbt_project.yml +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/bigquery/test_simple_streams/dbt_project.yml @@ -1,45 +1,29 @@ -# This file is necessary to install dbt-utils with dbt deps 
-# the content will be overwritten by the transform function - -# Name your package! Package names should contain only lowercase characters -# and underscores. A good package name should reflect your organization's -# name or the intended use of these models -name: "airbyte_utils" +name: airbyte_utils version: "1.0" config-version: 2 - -# This setting configures which "profile" dbt uses for this project. Profiles contain -# database connection information, and should be configured in the ~/.dbt/profiles.yml file -profile: "normalize" - -# These configurations specify where dbt should look for different types of files. -# The `model-paths` config, for example, states that source models can be found -# in the "models/" directory. You probably won't need to change these! -model-paths: ["modified_models"] -docs-paths: ["docs"] -analysis-paths: ["analysis"] -test-paths: ["tests"] -seed-paths: ["data"] -macro-paths: ["macros"] - -target-path: "../build" # directory which will store compiled SQL files -log-path: "../logs" # directory which will store DBT logs -packages-install-path: "/dbt" # directory which will store external DBT dependencies - -clean-targets: # directories to be removed by `dbt clean` - - "build" - - "dbt_modules" - +profile: normalize +model-paths: + - modified_models +docs-paths: + - docs +analysis-paths: + - analysis +test-paths: + - tests +seed-paths: + - data +macro-paths: + - macros +target-path: ../build +log-path: ../logs +packages-install-path: /dbt +clean-targets: + - build + - dbt_modules quoting: database: true - # Temporarily disabling the behavior of the ExtendedNameTransformer on table/schema names, see (issue #1785) - # all schemas should be unquoted schema: false identifier: true - -# You can define configurations for models in the `model-paths` directory here. -# Using these configurations, you can enable or disable models, change how they -# are materialized, and more! 
models: airbyte_utils: +materialized: table @@ -57,7 +41,30 @@ models: airbyte_views: +tags: airbyte_internal_views +materialized: view - dispatch: - macro_namespace: dbt_utils - search_order: ["airbyte_utils", "dbt_utils"] + search_order: + - airbyte_utils + - dbt_utils +vars: + json_column: _airbyte_data + models_to_source: + exchange_rate_ab1: test_normalization._airbyte_raw_exchange_rate + exchange_rate_ab2: test_normalization._airbyte_raw_exchange_rate + exchange_rate_ab3: test_normalization._airbyte_raw_exchange_rate + exchange_rate: test_normalization._airbyte_raw_exchange_rate + dedup_exchange_rate_ab1: test_normalization._airbyte_raw_dedup_exchange_rate + dedup_exchange_rate_ab2: test_normalization._airbyte_raw_dedup_exchange_rate + dedup_exchange_rate_stg: test_normalization._airbyte_raw_dedup_exchange_rate + dedup_exchange_rate_scd: test_normalization._airbyte_raw_dedup_exchange_rate + dedup_exchange_rate: test_normalization._airbyte_raw_dedup_exchange_rate + renamed_dedup_cdc_excluded_ab1: test_normalization._airbyte_raw_renamed_dedup_cdc_excluded + renamed_dedup_cdc_excluded_ab2: test_normalization._airbyte_raw_renamed_dedup_cdc_excluded + renamed_dedup_cdc_excluded_stg: test_normalization._airbyte_raw_renamed_dedup_cdc_excluded + renamed_dedup_cdc_excluded_scd: test_normalization._airbyte_raw_renamed_dedup_cdc_excluded + renamed_dedup_cdc_excluded: test_normalization._airbyte_raw_renamed_dedup_cdc_excluded + dedup_cdc_excluded_ab1: test_normalization._airbyte_raw_dedup_cdc_excluded + dedup_cdc_excluded_ab2: test_normalization._airbyte_raw_dedup_cdc_excluded + dedup_cdc_excluded_stg: test_normalization._airbyte_raw_dedup_cdc_excluded + dedup_cdc_excluded_scd: test_normalization._airbyte_raw_dedup_cdc_excluded + dedup_cdc_excluded: test_normalization._airbyte_raw_dedup_cdc_excluded diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/bigquery/test_simple_streams/first_dbt_project.yml 
b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/bigquery/test_simple_streams/first_dbt_project.yml index 7631ef356dc9..200e87ca5ea7 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/bigquery/test_simple_streams/first_dbt_project.yml +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/bigquery/test_simple_streams/first_dbt_project.yml @@ -1,45 +1,29 @@ -# This file is necessary to install dbt-utils with dbt deps -# the content will be overwritten by the transform function - -# Name your package! Package names should contain only lowercase characters -# and underscores. A good package name should reflect your organization's -# name or the intended use of these models -name: "airbyte_utils" +name: airbyte_utils version: "1.0" config-version: 2 - -# This setting configures which "profile" dbt uses for this project. Profiles contain -# database connection information, and should be configured in the ~/.dbt/profiles.yml file -profile: "normalize" - -# These configurations specify where dbt should look for different types of files. -# The `model-paths` config, for example, states that source models can be found -# in the "models/" directory. You probably won't need to change these! 
-model-paths: ["models"] -docs-paths: ["docs"] -analysis-paths: ["analysis"] -test-paths: ["tests"] -seed-paths: ["data"] -macro-paths: ["macros"] - -target-path: "../build" # directory which will store compiled SQL files -log-path: "../logs" # directory which will store DBT logs -packages-install-path: "/dbt" # directory which will store external DBT dependencies - -clean-targets: # directories to be removed by `dbt clean` - - "build" - - "dbt_modules" - +profile: normalize +model-paths: + - models +docs-paths: + - docs +analysis-paths: + - analysis +test-paths: + - tests +seed-paths: + - data +macro-paths: + - macros +target-path: ../build +log-path: ../logs +packages-install-path: /dbt +clean-targets: + - build + - dbt_modules quoting: database: true - # Temporarily disabling the behavior of the ExtendedNameTransformer on table/schema names, see (issue #1785) - # all schemas should be unquoted schema: false identifier: true - -# You can define configurations for models in the `model-paths` directory here. -# Using these configurations, you can enable or disable models, change how they -# are materialized, and more! 
models: airbyte_utils: +materialized: table @@ -57,7 +41,45 @@ models: airbyte_views: +tags: airbyte_internal_views +materialized: view - dispatch: - macro_namespace: dbt_utils - search_order: ["airbyte_utils", "dbt_utils"] + search_order: + - airbyte_utils + - dbt_utils +vars: + json_column: _airbyte_data + models_to_source: + exchange_rate_ab1: test_normalization._airbyte_raw_exchange_rate + exchange_rate_ab2: test_normalization._airbyte_raw_exchange_rate + exchange_rate_ab3: test_normalization._airbyte_raw_exchange_rate + exchange_rate: test_normalization._airbyte_raw_exchange_rate + dedup_exchange_rate_ab1: test_normalization._airbyte_raw_dedup_exchange_rate + dedup_exchange_rate_ab2: test_normalization._airbyte_raw_dedup_exchange_rate + dedup_exchange_rate_stg: test_normalization._airbyte_raw_dedup_exchange_rate + dedup_exchange_rate_scd: test_normalization._airbyte_raw_dedup_exchange_rate + dedup_exchange_rate: test_normalization._airbyte_raw_dedup_exchange_rate + renamed_dedup_cdc_excluded_ab1: test_normalization._airbyte_raw_renamed_dedup_cdc_excluded + renamed_dedup_cdc_excluded_ab2: test_normalization._airbyte_raw_renamed_dedup_cdc_excluded + renamed_dedup_cdc_excluded_stg: test_normalization._airbyte_raw_renamed_dedup_cdc_excluded + renamed_dedup_cdc_excluded_scd: test_normalization._airbyte_raw_renamed_dedup_cdc_excluded + renamed_dedup_cdc_excluded: test_normalization._airbyte_raw_renamed_dedup_cdc_excluded + dedup_cdc_excluded_ab1: test_normalization._airbyte_raw_dedup_cdc_excluded + dedup_cdc_excluded_ab2: test_normalization._airbyte_raw_dedup_cdc_excluded + dedup_cdc_excluded_stg: test_normalization._airbyte_raw_dedup_cdc_excluded + dedup_cdc_excluded_scd: test_normalization._airbyte_raw_dedup_cdc_excluded + dedup_cdc_excluded: test_normalization._airbyte_raw_dedup_cdc_excluded + pos_dedup_cdcx_ab1: test_normalization._airbyte_raw_pos_dedup_cdcx + pos_dedup_cdcx_ab2: test_normalization._airbyte_raw_pos_dedup_cdcx + pos_dedup_cdcx_stg: 
test_normalization._airbyte_raw_pos_dedup_cdcx + pos_dedup_cdcx_scd: test_normalization._airbyte_raw_pos_dedup_cdcx + pos_dedup_cdcx: test_normalization._airbyte_raw_pos_dedup_cdcx + 1_prefix_startwith_number_ab1: test_normalization._airbyte_raw_1_prefix_startwith_number + 1_prefix_startwith_number_ab2: test_normalization._airbyte_raw_1_prefix_startwith_number + 1_prefix_startwith_number_stg: test_normalization._airbyte_raw_1_prefix_startwith_number + 1_prefix_startwith_number_scd: test_normalization._airbyte_raw_1_prefix_startwith_number + 1_prefix_startwith_number: test_normalization._airbyte_raw_1_prefix_startwith_number + multiple_column_names_conflicts_ab1: test_normalization._airbyte_raw_multiple_column_names_conflicts + multiple_column_names_conflicts_ab2: test_normalization._airbyte_raw_multiple_column_names_conflicts + multiple_column_names_conflicts_stg: test_normalization._airbyte_raw_multiple_column_names_conflicts + multiple_column_names_conflicts_scd: test_normalization._airbyte_raw_multiple_column_names_conflicts + multiple_column_names_conflicts: test_normalization._airbyte_raw_multiple_column_names_conflicts diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/bigquery/test_simple_streams/models/generated/airbyte_ctes/test_normalization/dedup_exchange_rate_ab1.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/bigquery/test_simple_streams/models/generated/airbyte_ctes/test_normalization/dedup_exchange_rate_ab1.sql index 0555b00e382a..8ef08eb1d426 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/bigquery/test_simple_streams/models/generated/airbyte_ctes/test_normalization/dedup_exchange_rate_ab1.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/bigquery/test_simple_streams/models/generated/airbyte_ctes/test_normalization/dedup_exchange_rate_ab1.sql @@ 
-22,5 +22,5 @@ select from {{ source('test_normalization', '_airbyte_raw_dedup_exchange_rate') }} as table_alias -- dedup_exchange_rate where 1 = 1 -{{ incremental_clause('_airbyte_emitted_at') }} +{{ incremental_clause('_airbyte_emitted_at', this) }} diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/bigquery/test_simple_streams/models/generated/airbyte_ctes/test_normalization/dedup_exchange_rate_ab2.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/bigquery/test_simple_streams/models/generated/airbyte_ctes/test_normalization/dedup_exchange_rate_ab2.sql index 6df3dfdc2552..eb02cc4ecf85 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/bigquery/test_simple_streams/models/generated/airbyte_ctes/test_normalization/dedup_exchange_rate_ab2.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/bigquery/test_simple_streams/models/generated/airbyte_ctes/test_normalization/dedup_exchange_rate_ab2.sql @@ -22,5 +22,5 @@ select from {{ ref('dedup_exchange_rate_ab1') }} -- dedup_exchange_rate where 1 = 1 -{{ incremental_clause('_airbyte_emitted_at') }} +{{ incremental_clause('_airbyte_emitted_at', this) }} diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/bigquery/test_simple_streams/models/generated/airbyte_incremental/scd/test_normalization/dedup_exchange_rate_scd.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/bigquery/test_simple_streams/models/generated/airbyte_incremental/scd/test_normalization/dedup_exchange_rate_scd.sql index cd673ea4b56c..ce21bef8c722 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/bigquery/test_simple_streams/models/generated/airbyte_incremental/scd/test_normalization/dedup_exchange_rate_scd.sql +++ 
b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/bigquery/test_simple_streams/models/generated/airbyte_incremental/scd/test_normalization/dedup_exchange_rate_scd.sql @@ -3,7 +3,53 @@ partition_by = {"field": "_airbyte_active_row", "data_type": "int64", "range": {"start": 0, "end": 1, "interval": 1}}, unique_key = "_airbyte_unique_key_scd", schema = "test_normalization", - post_hook = ["drop view _airbyte_test_normalization.dedup_exchange_rate_stg"], + post_hook = [" + {% + set final_table_relation = adapter.get_relation( + database=this.database, + schema=this.schema, + identifier='dedup_exchange_rate' + ) + %} + {# + If the final table doesn't exist, then obviously we can't delete anything from it. + Also, after a reset, the final table is created without the _airbyte_unique_key column (this column is created during the first sync) + So skip this deletion if the column doesn't exist. (in this case, the table is guaranteed to be empty anyway) + #} + {% + if final_table_relation is not none and '_airbyte_unique_key' in adapter.get_columns_in_relation(final_table_relation)|map(attribute='name') + %} + -- Delete records which are no longer active: + -- This query is equivalent, but the left join version is more performant: + -- delete from final_table where unique_key in ( + -- select unique_key from scd_table where 1 = 1 + -- ) and unique_key not in ( + -- select unique_key from scd_table where active_row = 1 + -- ) + -- We're incremental against normalized_at rather than emitted_at because we need to fetch the SCD + -- entries that were _updated_ recently. This is because a deleted record will have an SCD record + -- which was emitted a long time ago, but recently re-normalized to have active_row = 0. 
+ delete from {{ final_table_relation }} final_table where final_table._airbyte_unique_key in ( + select recent_records.unique_key + from ( + select distinct _airbyte_unique_key as unique_key + from {{ this }} + where 1=1 {{ incremental_clause('_airbyte_normalized_at', this.schema + '.' + adapter.quote('dedup_exchange_rate')) }} + ) recent_records + left join ( + select _airbyte_unique_key as unique_key, count(_airbyte_unique_key) as active_count + from {{ this }} + where _airbyte_active_row = 1 {{ incremental_clause('_airbyte_normalized_at', this.schema + '.' + adapter.quote('dedup_exchange_rate')) }} + group by _airbyte_unique_key + ) active_counts + on recent_records.unique_key = active_counts.unique_key + where active_count is null or active_count = 0 + ) + {% else %} + -- We have to have a non-empty query, so just do a noop delete + delete from {{ this }} where 1=0 + {% endif %} + ","drop view _airbyte_test_normalization.dedup_exchange_rate_stg"], tags = [ "top-level" ] ) }} -- depends_on: ref('dedup_exchange_rate_stg') @@ -16,7 +62,7 @@ new_data as ( from {{ ref('dedup_exchange_rate_stg') }} -- dedup_exchange_rate from {{ source('test_normalization', '_airbyte_raw_dedup_exchange_rate') }} where 1 = 1 - {{ incremental_clause('_airbyte_emitted_at') }} + {{ incremental_clause('_airbyte_emitted_at', this) }} ), new_data_ids as ( -- build a subset of _airbyte_unique_key from rows that are new diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/bigquery/test_simple_streams/models/generated/airbyte_incremental/test_normalization/dedup_exchange_rate.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/bigquery/test_simple_streams/models/generated/airbyte_incremental/test_normalization/dedup_exchange_rate.sql index 3e23097c346f..eb3c93754b6b 100644 --- 
a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/bigquery/test_simple_streams/models/generated/airbyte_incremental/test_normalization/dedup_exchange_rate.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/bigquery/test_simple_streams/models/generated/airbyte_incremental/test_normalization/dedup_exchange_rate.sql @@ -25,5 +25,5 @@ from {{ ref('dedup_exchange_rate_scd') }} -- dedup_exchange_rate from {{ source('test_normalization', '_airbyte_raw_dedup_exchange_rate') }} where 1 = 1 and _airbyte_active_row = 1 -{{ incremental_clause('_airbyte_emitted_at') }} +{{ incremental_clause('_airbyte_emitted_at', this) }} diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/bigquery/test_simple_streams/models/generated/airbyte_views/test_normalization/dedup_exchange_rate_stg.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/bigquery/test_simple_streams/models/generated/airbyte_views/test_normalization/dedup_exchange_rate_stg.sql index 1c8897f665ea..45262775f20b 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/bigquery/test_simple_streams/models/generated/airbyte_views/test_normalization/dedup_exchange_rate_stg.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/bigquery/test_simple_streams/models/generated/airbyte_views/test_normalization/dedup_exchange_rate_stg.sql @@ -22,5 +22,5 @@ select from {{ ref('dedup_exchange_rate_ab2') }} tmp -- dedup_exchange_rate where 1 = 1 -{{ incremental_clause('_airbyte_emitted_at') }} +{{ incremental_clause('_airbyte_emitted_at', this) }} diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/bigquery/test_simple_streams/modified_models/generated/airbyte_ctes/test_normalization/dedup_exchange_rate_ab1.sql 
b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/bigquery/test_simple_streams/modified_models/generated/airbyte_ctes/test_normalization/dedup_exchange_rate_ab1.sql index 13316b96ee54..b86bc98fe997 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/bigquery/test_simple_streams/modified_models/generated/airbyte_ctes/test_normalization/dedup_exchange_rate_ab1.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/bigquery/test_simple_streams/modified_models/generated/airbyte_ctes/test_normalization/dedup_exchange_rate_ab1.sql @@ -22,5 +22,5 @@ select from {{ source('test_normalization', '_airbyte_raw_dedup_exchange_rate') }} as table_alias -- dedup_exchange_rate where 1 = 1 -{{ incremental_clause('_airbyte_emitted_at') }} +{{ incremental_clause('_airbyte_emitted_at', this) }} diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/bigquery/test_simple_streams/modified_models/generated/airbyte_ctes/test_normalization/dedup_exchange_rate_ab2.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/bigquery/test_simple_streams/modified_models/generated/airbyte_ctes/test_normalization/dedup_exchange_rate_ab2.sql index 96b419dd87f9..09146ddd1c9f 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/bigquery/test_simple_streams/modified_models/generated/airbyte_ctes/test_normalization/dedup_exchange_rate_ab2.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/bigquery/test_simple_streams/modified_models/generated/airbyte_ctes/test_normalization/dedup_exchange_rate_ab2.sql @@ -22,5 +22,5 @@ select from {{ ref('dedup_exchange_rate_ab1') }} -- dedup_exchange_rate where 1 = 1 -{{ incremental_clause('_airbyte_emitted_at') }} +{{ 
incremental_clause('_airbyte_emitted_at', this) }} diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/bigquery/test_simple_streams/modified_models/generated/airbyte_incremental/scd/test_normalization/dedup_exchange_rate_scd.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/bigquery/test_simple_streams/modified_models/generated/airbyte_incremental/scd/test_normalization/dedup_exchange_rate_scd.sql index ccec637092e3..4f6b80934992 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/bigquery/test_simple_streams/modified_models/generated/airbyte_incremental/scd/test_normalization/dedup_exchange_rate_scd.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/bigquery/test_simple_streams/modified_models/generated/airbyte_incremental/scd/test_normalization/dedup_exchange_rate_scd.sql @@ -3,7 +3,53 @@ partition_by = {"field": "_airbyte_active_row", "data_type": "int64", "range": {"start": 0, "end": 1, "interval": 1}}, unique_key = "_airbyte_unique_key_scd", schema = "test_normalization", - post_hook = ["drop view _airbyte_test_normalization.dedup_exchange_rate_stg"], + post_hook = [" + {% + set final_table_relation = adapter.get_relation( + database=this.database, + schema=this.schema, + identifier='dedup_exchange_rate' + ) + %} + {# + If the final table doesn't exist, then obviously we can't delete anything from it. + Also, after a reset, the final table is created without the _airbyte_unique_key column (this column is created during the first sync) + So skip this deletion if the column doesn't exist. 
(in this case, the table is guaranteed to be empty anyway) + #} + {% + if final_table_relation is not none and '_airbyte_unique_key' in adapter.get_columns_in_relation(final_table_relation)|map(attribute='name') + %} + -- Delete records which are no longer active: + -- This query is equivalent, but the left join version is more performant: + -- delete from final_table where unique_key in ( + -- select unique_key from scd_table where 1 = 1 + -- ) and unique_key not in ( + -- select unique_key from scd_table where active_row = 1 + -- ) + -- We're incremental against normalized_at rather than emitted_at because we need to fetch the SCD + -- entries that were _updated_ recently. This is because a deleted record will have an SCD record + -- which was emitted a long time ago, but recently re-normalized to have active_row = 0. + delete from {{ final_table_relation }} final_table where final_table._airbyte_unique_key in ( + select recent_records.unique_key + from ( + select distinct _airbyte_unique_key as unique_key + from {{ this }} + where 1=1 {{ incremental_clause('_airbyte_normalized_at', this.schema + '.' + adapter.quote('dedup_exchange_rate')) }} + ) recent_records + left join ( + select _airbyte_unique_key as unique_key, count(_airbyte_unique_key) as active_count + from {{ this }} + where _airbyte_active_row = 1 {{ incremental_clause('_airbyte_normalized_at', this.schema + '.' 
+ adapter.quote('dedup_exchange_rate')) }} + group by _airbyte_unique_key + ) active_counts + on recent_records.unique_key = active_counts.unique_key + where active_count is null or active_count = 0 + ) + {% else %} + -- We have to have a non-empty query, so just do a noop delete + delete from {{ this }} where 1=0 + {% endif %} + ","drop view _airbyte_test_normalization.dedup_exchange_rate_stg"], tags = [ "top-level" ] ) }} -- depends_on: ref('dedup_exchange_rate_stg') @@ -16,7 +62,7 @@ new_data as ( from {{ ref('dedup_exchange_rate_stg') }} -- dedup_exchange_rate from {{ source('test_normalization', '_airbyte_raw_dedup_exchange_rate') }} where 1 = 1 - {{ incremental_clause('_airbyte_emitted_at') }} + {{ incremental_clause('_airbyte_emitted_at', this) }} ), new_data_ids as ( -- build a subset of _airbyte_unique_key from rows that are new diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/bigquery/test_simple_streams/modified_models/generated/airbyte_incremental/test_normalization/dedup_exchange_rate.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/bigquery/test_simple_streams/modified_models/generated/airbyte_incremental/test_normalization/dedup_exchange_rate.sql index fabc0a638c02..96601fc9d287 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/bigquery/test_simple_streams/modified_models/generated/airbyte_incremental/test_normalization/dedup_exchange_rate.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/bigquery/test_simple_streams/modified_models/generated/airbyte_incremental/test_normalization/dedup_exchange_rate.sql @@ -25,5 +25,5 @@ from {{ ref('dedup_exchange_rate_scd') }} -- dedup_exchange_rate from {{ source('test_normalization', '_airbyte_raw_dedup_exchange_rate') }} where 1 = 1 and _airbyte_active_row = 1 -{{ incremental_clause('_airbyte_emitted_at') }} 
+{{ incremental_clause('_airbyte_emitted_at', this) }} diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/bigquery/test_simple_streams/modified_models/generated/airbyte_views/test_normalization/dedup_exchange_rate_stg.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/bigquery/test_simple_streams/modified_models/generated/airbyte_views/test_normalization/dedup_exchange_rate_stg.sql index c67573e91762..da37e7dc7eae 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/bigquery/test_simple_streams/modified_models/generated/airbyte_views/test_normalization/dedup_exchange_rate_stg.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/bigquery/test_simple_streams/modified_models/generated/airbyte_views/test_normalization/dedup_exchange_rate_stg.sql @@ -22,5 +22,5 @@ select from {{ ref('dedup_exchange_rate_ab2') }} tmp -- dedup_exchange_rate where 1 = 1 -{{ incremental_clause('_airbyte_emitted_at') }} +{{ incremental_clause('_airbyte_emitted_at', this) }} diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/bigquery/test_simple_streams/modified_models/generated/sources.yml b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/bigquery/test_simple_streams/modified_models/generated/sources.yml index dd538a80131a..79ad1a1bb5c5 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/bigquery/test_simple_streams/modified_models/generated/sources.yml +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/bigquery/test_simple_streams/modified_models/generated/sources.yml @@ -1,11 +1,12 @@ version: 2 sources: -- name: test_normalization - quoting: - database: true - schema: false - identifier: false - tables: - - 
name: _airbyte_raw_dedup_exchange_rate - - name: _airbyte_raw_exchange_rate - - name: _airbyte_raw_renamed_dedup_cdc_excluded + - name: test_normalization + quoting: + database: true + schema: false + identifier: false + tables: + - name: _airbyte_raw_dedup_cdc_excluded + - name: _airbyte_raw_dedup_exchange_rate + - name: _airbyte_raw_exchange_rate + - name: _airbyte_raw_renamed_dedup_cdc_excluded diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/clickhouse/test_simple_streams/dbt_project.yml b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/clickhouse/test_simple_streams/dbt_project.yml index 39f7bd7b02ca..02cf2fd559fd 100755 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/clickhouse/test_simple_streams/dbt_project.yml +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/clickhouse/test_simple_streams/dbt_project.yml @@ -1,45 +1,29 @@ -# This file is necessary to install dbt-utils with dbt deps -# the content will be overwritten by the transform function - -# Name your package! Package names should contain only lowercase characters -# and underscores. A good package name should reflect your organization's -# name or the intended use of these models -name: "airbyte_utils" +name: airbyte_utils version: "1.0" config-version: 2 - -# This setting configures which "profile" dbt uses for this project. Profiles contain -# database connection information, and should be configured in the ~/.dbt/profiles.yml file -profile: "normalize" - -# These configurations specify where dbt should look for different types of files. -# The `model-paths` config, for example, states that source models can be found -# in the "models/" directory. You probably won't need to change these! 
-model-paths: ["models"] -docs-paths: ["docs"] -analysis-paths: ["analysis"] -test-paths: ["tests"] -seed-paths: ["data"] -macro-paths: ["macros"] - -target-path: "../build" # directory which will store compiled SQL files -log-path: "../logs" # directory which will store DBT logs -packages-install-path: "/dbt" # directory which will store external DBT dependencies - -clean-targets: # directories to be removed by `dbt clean` - - "build" - - "dbt_modules" - +profile: normalize +model-paths: + - models +docs-paths: + - docs +analysis-paths: + - analysis +test-paths: + - tests +seed-paths: + - data +macro-paths: + - macros +target-path: ../build +log-path: ../logs +packages-install-path: /dbt +clean-targets: + - build + - dbt_modules quoting: database: true - # Temporarily disabling the behavior of the ExtendedNameTransformer on table/schema names, see (issue #1785) - # all schemas should be unquoted schema: false identifier: true - -# You can define configurations for models in the `model-paths` directory here. -# Using these configurations, you can enable or disable models, change how they -# are materialized, and more! 
models: airbyte_utils: +materialized: table @@ -50,15 +34,52 @@ models: airbyte_incremental: +tags: incremental_tables +materialized: incremental - # schema change test isn't supported in ClickHouse yet - +on_schema_change: "ignore" + +on_schema_change: ignore airbyte_tables: +tags: normalized_tables +materialized: table airbyte_views: +tags: airbyte_internal_views +materialized: view - dispatch: - macro_namespace: dbt_utils - search_order: ["airbyte_utils", "dbt_utils"] + search_order: + - airbyte_utils + - dbt_utils +vars: + json_column: _airbyte_data + models_to_source: + exchange_rate_ab1: test_normalization._airbyte_raw_exchange_rate + exchange_rate_ab2: test_normalization._airbyte_raw_exchange_rate + exchange_rate_ab3: test_normalization._airbyte_raw_exchange_rate + exchange_rate: test_normalization._airbyte_raw_exchange_rate + dedup_exchange_rate_ab1: test_normalization._airbyte_raw_dedup_exchange_rate + dedup_exchange_rate_ab2: test_normalization._airbyte_raw_dedup_exchange_rate + dedup_exchange_rate_stg: test_normalization._airbyte_raw_dedup_exchange_rate + dedup_exchange_rate_scd: test_normalization._airbyte_raw_dedup_exchange_rate + dedup_exchange_rate: test_normalization._airbyte_raw_dedup_exchange_rate + renamed_dedup_cdc_excluded_ab1: test_normalization._airbyte_raw_renamed_dedup_cdc_excluded + renamed_dedup_cdc_excluded_ab2: test_normalization._airbyte_raw_renamed_dedup_cdc_excluded + renamed_dedup_cdc_excluded_stg: test_normalization._airbyte_raw_renamed_dedup_cdc_excluded + renamed_dedup_cdc_excluded_scd: test_normalization._airbyte_raw_renamed_dedup_cdc_excluded + renamed_dedup_cdc_excluded: test_normalization._airbyte_raw_renamed_dedup_cdc_excluded + dedup_cdc_excluded_ab1: test_normalization._airbyte_raw_dedup_cdc_excluded + dedup_cdc_excluded_ab2: test_normalization._airbyte_raw_dedup_cdc_excluded + dedup_cdc_excluded_stg: test_normalization._airbyte_raw_dedup_cdc_excluded + dedup_cdc_excluded_scd: 
test_normalization._airbyte_raw_dedup_cdc_excluded + dedup_cdc_excluded: test_normalization._airbyte_raw_dedup_cdc_excluded + pos_dedup_cdcx_ab1: test_normalization._airbyte_raw_pos_dedup_cdcx + pos_dedup_cdcx_ab2: test_normalization._airbyte_raw_pos_dedup_cdcx + pos_dedup_cdcx_stg: test_normalization._airbyte_raw_pos_dedup_cdcx + pos_dedup_cdcx_scd: test_normalization._airbyte_raw_pos_dedup_cdcx + pos_dedup_cdcx: test_normalization._airbyte_raw_pos_dedup_cdcx + 1_prefix_startwith_number_ab1: test_normalization._airbyte_raw_1_prefix_startwith_number + 1_prefix_startwith_number_ab2: test_normalization._airbyte_raw_1_prefix_startwith_number + 1_prefix_startwith_number_stg: test_normalization._airbyte_raw_1_prefix_startwith_number + 1_prefix_startwith_number_scd: test_normalization._airbyte_raw_1_prefix_startwith_number + 1_prefix_startwith_number: test_normalization._airbyte_raw_1_prefix_startwith_number + multiple_column_names_conflicts_ab1: test_normalization._airbyte_raw_multiple_column_names_conflicts + multiple_column_names_conflicts_ab2: test_normalization._airbyte_raw_multiple_column_names_conflicts + multiple_column_names_conflicts_stg: test_normalization._airbyte_raw_multiple_column_names_conflicts + multiple_column_names_conflicts_scd: test_normalization._airbyte_raw_multiple_column_names_conflicts + multiple_column_names_conflicts: test_normalization._airbyte_raw_multiple_column_names_conflicts diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/clickhouse/test_simple_streams/models/generated/airbyte_ctes/test_normalization/dedup_exchange_rate_ab1.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/clickhouse/test_simple_streams/models/generated/airbyte_ctes/test_normalization/dedup_exchange_rate_ab1.sql index 6e998ca14141..b0c2c4aa7fa3 100644 --- 
a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/clickhouse/test_simple_streams/models/generated/airbyte_ctes/test_normalization/dedup_exchange_rate_ab1.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/clickhouse/test_simple_streams/models/generated/airbyte_ctes/test_normalization/dedup_exchange_rate_ab1.sql @@ -20,5 +20,5 @@ select from {{ source('test_normalization', '_airbyte_raw_dedup_exchange_rate') }} as table_alias -- dedup_exchange_rate where 1 = 1 -{{ incremental_clause('_airbyte_emitted_at') }} +{{ incremental_clause('_airbyte_emitted_at', this) }} diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/clickhouse/test_simple_streams/models/generated/airbyte_ctes/test_normalization/dedup_exchange_rate_ab2.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/clickhouse/test_simple_streams/models/generated/airbyte_ctes/test_normalization/dedup_exchange_rate_ab2.sql index ee41ee94585e..842453ba3928 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/clickhouse/test_simple_streams/models/generated/airbyte_ctes/test_normalization/dedup_exchange_rate_ab2.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/clickhouse/test_simple_streams/models/generated/airbyte_ctes/test_normalization/dedup_exchange_rate_ab2.sql @@ -20,5 +20,5 @@ select from {{ ref('dedup_exchange_rate_ab1') }} -- dedup_exchange_rate where 1 = 1 -{{ incremental_clause('_airbyte_emitted_at') }} +{{ incremental_clause('_airbyte_emitted_at', this) }} diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/clickhouse/test_simple_streams/models/generated/airbyte_ctes/test_normalization/renamed_dedup_cdc_excluded_ab1.sql 
b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/clickhouse/test_simple_streams/models/generated/airbyte_ctes/test_normalization/renamed_dedup_cdc_excluded_ab1.sql index 88a3674f694b..5d3e0d7f6abf 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/clickhouse/test_simple_streams/models/generated/airbyte_ctes/test_normalization/renamed_dedup_cdc_excluded_ab1.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/clickhouse/test_simple_streams/models/generated/airbyte_ctes/test_normalization/renamed_dedup_cdc_excluded_ab1.sql @@ -14,5 +14,5 @@ select from {{ source('test_normalization', '_airbyte_raw_renamed_dedup_cdc_excluded') }} as table_alias -- renamed_dedup_cdc_excluded where 1 = 1 -{{ incremental_clause('_airbyte_emitted_at') }} +{{ incremental_clause('_airbyte_emitted_at', this) }} diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/clickhouse/test_simple_streams/models/generated/airbyte_ctes/test_normalization/renamed_dedup_cdc_excluded_ab2.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/clickhouse/test_simple_streams/models/generated/airbyte_ctes/test_normalization/renamed_dedup_cdc_excluded_ab2.sql index b192f4915e98..c6885e98962e 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/clickhouse/test_simple_streams/models/generated/airbyte_ctes/test_normalization/renamed_dedup_cdc_excluded_ab2.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/clickhouse/test_simple_streams/models/generated/airbyte_ctes/test_normalization/renamed_dedup_cdc_excluded_ab2.sql @@ -14,5 +14,5 @@ select from {{ ref('renamed_dedup_cdc_excluded_ab1') }} -- renamed_dedup_cdc_excluded where 1 = 1 -{{ incremental_clause('_airbyte_emitted_at') }} +{{ 
incremental_clause('_airbyte_emitted_at', this) }} diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/clickhouse/test_simple_streams/models/generated/airbyte_incremental/scd/test_normalization/dedup_cdc_excluded_scd.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/clickhouse/test_simple_streams/models/generated/airbyte_incremental/scd/test_normalization/dedup_cdc_excluded_scd.sql index 99f32737436d..f87d45a5c18c 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/clickhouse/test_simple_streams/models/generated/airbyte_incremental/scd/test_normalization/dedup_cdc_excluded_scd.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/clickhouse/test_simple_streams/models/generated/airbyte_incremental/scd/test_normalization/dedup_cdc_excluded_scd.sql @@ -1,7 +1,53 @@ {{ config( unique_key = "_airbyte_unique_key_scd", schema = "test_normalization", - post_hook = ["drop view _airbyte_test_normalization.dedup_cdc_excluded_stg"], + post_hook = [" + {% + set final_table_relation = adapter.get_relation( + database=this.database, + schema=this.schema, + identifier='dedup_cdc_excluded' + ) + %} + {# + If the final table doesn't exist, then obviously we can't delete anything from it. + Also, after a reset, the final table is created without the _airbyte_unique_key column (this column is created during the first sync) + So skip this deletion if the column doesn't exist. 
(in this case, the table is guaranteed to be empty anyway) + #} + {% + if final_table_relation is not none and '_airbyte_unique_key' in adapter.get_columns_in_relation(final_table_relation)|map(attribute='name') + %} + -- Delete records which are no longer active: + -- This query is equivalent, but the left join version is more performant: + -- delete from final_table where unique_key in ( + -- select unique_key from scd_table where 1 = 1 + -- ) and unique_key not in ( + -- select unique_key from scd_table where active_row = 1 + -- ) + -- We're incremental against normalized_at rather than emitted_at because we need to fetch the SCD + -- entries that were _updated_ recently. This is because a deleted record will have an SCD record + -- which was emitted a long time ago, but recently re-normalized to have active_row = 0. + alter table {{ final_table_relation }} delete where _airbyte_unique_key in ( + select recent_records.unique_key + from ( + select distinct _airbyte_unique_key as unique_key + from {{ this }} + where 1=1 {{ incremental_clause('_airbyte_normalized_at', this.schema + '.' + quote('dedup_cdc_excluded')) }} + ) recent_records + left join ( + select _airbyte_unique_key as unique_key, count(_airbyte_unique_key) as active_count + from {{ this }} + where _airbyte_active_row = 1 {{ incremental_clause('_airbyte_normalized_at', this.schema + '.' 
+ quote('dedup_cdc_excluded')) }} + group by _airbyte_unique_key + ) active_counts + on recent_records.unique_key = active_counts.unique_key + where active_count is null or active_count = 0 + ) + {% else %} + -- We have to have a non-empty query, so just do a noop delete + alter table {{ this }} delete where 1=0 + {% endif %} + ","drop view _airbyte_test_normalization.dedup_cdc_excluded_stg"], tags = [ "top-level" ] ) }} -- depends_on: ref('dedup_cdc_excluded_stg') @@ -14,7 +60,7 @@ new_data as ( from {{ ref('dedup_cdc_excluded_stg') }} -- dedup_cdc_excluded from {{ source('test_normalization', '_airbyte_raw_dedup_cdc_excluded') }} where 1 = 1 - {{ incremental_clause('_airbyte_emitted_at') }} + {{ incremental_clause('_airbyte_emitted_at', this) }} ), new_data_ids as ( -- build a subset of _airbyte_unique_key from rows that are new diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/clickhouse/test_simple_streams/models/generated/airbyte_incremental/scd/test_normalization/dedup_exchange_rate_scd.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/clickhouse/test_simple_streams/models/generated/airbyte_incremental/scd/test_normalization/dedup_exchange_rate_scd.sql index eff375bdc37d..bd834917f06b 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/clickhouse/test_simple_streams/models/generated/airbyte_incremental/scd/test_normalization/dedup_exchange_rate_scd.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/clickhouse/test_simple_streams/models/generated/airbyte_incremental/scd/test_normalization/dedup_exchange_rate_scd.sql @@ -1,7 +1,53 @@ {{ config( unique_key = "_airbyte_unique_key_scd", schema = "test_normalization", - post_hook = ["drop view _airbyte_test_normalization.dedup_exchange_rate_stg"], + post_hook = [" + {% + set final_table_relation = adapter.get_relation( 
+ database=this.database, + schema=this.schema, + identifier='dedup_exchange_rate' + ) + %} + {# + If the final table doesn't exist, then obviously we can't delete anything from it. + Also, after a reset, the final table is created without the _airbyte_unique_key column (this column is created during the first sync) + So skip this deletion if the column doesn't exist. (in this case, the table is guaranteed to be empty anyway) + #} + {% + if final_table_relation is not none and '_airbyte_unique_key' in adapter.get_columns_in_relation(final_table_relation)|map(attribute='name') + %} + -- Delete records which are no longer active: + -- This query is equivalent, but the left join version is more performant: + -- delete from final_table where unique_key in ( + -- select unique_key from scd_table where 1 = 1 + -- ) and unique_key not in ( + -- select unique_key from scd_table where active_row = 1 + -- ) + -- We're incremental against normalized_at rather than emitted_at because we need to fetch the SCD + -- entries that were _updated_ recently. This is because a deleted record will have an SCD record + -- which was emitted a long time ago, but recently re-normalized to have active_row = 0. + alter table {{ final_table_relation }} delete where _airbyte_unique_key in ( + select recent_records.unique_key + from ( + select distinct _airbyte_unique_key as unique_key + from {{ this }} + where 1=1 {{ incremental_clause('_airbyte_normalized_at', this.schema + '.' + quote('dedup_exchange_rate')) }} + ) recent_records + left join ( + select _airbyte_unique_key as unique_key, count(_airbyte_unique_key) as active_count + from {{ this }} + where _airbyte_active_row = 1 {{ incremental_clause('_airbyte_normalized_at', this.schema + '.' 
+ quote('dedup_exchange_rate')) }} + group by _airbyte_unique_key + ) active_counts + on recent_records.unique_key = active_counts.unique_key + where active_count is null or active_count = 0 + ) + {% else %} + -- We have to have a non-empty query, so just do a noop delete + alter table {{ this }} delete where 1=0 + {% endif %} + ","drop view _airbyte_test_normalization.dedup_exchange_rate_stg"], tags = [ "top-level" ] ) }} -- depends_on: ref('dedup_exchange_rate_stg') @@ -14,7 +60,7 @@ new_data as ( from {{ ref('dedup_exchange_rate_stg') }} -- dedup_exchange_rate from {{ source('test_normalization', '_airbyte_raw_dedup_exchange_rate') }} where 1 = 1 - {{ incremental_clause('_airbyte_emitted_at') }} + {{ incremental_clause('_airbyte_emitted_at', this) }} ), new_data_ids as ( -- build a subset of _airbyte_unique_key from rows that are new diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/clickhouse/test_simple_streams/models/generated/airbyte_incremental/test_normalization/dedup_exchange_rate.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/clickhouse/test_simple_streams/models/generated/airbyte_incremental/test_normalization/dedup_exchange_rate.sql index 180310a437ff..5b8ff875d3a3 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/clickhouse/test_simple_streams/models/generated/airbyte_incremental/test_normalization/dedup_exchange_rate.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/clickhouse/test_simple_streams/models/generated/airbyte_incremental/test_normalization/dedup_exchange_rate.sql @@ -23,5 +23,5 @@ from {{ ref('dedup_exchange_rate_scd') }} -- dedup_exchange_rate from {{ source('test_normalization', '_airbyte_raw_dedup_exchange_rate') }} where 1 = 1 and _airbyte_active_row = 1 -{{ incremental_clause('_airbyte_emitted_at') }} +{{ 
incremental_clause('_airbyte_emitted_at', this) }} diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/clickhouse/test_simple_streams/models/generated/airbyte_incremental/test_normalization/renamed_dedup_cdc_excluded.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/clickhouse/test_simple_streams/models/generated/airbyte_incremental/test_normalization/renamed_dedup_cdc_excluded.sql index 1b9cead2c495..4051dd3178c9 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/clickhouse/test_simple_streams/models/generated/airbyte_incremental/test_normalization/renamed_dedup_cdc_excluded.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/clickhouse/test_simple_streams/models/generated/airbyte_incremental/test_normalization/renamed_dedup_cdc_excluded.sql @@ -17,5 +17,5 @@ from {{ ref('renamed_dedup_cdc_excluded_scd') }} -- renamed_dedup_cdc_excluded from {{ source('test_normalization', '_airbyte_raw_renamed_dedup_cdc_excluded') }} where 1 = 1 and _airbyte_active_row = 1 -{{ incremental_clause('_airbyte_emitted_at') }} +{{ incremental_clause('_airbyte_emitted_at', this) }} diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/clickhouse/test_simple_streams/models/generated/airbyte_views/test_normalization/dedup_exchange_rate_stg.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/clickhouse/test_simple_streams/models/generated/airbyte_views/test_normalization/dedup_exchange_rate_stg.sql index 0b4900731039..beb710676cb0 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/clickhouse/test_simple_streams/models/generated/airbyte_views/test_normalization/dedup_exchange_rate_stg.sql +++ 
b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/clickhouse/test_simple_streams/models/generated/airbyte_views/test_normalization/dedup_exchange_rate_stg.sql @@ -20,5 +20,5 @@ select from {{ ref('dedup_exchange_rate_ab2') }} tmp -- dedup_exchange_rate where 1 = 1 -{{ incremental_clause('_airbyte_emitted_at') }} +{{ incremental_clause('_airbyte_emitted_at', this) }} diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/mysql/test_nested_streams/dbt_project.yml b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/mysql/test_nested_streams/dbt_project.yml index db791a568a0b..88b2a40e2d11 100755 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/mysql/test_nested_streams/dbt_project.yml +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/mysql/test_nested_streams/dbt_project.yml @@ -1,45 +1,29 @@ -# This file is necessary to install dbt-utils with dbt deps -# the content will be overwritten by the transform function - -# Name your package! Package names should contain only lowercase characters -# and underscores. A good package name should reflect your organization's -# name or the intended use of these models -name: "airbyte_utils" -version: "1.0" +name: airbyte_utils +version: '1.0' config-version: 2 - -# This setting configures which "profile" dbt uses for this project. Profiles contain -# database connection information, and should be configured in the ~/.dbt/profiles.yml file -profile: "normalize" - -# These configurations specify where dbt should look for different types of files. -# The `source-paths` config, for example, states that source models can be found -# in the "models/" directory. You probably won't need to change these! 
-source-paths: ["models"] -docs-paths: ["docs"] -analysis-paths: ["analysis"] -test-paths: ["tests"] -data-paths: ["data"] -macro-paths: ["macros"] - -target-path: "../build" # directory which will store compiled SQL files -log-path: "../logs" # directory which will store DBT logs -modules-path: "/dbt" # directory which will store external DBT dependencies - -clean-targets: # directories to be removed by `dbt clean` - - "build" - - "dbt_modules" - +profile: normalize +model-paths: +- models +docs-paths: +- docs +analysis-paths: +- analysis +test-paths: +- tests +seed-paths: +- data +macro-paths: +- macros +target-path: ../build +log-path: ../logs +packages-install-path: /dbt +clean-targets: +- build +- dbt_modules quoting: database: true - # Temporarily disabling the behavior of the ExtendedNameTransformer on table/schema names, see (issue #1785) - # all schemas should be unquoted schema: false identifier: true - -# You can define configurations for models in the `source-paths` directory here. -# Using these configurations, you can enable or disable models, change how they -# are materialized, and more! 
models: airbyte_utils: +materialized: table @@ -49,8 +33,6 @@ models: +materialized: ephemeral airbyte_incremental: +tags: incremental_tables - # incremental is not enabled for MySql yet - #+materialized: incremental +materialized: table airbyte_tables: +tags: normalized_tables @@ -58,6 +40,74 @@ models: airbyte_views: +tags: airbyte_internal_views +materialized: view - vars: - dbt_utils_dispatch_list: ["airbyte_utils"] + dbt_utils_dispatch_list: + - airbyte_utils + json_column: _airbyte_data + models_to_source: + nested_stream_with_co_1g_into_long_names_ab1: test_normalization._airbyte_raw_nested_s__lting_into_long_names + nested_stream_with_co_1g_into_long_names_ab2: test_normalization._airbyte_raw_nested_s__lting_into_long_names + nested_stream_with_co_1g_into_long_names_stg: test_normalization._airbyte_raw_nested_s__lting_into_long_names + nested_stream_with_co_1g_into_long_names_scd: test_normalization._airbyte_raw_nested_s__lting_into_long_names + nested_stream_with_co__lting_into_long_names: test_normalization._airbyte_raw_nested_s__lting_into_long_names + non_nested_stream_wit_1g_into_long_names_ab1: test_normalization._airbyte_raw_non_nest__lting_into_long_names + non_nested_stream_wit_1g_into_long_names_ab2: test_normalization._airbyte_raw_non_nest__lting_into_long_names + non_nested_stream_wit_1g_into_long_names_ab3: test_normalization._airbyte_raw_non_nest__lting_into_long_names + non_nested_stream_wit__lting_into_long_names: test_normalization._airbyte_raw_non_nest__lting_into_long_names + some_stream_that_was_empty_ab1: test_normalization._airbyte_raw_some_stream_that_was_empty + some_stream_that_was_empty_ab2: test_normalization._airbyte_raw_some_stream_that_was_empty + some_stream_that_was_empty_stg: test_normalization._airbyte_raw_some_stream_that_was_empty + some_stream_that_was_empty_scd: test_normalization._airbyte_raw_some_stream_that_was_empty + some_stream_that_was_empty: test_normalization._airbyte_raw_some_stream_that_was_empty + 
simple_stream_with_na_1g_into_long_names_ab1: test_normalization_namespace._airbyte_raw_simple_s__lting_into_long_names + simple_stream_with_na_1g_into_long_names_ab2: test_normalization_namespace._airbyte_raw_simple_s__lting_into_long_names + simple_stream_with_na_1g_into_long_names_ab3: test_normalization_namespace._airbyte_raw_simple_s__lting_into_long_names + simple_stream_with_na__lting_into_long_names: test_normalization_namespace._airbyte_raw_simple_s__lting_into_long_names + conflict_stream_name_ab1: test_normalization._airbyte_raw_conflict_stream_name + conflict_stream_name_ab2: test_normalization._airbyte_raw_conflict_stream_name + conflict_stream_name_ab3: test_normalization._airbyte_raw_conflict_stream_name + conflict_stream_name: test_normalization._airbyte_raw_conflict_stream_name + conflict_stream_scalar_ab1: test_normalization._airbyte_raw_conflict_stream_scalar + conflict_stream_scalar_ab2: test_normalization._airbyte_raw_conflict_stream_scalar + conflict_stream_scalar_ab3: test_normalization._airbyte_raw_conflict_stream_scalar + conflict_stream_scalar: test_normalization._airbyte_raw_conflict_stream_scalar + conflict_stream_array_ab1: test_normalization._airbyte_raw_conflict_stream_array + conflict_stream_array_ab2: test_normalization._airbyte_raw_conflict_stream_array + conflict_stream_array_ab3: test_normalization._airbyte_raw_conflict_stream_array + conflict_stream_array: test_normalization._airbyte_raw_conflict_stream_array + unnest_alias_ab1: test_normalization._airbyte_raw_unnest_alias + unnest_alias_ab2: test_normalization._airbyte_raw_unnest_alias + unnest_alias_ab3: test_normalization._airbyte_raw_unnest_alias + unnest_alias: test_normalization._airbyte_raw_unnest_alias + nested_stream_with_co_2g_names_partition_ab1: test_normalization._airbyte_raw_nested_s__lting_into_long_names + nested_stream_with_co_2g_names_partition_ab2: test_normalization._airbyte_raw_nested_s__lting_into_long_names + nested_stream_with_co_2g_names_partition_ab3: 
test_normalization._airbyte_raw_nested_s__lting_into_long_names + nested_stream_with_co___long_names_partition: test_normalization._airbyte_raw_nested_s__lting_into_long_names + conflict_stream_name__2flict_stream_name_ab1: test_normalization._airbyte_raw_conflict_stream_name + conflict_stream_name__2flict_stream_name_ab2: test_normalization._airbyte_raw_conflict_stream_name + conflict_stream_name__2flict_stream_name_ab3: test_normalization._airbyte_raw_conflict_stream_name + conflict_stream_name_conflict_stream_name: test_normalization._airbyte_raw_conflict_stream_name + unnest_alias_children_ab1: test_normalization._airbyte_raw_unnest_alias + unnest_alias_children_ab2: test_normalization._airbyte_raw_unnest_alias + unnest_alias_children_ab3: test_normalization._airbyte_raw_unnest_alias + unnest_alias_children: test_normalization._airbyte_raw_unnest_alias + nested_stream_with_co_3double_array_data_ab1: test_normalization._airbyte_raw_nested_s__lting_into_long_names + nested_stream_with_co_3double_array_data_ab2: test_normalization._airbyte_raw_nested_s__lting_into_long_names + nested_stream_with_co_3double_array_data_ab3: test_normalization._airbyte_raw_nested_s__lting_into_long_names + nested_stream_with_co__ion_double_array_data: test_normalization._airbyte_raw_nested_s__lting_into_long_names + nested_stream_with_co_3es_partition_data_ab1: test_normalization._airbyte_raw_nested_s__lting_into_long_names + nested_stream_with_co_3es_partition_data_ab2: test_normalization._airbyte_raw_nested_s__lting_into_long_names + nested_stream_with_co_3es_partition_data_ab3: test_normalization._airbyte_raw_nested_s__lting_into_long_names + nested_stream_with_co___names_partition_data: test_normalization._airbyte_raw_nested_s__lting_into_long_names + conflict_stream_name__3flict_stream_name_ab1: test_normalization._airbyte_raw_conflict_stream_name + conflict_stream_name__3flict_stream_name_ab2: test_normalization._airbyte_raw_conflict_stream_name + 
conflict_stream_name__3flict_stream_name_ab3: test_normalization._airbyte_raw_conflict_stream_name + conflict_stream_name____conflict_stream_name: test_normalization._airbyte_raw_conflict_stream_name + unnest_alias_children_owner_ab1: test_normalization._airbyte_raw_unnest_alias + unnest_alias_children_owner_ab2: test_normalization._airbyte_raw_unnest_alias + unnest_alias_children_owner_ab3: test_normalization._airbyte_raw_unnest_alias + unnest_alias_children_owner: test_normalization._airbyte_raw_unnest_alias + unnest_alias_children_4mn___with__quotes_ab1: test_normalization._airbyte_raw_unnest_alias + unnest_alias_children_4mn___with__quotes_ab2: test_normalization._airbyte_raw_unnest_alias + unnest_alias_children_4mn___with__quotes_ab3: test_normalization._airbyte_raw_unnest_alias + unnest_alias_children__column___with__quotes: test_normalization._airbyte_raw_unnest_alias diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/mysql/test_nested_streams/first_output/airbyte_incremental/test_normalization/nested_stream_with_co___long_names_partition.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/mysql/test_nested_streams/first_output/airbyte_incremental/test_normalization/nested_stream_with_co___long_names_partition.sql index 90323e9b56b1..9d4975c21dac 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/mysql/test_nested_streams/first_output/airbyte_incremental/test_normalization/nested_stream_with_co___long_names_partition.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/mysql/test_nested_streams/first_output/airbyte_incremental/test_normalization/nested_stream_with_co___long_names_partition.sql @@ -4,7 +4,7 @@ test_normalization.`nested_stream_with_co___long_names_partition__dbt_tmp` as ( -with __dbt__CTE__nested_stream_with_co_2g_names_partition_ab1 as ( +with 
__dbt__cte__nested_stream_with_co_2g_names_partition_ab1 as ( -- SQL model to parse JSON blob stored in a single column and extract into separated field columns as described by the JSON Schema -- depends_on: test_normalization.`nested_stream_with_co_1g_into_long_names_scd` @@ -24,10 +24,10 @@ from test_normalization.`nested_stream_with_co_1g_into_long_names_scd` as table_ where 1 = 1 and `partition` is not null -), __dbt__CTE__nested_stream_with_co_2g_names_partition_ab2 as ( +), __dbt__cte__nested_stream_with_co_2g_names_partition_ab2 as ( -- SQL model to cast each column to its adequate SQL type converted from the JSON schema type --- depends_on: __dbt__CTE__nested_stream_with_co_2g_names_partition_ab1 +-- depends_on: __dbt__cte__nested_stream_with_co_2g_names_partition_ab1 select _airbyte_nested_strea__nto_long_names_hashid, double_array_data, @@ -37,23 +37,23 @@ select CURRENT_TIMESTAMP as _airbyte_normalized_at -from __dbt__CTE__nested_stream_with_co_2g_names_partition_ab1 +from __dbt__cte__nested_stream_with_co_2g_names_partition_ab1 -- partition at nested_stream_with_complex_columns_resulting_into_long_names/partition where 1 = 1 -), __dbt__CTE__nested_stream_with_co_2g_names_partition_ab3 as ( +), __dbt__cte__nested_stream_with_co_2g_names_partition_ab3 as ( -- SQL model to build a hash column based on the values of this record --- depends_on: __dbt__CTE__nested_stream_with_co_2g_names_partition_ab2 +-- depends_on: __dbt__cte__nested_stream_with_co_2g_names_partition_ab2 select md5(cast(concat(coalesce(cast(_airbyte_nested_strea__nto_long_names_hashid as char), ''), '-', coalesce(cast(double_array_data as char), ''), '-', coalesce(cast(`DATA` as char), '')) as char)) as _airbyte_partition_hashid, tmp.* -from __dbt__CTE__nested_stream_with_co_2g_names_partition_ab2 tmp +from __dbt__cte__nested_stream_with_co_2g_names_partition_ab2 tmp -- partition at nested_stream_with_complex_columns_resulting_into_long_names/partition where 1 = 1 )-- Final base SQL model 
--- depends_on: __dbt__CTE__nested_stream_with_co_2g_names_partition_ab3 +-- depends_on: __dbt__cte__nested_stream_with_co_2g_names_partition_ab3 select _airbyte_nested_strea__nto_long_names_hashid, double_array_data, @@ -64,7 +64,7 @@ select CURRENT_TIMESTAMP as _airbyte_normalized_at, _airbyte_partition_hashid -from __dbt__CTE__nested_stream_with_co_2g_names_partition_ab3 +from __dbt__cte__nested_stream_with_co_2g_names_partition_ab3 -- partition at nested_stream_with_complex_columns_resulting_into_long_names/partition from test_normalization.`nested_stream_with_co_1g_into_long_names_scd` where 1 = 1 diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/mysql/test_nested_streams/first_output/airbyte_incremental/test_normalization/nested_stream_with_co___names_partition_data.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/mysql/test_nested_streams/first_output/airbyte_incremental/test_normalization/nested_stream_with_co___names_partition_data.sql index eea4c0c44827..b5bce12e294f 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/mysql/test_nested_streams/first_output/airbyte_incremental/test_normalization/nested_stream_with_co___names_partition_data.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/mysql/test_nested_streams/first_output/airbyte_incremental/test_normalization/nested_stream_with_co___names_partition_data.sql @@ -4,7 +4,7 @@ test_normalization.`nested_stream_with_co___names_partition_data__dbt_tmp` as ( -with __dbt__CTE__nested_stream_with_co_3es_partition_data_ab1 as ( +with __dbt__cte__nested_stream_with_co_3es_partition_data_ab1 as ( -- SQL model to parse JSON blob stored in a single column and extract into separated field columns as described by the JSON Schema -- depends_on: test_normalization.`nested_stream_with_co___long_names_partition` @@ -20,7 
+20,7 @@ with numbers as ( select - p0.generated_number * pow(2, 0) + p0.generated_number * power(2, 0) + 1 @@ -68,10 +68,10 @@ left join joined on _airbyte_partition_hashid = joined._airbyte_hashid where 1 = 1 and `DATA` is not null -), __dbt__CTE__nested_stream_with_co_3es_partition_data_ab2 as ( +), __dbt__cte__nested_stream_with_co_3es_partition_data_ab2 as ( -- SQL model to cast each column to its adequate SQL type converted from the JSON schema type --- depends_on: __dbt__CTE__nested_stream_with_co_3es_partition_data_ab1 +-- depends_on: __dbt__cte__nested_stream_with_co_3es_partition_data_ab1 select _airbyte_partition_hashid, cast(currency as char(1024)) as currency, @@ -80,23 +80,23 @@ select CURRENT_TIMESTAMP as _airbyte_normalized_at -from __dbt__CTE__nested_stream_with_co_3es_partition_data_ab1 +from __dbt__cte__nested_stream_with_co_3es_partition_data_ab1 -- DATA at nested_stream_with_complex_columns_resulting_into_long_names/partition/DATA where 1 = 1 -), __dbt__CTE__nested_stream_with_co_3es_partition_data_ab3 as ( +), __dbt__cte__nested_stream_with_co_3es_partition_data_ab3 as ( -- SQL model to build a hash column based on the values of this record --- depends_on: __dbt__CTE__nested_stream_with_co_3es_partition_data_ab2 +-- depends_on: __dbt__cte__nested_stream_with_co_3es_partition_data_ab2 select md5(cast(concat(coalesce(cast(_airbyte_partition_hashid as char), ''), '-', coalesce(cast(currency as char), '')) as char)) as _airbyte_data_hashid, tmp.* -from __dbt__CTE__nested_stream_with_co_3es_partition_data_ab2 tmp +from __dbt__cte__nested_stream_with_co_3es_partition_data_ab2 tmp -- DATA at nested_stream_with_complex_columns_resulting_into_long_names/partition/DATA where 1 = 1 )-- Final base SQL model --- depends_on: __dbt__CTE__nested_stream_with_co_3es_partition_data_ab3 +-- depends_on: __dbt__cte__nested_stream_with_co_3es_partition_data_ab3 select _airbyte_partition_hashid, currency, @@ -106,7 +106,7 @@ select CURRENT_TIMESTAMP as 
_airbyte_normalized_at, _airbyte_data_hashid -from __dbt__CTE__nested_stream_with_co_3es_partition_data_ab3 +from __dbt__cte__nested_stream_with_co_3es_partition_data_ab3 -- DATA at nested_stream_with_complex_columns_resulting_into_long_names/partition/DATA from test_normalization.`nested_stream_with_co___long_names_partition` where 1 = 1 diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/mysql/test_nested_streams/first_output/airbyte_incremental/test_normalization/nested_stream_with_co__ion_double_array_data.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/mysql/test_nested_streams/first_output/airbyte_incremental/test_normalization/nested_stream_with_co__ion_double_array_data.sql index 8819c2350a9a..279441127cad 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/mysql/test_nested_streams/first_output/airbyte_incremental/test_normalization/nested_stream_with_co__ion_double_array_data.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/mysql/test_nested_streams/first_output/airbyte_incremental/test_normalization/nested_stream_with_co__ion_double_array_data.sql @@ -4,7 +4,7 @@ test_normalization.`nested_stream_with_co__ion_double_array_data__dbt_tmp` as ( -with __dbt__CTE__nested_stream_with_co_3double_array_data_ab1 as ( +with __dbt__cte__nested_stream_with_co_3double_array_data_ab1 as ( -- SQL model to parse JSON blob stored in a single column and extract into separated field columns as described by the JSON Schema -- depends_on: test_normalization.`nested_stream_with_co___long_names_partition` @@ -20,7 +20,7 @@ with numbers as ( select - p0.generated_number * pow(2, 0) + p0.generated_number * power(2, 0) + 1 @@ -68,10 +68,10 @@ left join joined on _airbyte_partition_hashid = joined._airbyte_hashid where 1 = 1 and double_array_data is not null -), 
__dbt__CTE__nested_stream_with_co_3double_array_data_ab2 as ( +), __dbt__cte__nested_stream_with_co_3double_array_data_ab2 as ( -- SQL model to cast each column to its adequate SQL type converted from the JSON schema type --- depends_on: __dbt__CTE__nested_stream_with_co_3double_array_data_ab1 +-- depends_on: __dbt__cte__nested_stream_with_co_3double_array_data_ab1 select _airbyte_partition_hashid, cast(id as char(1024)) as id, @@ -80,23 +80,23 @@ select CURRENT_TIMESTAMP as _airbyte_normalized_at -from __dbt__CTE__nested_stream_with_co_3double_array_data_ab1 +from __dbt__cte__nested_stream_with_co_3double_array_data_ab1 -- double_array_data at nested_stream_with_complex_columns_resulting_into_long_names/partition/double_array_data where 1 = 1 -), __dbt__CTE__nested_stream_with_co_3double_array_data_ab3 as ( +), __dbt__cte__nested_stream_with_co_3double_array_data_ab3 as ( -- SQL model to build a hash column based on the values of this record --- depends_on: __dbt__CTE__nested_stream_with_co_3double_array_data_ab2 +-- depends_on: __dbt__cte__nested_stream_with_co_3double_array_data_ab2 select md5(cast(concat(coalesce(cast(_airbyte_partition_hashid as char), ''), '-', coalesce(cast(id as char), '')) as char)) as _airbyte_double_array_data_hashid, tmp.* -from __dbt__CTE__nested_stream_with_co_3double_array_data_ab2 tmp +from __dbt__cte__nested_stream_with_co_3double_array_data_ab2 tmp -- double_array_data at nested_stream_with_complex_columns_resulting_into_long_names/partition/double_array_data where 1 = 1 )-- Final base SQL model --- depends_on: __dbt__CTE__nested_stream_with_co_3double_array_data_ab3 +-- depends_on: __dbt__cte__nested_stream_with_co_3double_array_data_ab3 select _airbyte_partition_hashid, id, @@ -106,7 +106,7 @@ select CURRENT_TIMESTAMP as _airbyte_normalized_at, _airbyte_double_array_data_hashid -from __dbt__CTE__nested_stream_with_co_3double_array_data_ab3 +from __dbt__cte__nested_stream_with_co_3double_array_data_ab3 -- double_array_data at 
nested_stream_with_complex_columns_resulting_into_long_names/partition/double_array_data from test_normalization.`nested_stream_with_co___long_names_partition` where 1 = 1 diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/mysql/test_nested_streams/models/generated/airbyte_ctes/test_normalization/nested_stream_with_co_1g_into_long_names_ab1.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/mysql/test_nested_streams/models/generated/airbyte_ctes/test_normalization/nested_stream_with_co_1g_into_long_names_ab1.sql index 22b025402fdc..d638e7a898ff 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/mysql/test_nested_streams/models/generated/airbyte_ctes/test_normalization/nested_stream_with_co_1g_into_long_names_ab1.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/mysql/test_nested_streams/models/generated/airbyte_ctes/test_normalization/nested_stream_with_co_1g_into_long_names_ab1.sql @@ -15,5 +15,5 @@ select from {{ source('test_normalization', '_airbyte_raw_nested_s__lting_into_long_names') }} as table_alias -- nested_stream_with_co__lting_into_long_names where 1 = 1 -{{ incremental_clause('_airbyte_emitted_at') }} +{{ incremental_clause('_airbyte_emitted_at', this) }} diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/mysql/test_nested_streams/models/generated/airbyte_ctes/test_normalization/nested_stream_with_co_1g_into_long_names_ab2.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/mysql/test_nested_streams/models/generated/airbyte_ctes/test_normalization/nested_stream_with_co_1g_into_long_names_ab2.sql index 6f090707a2ba..a86a84248a87 100644 --- 
a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/mysql/test_nested_streams/models/generated/airbyte_ctes/test_normalization/nested_stream_with_co_1g_into_long_names_ab2.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/mysql/test_nested_streams/models/generated/airbyte_ctes/test_normalization/nested_stream_with_co_1g_into_long_names_ab2.sql @@ -15,5 +15,5 @@ select from {{ ref('nested_stream_with_co_1g_into_long_names_ab1') }} -- nested_stream_with_co__lting_into_long_names where 1 = 1 -{{ incremental_clause('_airbyte_emitted_at') }} +{{ incremental_clause('_airbyte_emitted_at', this) }} diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/mysql/test_nested_streams/models/generated/airbyte_ctes/test_normalization/nested_stream_with_co_2g_names_partition_ab1.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/mysql/test_nested_streams/models/generated/airbyte_ctes/test_normalization/nested_stream_with_co_2g_names_partition_ab1.sql index a98153d35d87..427a929211b2 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/mysql/test_nested_streams/models/generated/airbyte_ctes/test_normalization/nested_stream_with_co_2g_names_partition_ab1.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/mysql/test_nested_streams/models/generated/airbyte_ctes/test_normalization/nested_stream_with_co_2g_names_partition_ab1.sql @@ -15,5 +15,5 @@ from {{ ref('nested_stream_with_co_1g_into_long_names_scd') }} as table_alias -- partition at nested_stream_with_complex_columns_resulting_into_long_names/partition where 1 = 1 and {{ adapter.quote('partition') }} is not null -{{ incremental_clause('_airbyte_emitted_at') }} +{{ incremental_clause('_airbyte_emitted_at', this) }} diff --git 
a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/mysql/test_nested_streams/models/generated/airbyte_ctes/test_normalization/nested_stream_with_co_3double_array_data_ab1.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/mysql/test_nested_streams/models/generated/airbyte_ctes/test_normalization/nested_stream_with_co_3double_array_data_ab1.sql index 2e8698e56951..a8ca4bbb7d40 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/mysql/test_nested_streams/models/generated/airbyte_ctes/test_normalization/nested_stream_with_co_3double_array_data_ab1.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/mysql/test_nested_streams/models/generated/airbyte_ctes/test_normalization/nested_stream_with_co_3double_array_data_ab1.sql @@ -16,5 +16,5 @@ from {{ ref('nested_stream_with_co___long_names_partition') }} as table_alias {{ cross_join_unnest('partition', 'double_array_data') }} where 1 = 1 and double_array_data is not null -{{ incremental_clause('_airbyte_emitted_at') }} +{{ incremental_clause('_airbyte_emitted_at', this) }} diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/mysql/test_nested_streams/models/generated/airbyte_ctes/test_normalization/nested_stream_with_co_3es_partition_data_ab1.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/mysql/test_nested_streams/models/generated/airbyte_ctes/test_normalization/nested_stream_with_co_3es_partition_data_ab1.sql index 241d66624840..cdf1151ee10d 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/mysql/test_nested_streams/models/generated/airbyte_ctes/test_normalization/nested_stream_with_co_3es_partition_data_ab1.sql +++ 
b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/mysql/test_nested_streams/models/generated/airbyte_ctes/test_normalization/nested_stream_with_co_3es_partition_data_ab1.sql @@ -16,5 +16,5 @@ from {{ ref('nested_stream_with_co___long_names_partition') }} as table_alias {{ cross_join_unnest('partition', adapter.quote('DATA')) }} where 1 = 1 and {{ adapter.quote('DATA') }} is not null -{{ incremental_clause('_airbyte_emitted_at') }} +{{ incremental_clause('_airbyte_emitted_at', this) }} diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/mysql/test_nested_streams/models/generated/airbyte_incremental/scd/test_normalization/nested_stream_with_co_1g_into_long_names_scd.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/mysql/test_nested_streams/models/generated/airbyte_incremental/scd/test_normalization/nested_stream_with_co_1g_into_long_names_scd.sql index d0e8e603259f..9ffb6bd5558c 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/mysql/test_nested_streams/models/generated/airbyte_incremental/scd/test_normalization/nested_stream_with_co_1g_into_long_names_scd.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/mysql/test_nested_streams/models/generated/airbyte_incremental/scd/test_normalization/nested_stream_with_co_1g_into_long_names_scd.sql @@ -1,7 +1,53 @@ {{ config( unique_key = "_airbyte_unique_key_scd", schema = "test_normalization", - post_hook = ["drop view _airbyte_test_normalization.nested_stream_with_co_1g_into_long_names_stg"], + post_hook = [" + {% + set final_table_relation = adapter.get_relation( + database=this.database, + schema=this.schema, + identifier='nested_stream_with_co__lting_into_long_names' + ) + %} + {# + If the final table doesn't exist, then obviously we can't delete anything from it. 
+ Also, after a reset, the final table is created without the _airbyte_unique_key column (this column is created during the first sync) + So skip this deletion if the column doesn't exist. (in this case, the table is guaranteed to be empty anyway) + #} + {% + if final_table_relation is not none and '_airbyte_unique_key' in adapter.get_columns_in_relation(final_table_relation)|map(attribute='name') + %} + -- Delete records which are no longer active: + -- This query is equivalent, but the left join version is more performant: + -- delete from final_table where unique_key in ( + -- select unique_key from scd_table where 1 = 1 + -- ) and unique_key not in ( + -- select unique_key from scd_table where active_row = 1 + -- ) + -- We're incremental against normalized_at rather than emitted_at because we need to fetch the SCD + -- entries that were _updated_ recently. This is because a deleted record will have an SCD record + -- which was emitted a long time ago, but recently re-normalized to have active_row = 0. + delete from {{ final_table_relation }} where {{ final_table_relation }}._airbyte_unique_key in ( + select recent_records.unique_key + from ( + select distinct _airbyte_unique_key as unique_key + from {{ this }} + where 1=1 {{ incremental_clause('_airbyte_normalized_at', this.schema + '.' + adapter.quote('nested_stream_with_co__lting_into_long_names')) }} + ) recent_records + left join ( + select _airbyte_unique_key as unique_key, count(_airbyte_unique_key) as active_count + from {{ this }} + where _airbyte_active_row = 1 {{ incremental_clause('_airbyte_normalized_at', this.schema + '.' 
+ adapter.quote('nested_stream_with_co__lting_into_long_names')) }} + group by _airbyte_unique_key + ) active_counts + on recent_records.unique_key = active_counts.unique_key + where active_count is null or active_count = 0 + ) + {% else %} + -- We have to have a non-empty query, so just do a noop delete + delete from {{ this }} where 1=0 + {% endif %} + ","drop view _airbyte_test_normalization.nested_stream_with_co_1g_into_long_names_stg"], tags = [ "top-level" ] ) }} -- depends_on: ref('nested_stream_with_co_1g_into_long_names_stg') @@ -14,7 +60,7 @@ new_data as ( from {{ ref('nested_stream_with_co_1g_into_long_names_stg') }} -- nested_stream_with_co__lting_into_long_names from {{ source('test_normalization', '_airbyte_raw_nested_s__lting_into_long_names') }} where 1 = 1 - {{ incremental_clause('_airbyte_emitted_at') }} + {{ incremental_clause('_airbyte_emitted_at', this) }} ), new_data_ids as ( -- build a subset of _airbyte_unique_key from rows that are new diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/mysql/test_nested_streams/models/generated/airbyte_incremental/test_normalization/nested_stream_with_co___long_names_partition.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/mysql/test_nested_streams/models/generated/airbyte_incremental/test_normalization/nested_stream_with_co___long_names_partition.sql index 155daecc1f2c..0c8adc779de9 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/mysql/test_nested_streams/models/generated/airbyte_incremental/test_normalization/nested_stream_with_co___long_names_partition.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/mysql/test_nested_streams/models/generated/airbyte_incremental/test_normalization/nested_stream_with_co___long_names_partition.sql @@ -15,5 +15,5 @@ select from {{ 
ref('nested_stream_with_co_2g_names_partition_ab3') }} -- partition at nested_stream_with_complex_columns_resulting_into_long_names/partition from {{ ref('nested_stream_with_co_1g_into_long_names_scd') }} where 1 = 1 -{{ incremental_clause('_airbyte_emitted_at') }} +{{ incremental_clause('_airbyte_emitted_at', this) }} diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/mysql/test_nested_streams/models/generated/airbyte_incremental/test_normalization/nested_stream_with_co___names_partition_data.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/mysql/test_nested_streams/models/generated/airbyte_incremental/test_normalization/nested_stream_with_co___names_partition_data.sql index 3dfd62364578..92e44abc9298 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/mysql/test_nested_streams/models/generated/airbyte_incremental/test_normalization/nested_stream_with_co___names_partition_data.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/mysql/test_nested_streams/models/generated/airbyte_incremental/test_normalization/nested_stream_with_co___names_partition_data.sql @@ -14,5 +14,5 @@ select from {{ ref('nested_stream_with_co_3es_partition_data_ab3') }} -- DATA at nested_stream_with_complex_columns_resulting_into_long_names/partition/DATA from {{ ref('nested_stream_with_co___long_names_partition') }} where 1 = 1 -{{ incremental_clause('_airbyte_emitted_at') }} +{{ incremental_clause('_airbyte_emitted_at', this) }} diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/mysql/test_nested_streams/models/generated/airbyte_incremental/test_normalization/nested_stream_with_co__ion_double_array_data.sql 
b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/mysql/test_nested_streams/models/generated/airbyte_incremental/test_normalization/nested_stream_with_co__ion_double_array_data.sql index 3bd5623a7987..6a17d6258b3e 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/mysql/test_nested_streams/models/generated/airbyte_incremental/test_normalization/nested_stream_with_co__ion_double_array_data.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/mysql/test_nested_streams/models/generated/airbyte_incremental/test_normalization/nested_stream_with_co__ion_double_array_data.sql @@ -14,5 +14,5 @@ select from {{ ref('nested_stream_with_co_3double_array_data_ab3') }} -- double_array_data at nested_stream_with_complex_columns_resulting_into_long_names/partition/double_array_data from {{ ref('nested_stream_with_co___long_names_partition') }} where 1 = 1 -{{ incremental_clause('_airbyte_emitted_at') }} +{{ incremental_clause('_airbyte_emitted_at', this) }} diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/mysql/test_nested_streams/models/generated/airbyte_incremental/test_normalization/nested_stream_with_co__lting_into_long_names.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/mysql/test_nested_streams/models/generated/airbyte_incremental/test_normalization/nested_stream_with_co__lting_into_long_names.sql index f56a95685e58..0ea84390902e 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/mysql/test_nested_streams/models/generated/airbyte_incremental/test_normalization/nested_stream_with_co__lting_into_long_names.sql +++ 
b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/mysql/test_nested_streams/models/generated/airbyte_incremental/test_normalization/nested_stream_with_co__lting_into_long_names.sql @@ -18,5 +18,5 @@ from {{ ref('nested_stream_with_co_1g_into_long_names_scd') }} -- nested_stream_with_co__lting_into_long_names from {{ source('test_normalization', '_airbyte_raw_nested_s__lting_into_long_names') }} where 1 = 1 and _airbyte_active_row = 1 -{{ incremental_clause('_airbyte_emitted_at') }} +{{ incremental_clause('_airbyte_emitted_at', this) }} diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/mysql/test_nested_streams/second_output/airbyte_incremental/test_normalization/nested_stream_with_co___long_names_partition.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/mysql/test_nested_streams/second_output/airbyte_incremental/test_normalization/nested_stream_with_co___long_names_partition.sql index 90323e9b56b1..9d4975c21dac 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/mysql/test_nested_streams/second_output/airbyte_incremental/test_normalization/nested_stream_with_co___long_names_partition.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/mysql/test_nested_streams/second_output/airbyte_incremental/test_normalization/nested_stream_with_co___long_names_partition.sql @@ -4,7 +4,7 @@ test_normalization.`nested_stream_with_co___long_names_partition__dbt_tmp` as ( -with __dbt__CTE__nested_stream_with_co_2g_names_partition_ab1 as ( +with __dbt__cte__nested_stream_with_co_2g_names_partition_ab1 as ( -- SQL model to parse JSON blob stored in a single column and extract into separated field columns as described by the JSON Schema -- depends_on: test_normalization.`nested_stream_with_co_1g_into_long_names_scd` @@ -24,10 +24,10 @@ from 
test_normalization.`nested_stream_with_co_1g_into_long_names_scd` as table_ where 1 = 1 and `partition` is not null -), __dbt__CTE__nested_stream_with_co_2g_names_partition_ab2 as ( +), __dbt__cte__nested_stream_with_co_2g_names_partition_ab2 as ( -- SQL model to cast each column to its adequate SQL type converted from the JSON schema type --- depends_on: __dbt__CTE__nested_stream_with_co_2g_names_partition_ab1 +-- depends_on: __dbt__cte__nested_stream_with_co_2g_names_partition_ab1 select _airbyte_nested_strea__nto_long_names_hashid, double_array_data, @@ -37,23 +37,23 @@ select CURRENT_TIMESTAMP as _airbyte_normalized_at -from __dbt__CTE__nested_stream_with_co_2g_names_partition_ab1 +from __dbt__cte__nested_stream_with_co_2g_names_partition_ab1 -- partition at nested_stream_with_complex_columns_resulting_into_long_names/partition where 1 = 1 -), __dbt__CTE__nested_stream_with_co_2g_names_partition_ab3 as ( +), __dbt__cte__nested_stream_with_co_2g_names_partition_ab3 as ( -- SQL model to build a hash column based on the values of this record --- depends_on: __dbt__CTE__nested_stream_with_co_2g_names_partition_ab2 +-- depends_on: __dbt__cte__nested_stream_with_co_2g_names_partition_ab2 select md5(cast(concat(coalesce(cast(_airbyte_nested_strea__nto_long_names_hashid as char), ''), '-', coalesce(cast(double_array_data as char), ''), '-', coalesce(cast(`DATA` as char), '')) as char)) as _airbyte_partition_hashid, tmp.* -from __dbt__CTE__nested_stream_with_co_2g_names_partition_ab2 tmp +from __dbt__cte__nested_stream_with_co_2g_names_partition_ab2 tmp -- partition at nested_stream_with_complex_columns_resulting_into_long_names/partition where 1 = 1 )-- Final base SQL model --- depends_on: __dbt__CTE__nested_stream_with_co_2g_names_partition_ab3 +-- depends_on: __dbt__cte__nested_stream_with_co_2g_names_partition_ab3 select _airbyte_nested_strea__nto_long_names_hashid, double_array_data, @@ -64,7 +64,7 @@ select CURRENT_TIMESTAMP as _airbyte_normalized_at, 
_airbyte_partition_hashid -from __dbt__CTE__nested_stream_with_co_2g_names_partition_ab3 +from __dbt__cte__nested_stream_with_co_2g_names_partition_ab3 -- partition at nested_stream_with_complex_columns_resulting_into_long_names/partition from test_normalization.`nested_stream_with_co_1g_into_long_names_scd` where 1 = 1 diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/mysql/test_nested_streams/second_output/airbyte_incremental/test_normalization/nested_stream_with_co___names_partition_data.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/mysql/test_nested_streams/second_output/airbyte_incremental/test_normalization/nested_stream_with_co___names_partition_data.sql index eea4c0c44827..b5bce12e294f 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/mysql/test_nested_streams/second_output/airbyte_incremental/test_normalization/nested_stream_with_co___names_partition_data.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/mysql/test_nested_streams/second_output/airbyte_incremental/test_normalization/nested_stream_with_co___names_partition_data.sql @@ -4,7 +4,7 @@ test_normalization.`nested_stream_with_co___names_partition_data__dbt_tmp` as ( -with __dbt__CTE__nested_stream_with_co_3es_partition_data_ab1 as ( +with __dbt__cte__nested_stream_with_co_3es_partition_data_ab1 as ( -- SQL model to parse JSON blob stored in a single column and extract into separated field columns as described by the JSON Schema -- depends_on: test_normalization.`nested_stream_with_co___long_names_partition` @@ -20,7 +20,7 @@ with numbers as ( select - p0.generated_number * pow(2, 0) + p0.generated_number * power(2, 0) + 1 @@ -68,10 +68,10 @@ left join joined on _airbyte_partition_hashid = joined._airbyte_hashid where 1 = 1 and `DATA` is not null -), 
__dbt__CTE__nested_stream_with_co_3es_partition_data_ab2 as ( +), __dbt__cte__nested_stream_with_co_3es_partition_data_ab2 as ( -- SQL model to cast each column to its adequate SQL type converted from the JSON schema type --- depends_on: __dbt__CTE__nested_stream_with_co_3es_partition_data_ab1 +-- depends_on: __dbt__cte__nested_stream_with_co_3es_partition_data_ab1 select _airbyte_partition_hashid, cast(currency as char(1024)) as currency, @@ -80,23 +80,23 @@ select CURRENT_TIMESTAMP as _airbyte_normalized_at -from __dbt__CTE__nested_stream_with_co_3es_partition_data_ab1 +from __dbt__cte__nested_stream_with_co_3es_partition_data_ab1 -- DATA at nested_stream_with_complex_columns_resulting_into_long_names/partition/DATA where 1 = 1 -), __dbt__CTE__nested_stream_with_co_3es_partition_data_ab3 as ( +), __dbt__cte__nested_stream_with_co_3es_partition_data_ab3 as ( -- SQL model to build a hash column based on the values of this record --- depends_on: __dbt__CTE__nested_stream_with_co_3es_partition_data_ab2 +-- depends_on: __dbt__cte__nested_stream_with_co_3es_partition_data_ab2 select md5(cast(concat(coalesce(cast(_airbyte_partition_hashid as char), ''), '-', coalesce(cast(currency as char), '')) as char)) as _airbyte_data_hashid, tmp.* -from __dbt__CTE__nested_stream_with_co_3es_partition_data_ab2 tmp +from __dbt__cte__nested_stream_with_co_3es_partition_data_ab2 tmp -- DATA at nested_stream_with_complex_columns_resulting_into_long_names/partition/DATA where 1 = 1 )-- Final base SQL model --- depends_on: __dbt__CTE__nested_stream_with_co_3es_partition_data_ab3 +-- depends_on: __dbt__cte__nested_stream_with_co_3es_partition_data_ab3 select _airbyte_partition_hashid, currency, @@ -106,7 +106,7 @@ select CURRENT_TIMESTAMP as _airbyte_normalized_at, _airbyte_data_hashid -from __dbt__CTE__nested_stream_with_co_3es_partition_data_ab3 +from __dbt__cte__nested_stream_with_co_3es_partition_data_ab3 -- DATA at 
nested_stream_with_complex_columns_resulting_into_long_names/partition/DATA from test_normalization.`nested_stream_with_co___long_names_partition` where 1 = 1 diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/mysql/test_nested_streams/second_output/airbyte_incremental/test_normalization/nested_stream_with_co__ion_double_array_data.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/mysql/test_nested_streams/second_output/airbyte_incremental/test_normalization/nested_stream_with_co__ion_double_array_data.sql index 8819c2350a9a..279441127cad 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/mysql/test_nested_streams/second_output/airbyte_incremental/test_normalization/nested_stream_with_co__ion_double_array_data.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/mysql/test_nested_streams/second_output/airbyte_incremental/test_normalization/nested_stream_with_co__ion_double_array_data.sql @@ -4,7 +4,7 @@ test_normalization.`nested_stream_with_co__ion_double_array_data__dbt_tmp` as ( -with __dbt__CTE__nested_stream_with_co_3double_array_data_ab1 as ( +with __dbt__cte__nested_stream_with_co_3double_array_data_ab1 as ( -- SQL model to parse JSON blob stored in a single column and extract into separated field columns as described by the JSON Schema -- depends_on: test_normalization.`nested_stream_with_co___long_names_partition` @@ -20,7 +20,7 @@ with numbers as ( select - p0.generated_number * pow(2, 0) + p0.generated_number * power(2, 0) + 1 @@ -68,10 +68,10 @@ left join joined on _airbyte_partition_hashid = joined._airbyte_hashid where 1 = 1 and double_array_data is not null -), __dbt__CTE__nested_stream_with_co_3double_array_data_ab2 as ( +), __dbt__cte__nested_stream_with_co_3double_array_data_ab2 as ( -- SQL model to cast each column to its adequate SQL type converted 
from the JSON schema type --- depends_on: __dbt__CTE__nested_stream_with_co_3double_array_data_ab1 +-- depends_on: __dbt__cte__nested_stream_with_co_3double_array_data_ab1 select _airbyte_partition_hashid, cast(id as char(1024)) as id, @@ -80,23 +80,23 @@ select CURRENT_TIMESTAMP as _airbyte_normalized_at -from __dbt__CTE__nested_stream_with_co_3double_array_data_ab1 +from __dbt__cte__nested_stream_with_co_3double_array_data_ab1 -- double_array_data at nested_stream_with_complex_columns_resulting_into_long_names/partition/double_array_data where 1 = 1 -), __dbt__CTE__nested_stream_with_co_3double_array_data_ab3 as ( +), __dbt__cte__nested_stream_with_co_3double_array_data_ab3 as ( -- SQL model to build a hash column based on the values of this record --- depends_on: __dbt__CTE__nested_stream_with_co_3double_array_data_ab2 +-- depends_on: __dbt__cte__nested_stream_with_co_3double_array_data_ab2 select md5(cast(concat(coalesce(cast(_airbyte_partition_hashid as char), ''), '-', coalesce(cast(id as char), '')) as char)) as _airbyte_double_array_data_hashid, tmp.* -from __dbt__CTE__nested_stream_with_co_3double_array_data_ab2 tmp +from __dbt__cte__nested_stream_with_co_3double_array_data_ab2 tmp -- double_array_data at nested_stream_with_complex_columns_resulting_into_long_names/partition/double_array_data where 1 = 1 )-- Final base SQL model --- depends_on: __dbt__CTE__nested_stream_with_co_3double_array_data_ab3 +-- depends_on: __dbt__cte__nested_stream_with_co_3double_array_data_ab3 select _airbyte_partition_hashid, id, @@ -106,7 +106,7 @@ select CURRENT_TIMESTAMP as _airbyte_normalized_at, _airbyte_double_array_data_hashid -from __dbt__CTE__nested_stream_with_co_3double_array_data_ab3 +from __dbt__cte__nested_stream_with_co_3double_array_data_ab3 -- double_array_data at nested_stream_with_complex_columns_resulting_into_long_names/partition/double_array_data from test_normalization.`nested_stream_with_co___long_names_partition` where 1 = 1 diff --git 
a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/mysql/test_simple_streams/dbt_project.yml b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/mysql/test_simple_streams/dbt_project.yml index db791a568a0b..5650b278a87c 100755 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/mysql/test_simple_streams/dbt_project.yml +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/mysql/test_simple_streams/dbt_project.yml @@ -1,45 +1,29 @@ -# This file is necessary to install dbt-utils with dbt deps -# the content will be overwritten by the transform function - -# Name your package! Package names should contain only lowercase characters -# and underscores. A good package name should reflect your organization's -# name or the intended use of these models -name: "airbyte_utils" -version: "1.0" +name: airbyte_utils +version: '1.0' config-version: 2 - -# This setting configures which "profile" dbt uses for this project. Profiles contain -# database connection information, and should be configured in the ~/.dbt/profiles.yml file -profile: "normalize" - -# These configurations specify where dbt should look for different types of files. -# The `source-paths` config, for example, states that source models can be found -# in the "models/" directory. You probably won't need to change these! 
-source-paths: ["models"] -docs-paths: ["docs"] -analysis-paths: ["analysis"] -test-paths: ["tests"] -data-paths: ["data"] -macro-paths: ["macros"] - -target-path: "../build" # directory which will store compiled SQL files -log-path: "../logs" # directory which will store DBT logs -modules-path: "/dbt" # directory which will store external DBT dependencies - -clean-targets: # directories to be removed by `dbt clean` - - "build" - - "dbt_modules" - +profile: normalize +model-paths: +- models +docs-paths: +- docs +analysis-paths: +- analysis +test-paths: +- tests +seed-paths: +- data +macro-paths: +- macros +target-path: ../build +log-path: ../logs +packages-install-path: /dbt +clean-targets: +- build +- dbt_modules quoting: database: true - # Temporarily disabling the behavior of the ExtendedNameTransformer on table/schema names, see (issue #1785) - # all schemas should be unquoted schema: false identifier: true - -# You can define configurations for models in the `source-paths` directory here. -# Using these configurations, you can enable or disable models, change how they -# are materialized, and more! 
models: airbyte_utils: +materialized: table @@ -49,8 +33,6 @@ models: +materialized: ephemeral airbyte_incremental: +tags: incremental_tables - # incremental is not enabled for MySql yet - #+materialized: incremental +materialized: table airbyte_tables: +tags: normalized_tables @@ -58,6 +40,42 @@ models: airbyte_views: +tags: airbyte_internal_views +materialized: view - vars: - dbt_utils_dispatch_list: ["airbyte_utils"] + dbt_utils_dispatch_list: + - airbyte_utils + json_column: _airbyte_data + models_to_source: + exchange_rate_ab1: test_normalization._airbyte_raw_exchange_rate + exchange_rate_ab2: test_normalization._airbyte_raw_exchange_rate + exchange_rate_ab3: test_normalization._airbyte_raw_exchange_rate + exchange_rate: test_normalization._airbyte_raw_exchange_rate + dedup_exchange_rate_ab1: test_normalization._airbyte_raw_dedup_exchange_rate + dedup_exchange_rate_ab2: test_normalization._airbyte_raw_dedup_exchange_rate + dedup_exchange_rate_stg: test_normalization._airbyte_raw_dedup_exchange_rate + dedup_exchange_rate_scd: test_normalization._airbyte_raw_dedup_exchange_rate + dedup_exchange_rate: test_normalization._airbyte_raw_dedup_exchange_rate + renamed_dedup_cdc_excluded_ab1: test_normalization._airbyte_raw_renamed_dedup_cdc_excluded + renamed_dedup_cdc_excluded_ab2: test_normalization._airbyte_raw_renamed_dedup_cdc_excluded + renamed_dedup_cdc_excluded_stg: test_normalization._airbyte_raw_renamed_dedup_cdc_excluded + renamed_dedup_cdc_excluded_scd: test_normalization._airbyte_raw_renamed_dedup_cdc_excluded + renamed_dedup_cdc_excluded: test_normalization._airbyte_raw_renamed_dedup_cdc_excluded + dedup_cdc_excluded_ab1: test_normalization._airbyte_raw_dedup_cdc_excluded + dedup_cdc_excluded_ab2: test_normalization._airbyte_raw_dedup_cdc_excluded + dedup_cdc_excluded_stg: test_normalization._airbyte_raw_dedup_cdc_excluded + dedup_cdc_excluded_scd: test_normalization._airbyte_raw_dedup_cdc_excluded + dedup_cdc_excluded: 
test_normalization._airbyte_raw_dedup_cdc_excluded + pos_dedup_cdcx_ab1: test_normalization._airbyte_raw_pos_dedup_cdcx + pos_dedup_cdcx_ab2: test_normalization._airbyte_raw_pos_dedup_cdcx + pos_dedup_cdcx_stg: test_normalization._airbyte_raw_pos_dedup_cdcx + pos_dedup_cdcx_scd: test_normalization._airbyte_raw_pos_dedup_cdcx + pos_dedup_cdcx: test_normalization._airbyte_raw_pos_dedup_cdcx + 1_prefix_startwith_number_ab1: test_normalization._airbyte_raw_1_prefix_startwith_number + 1_prefix_startwith_number_ab2: test_normalization._airbyte_raw_1_prefix_startwith_number + 1_prefix_startwith_number_stg: test_normalization._airbyte_raw_1_prefix_startwith_number + 1_prefix_startwith_number_scd: test_normalization._airbyte_raw_1_prefix_startwith_number + 1_prefix_startwith_number: test_normalization._airbyte_raw_1_prefix_startwith_number + multiple_column_names_conflicts_ab1: test_normalization._airbyte_raw_multiple_column_names_conflicts + multiple_column_names_conflicts_ab2: test_normalization._airbyte_raw_multiple_column_names_conflicts + multiple_column_names_conflicts_stg: test_normalization._airbyte_raw_multiple_column_names_conflicts + multiple_column_names_conflicts_scd: test_normalization._airbyte_raw_multiple_column_names_conflicts + multiple_column_names_conflicts: test_normalization._airbyte_raw_multiple_column_names_conflicts diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/mysql/test_simple_streams/first_output/airbyte_tables/test_normalization/exchange_rate.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/mysql/test_simple_streams/first_output/airbyte_tables/test_normalization/exchange_rate.sql index 0ee59f50d8e7..fd770070d75f 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/mysql/test_simple_streams/first_output/airbyte_tables/test_normalization/exchange_rate.sql +++ 
b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/mysql/test_simple_streams/first_output/airbyte_tables/test_normalization/exchange_rate.sql @@ -4,7 +4,7 @@ test_normalization.`exchange_rate__dbt_tmp` as ( -with __dbt__CTE__exchange_rate_ab1 as ( +with __dbt__cte__exchange_rate_ab1 as ( -- SQL model to parse JSON blob stored in a single column and extract into separated field columns as described by the JSON Schema -- depends_on: test_normalization._airbyte_raw_exchange_rate @@ -35,10 +35,10 @@ select from test_normalization._airbyte_raw_exchange_rate as table_alias -- exchange_rate where 1 = 1 -), __dbt__CTE__exchange_rate_ab2 as ( +), __dbt__cte__exchange_rate_ab2 as ( -- SQL model to cast each column to its adequate SQL type converted from the JSON schema type --- depends_on: __dbt__CTE__exchange_rate_ab1 +-- depends_on: __dbt__cte__exchange_rate_ab1 select cast(id as signed @@ -65,21 +65,21 @@ select CURRENT_TIMESTAMP as _airbyte_normalized_at -from __dbt__CTE__exchange_rate_ab1 +from __dbt__cte__exchange_rate_ab1 -- exchange_rate where 1 = 1 -), __dbt__CTE__exchange_rate_ab3 as ( +), __dbt__cte__exchange_rate_ab3 as ( -- SQL model to build a hash column based on the values of this record --- depends_on: __dbt__CTE__exchange_rate_ab2 +-- depends_on: __dbt__cte__exchange_rate_ab2 select md5(cast(concat(coalesce(cast(id as char), ''), '-', coalesce(cast(currency as char), ''), '-', coalesce(cast(`date` as char), ''), '-', coalesce(cast(timestamp_col as char), ''), '-', coalesce(cast(`HKD@spƩƧiƤl & characters` as char), ''), '-', coalesce(cast(hkd_special___characters as char), ''), '-', coalesce(cast(nzd as char), ''), '-', coalesce(cast(usd as char), ''), '-', coalesce(cast(`column__'with"_quotes` as char), '')) as char)) as _airbyte_exchange_rate_hashid, tmp.* -from __dbt__CTE__exchange_rate_ab2 tmp +from __dbt__cte__exchange_rate_ab2 tmp -- exchange_rate where 1 = 1 )-- Final base SQL model --- depends_on: 
__dbt__CTE__exchange_rate_ab3 +-- depends_on: __dbt__cte__exchange_rate_ab3 select id, currency, @@ -96,7 +96,7 @@ select CURRENT_TIMESTAMP as _airbyte_normalized_at, _airbyte_exchange_rate_hashid -from __dbt__CTE__exchange_rate_ab3 +from __dbt__cte__exchange_rate_ab3 -- exchange_rate from test_normalization._airbyte_raw_exchange_rate where 1 = 1 ) diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/mysql/test_simple_streams/first_output/airbyte_views/test_normalization/dedup_exchange_rate_stg.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/mysql/test_simple_streams/first_output/airbyte_views/test_normalization/dedup_exchange_rate_stg.sql index 00d7e578d293..c736f9f1ec46 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/mysql/test_simple_streams/first_output/airbyte_views/test_normalization/dedup_exchange_rate_stg.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/mysql/test_simple_streams/first_output/airbyte_views/test_normalization/dedup_exchange_rate_stg.sql @@ -1,7 +1,7 @@ create view _airbyte_test_normalization.`dedup_exchange_rate_stg__dbt_tmp` as ( -with __dbt__CTE__dedup_exchange_rate_ab1 as ( +with __dbt__cte__dedup_exchange_rate_ab1 as ( -- SQL model to parse JSON blob stored in a single column and extract into separated field columns as described by the JSON Schema -- depends_on: test_normalization._airbyte_raw_dedup_exchange_rate @@ -31,10 +31,10 @@ from test_normalization._airbyte_raw_dedup_exchange_rate as table_alias -- dedup_exchange_rate where 1 = 1 -), __dbt__CTE__dedup_exchange_rate_ab2 as ( +), __dbt__cte__dedup_exchange_rate_ab2 as ( -- SQL model to cast each column to its adequate SQL type converted from the JSON schema type --- depends_on: __dbt__CTE__dedup_exchange_rate_ab1 +-- depends_on: __dbt__cte__dedup_exchange_rate_ab1 select cast(id as 
signed @@ -60,17 +60,17 @@ select CURRENT_TIMESTAMP as _airbyte_normalized_at -from __dbt__CTE__dedup_exchange_rate_ab1 +from __dbt__cte__dedup_exchange_rate_ab1 -- dedup_exchange_rate where 1 = 1 )-- SQL model to build a hash column based on the values of this record --- depends_on: __dbt__CTE__dedup_exchange_rate_ab2 +-- depends_on: __dbt__cte__dedup_exchange_rate_ab2 select md5(cast(concat(coalesce(cast(id as char), ''), '-', coalesce(cast(currency as char), ''), '-', coalesce(cast(`date` as char), ''), '-', coalesce(cast(timestamp_col as char), ''), '-', coalesce(cast(`HKD@spƩƧiƤl & characters` as char), ''), '-', coalesce(cast(hkd_special___characters as char), ''), '-', coalesce(cast(nzd as char), ''), '-', coalesce(cast(usd as char), '')) as char)) as _airbyte_dedup_exchange_rate_hashid, tmp.* -from __dbt__CTE__dedup_exchange_rate_ab2 tmp +from __dbt__cte__dedup_exchange_rate_ab2 tmp -- dedup_exchange_rate where 1 = 1 - ); + ); \ No newline at end of file diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/mysql/test_simple_streams/first_output/airbyte_views/test_normalization/multiple_column_names_conflicts_stg.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/mysql/test_simple_streams/first_output/airbyte_views/test_normalization/multiple_column_names_conflicts_stg.sql index 55de2948c5ef..b9356dd1f6ba 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/mysql/test_simple_streams/first_output/airbyte_views/test_normalization/multiple_column_names_conflicts_stg.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/mysql/test_simple_streams/first_output/airbyte_views/test_normalization/multiple_column_names_conflicts_stg.sql @@ -1,7 +1,7 @@ create view _airbyte_test_normalization.`multiple_column_names_conflicts_stg__dbt_tmp` as ( -with 
__dbt__CTE__multiple_column_names_conflicts_ab1 as ( +with __dbt__cte__multiple_column_names_conflicts_ab1 as ( -- SQL model to parse JSON blob stored in a single column and extract into separated field columns as described by the JSON Schema -- depends_on: test_normalization._airbyte_raw_multiple_column_names_conflicts @@ -29,10 +29,10 @@ from test_normalization._airbyte_raw_multiple_column_names_conflicts as table_al -- multiple_column_names_conflicts where 1 = 1 -), __dbt__CTE__multiple_column_names_conflicts_ab2 as ( +), __dbt__cte__multiple_column_names_conflicts_ab2 as ( -- SQL model to cast each column to its adequate SQL type converted from the JSON schema type --- depends_on: __dbt__CTE__multiple_column_names_conflicts_ab1 +-- depends_on: __dbt__cte__multiple_column_names_conflicts_ab1 select cast(id as signed @@ -56,17 +56,17 @@ select CURRENT_TIMESTAMP as _airbyte_normalized_at -from __dbt__CTE__multiple_column_names_conflicts_ab1 +from __dbt__cte__multiple_column_names_conflicts_ab1 -- multiple_column_names_conflicts where 1 = 1 )-- SQL model to build a hash column based on the values of this record --- depends_on: __dbt__CTE__multiple_column_names_conflicts_ab2 +-- depends_on: __dbt__cte__multiple_column_names_conflicts_ab2 select md5(cast(concat(coalesce(cast(id as char), ''), '-', coalesce(cast(`User Id` as char), ''), '-', coalesce(cast(user_id as char), ''), '-', coalesce(cast(`User id_1` as char), ''), '-', coalesce(cast(`user id_2` as char), ''), '-', coalesce(cast(`User@Id` as char), ''), '-', coalesce(cast(userid as char), '')) as char)) as _airbyte_multiple_col__ames_conflicts_hashid, tmp.* -from __dbt__CTE__multiple_column_names_conflicts_ab2 tmp +from __dbt__cte__multiple_column_names_conflicts_ab2 tmp -- multiple_column_names_conflicts where 1 = 1 - ); + ); \ No newline at end of file diff --git 
a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/mysql/test_simple_streams/models/generated/airbyte_ctes/test_normalization/dedup_exchange_rate_ab1.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/mysql/test_simple_streams/models/generated/airbyte_ctes/test_normalization/dedup_exchange_rate_ab1.sql index 9b09b69fc5c2..670db0869ae2 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/mysql/test_simple_streams/models/generated/airbyte_ctes/test_normalization/dedup_exchange_rate_ab1.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/mysql/test_simple_streams/models/generated/airbyte_ctes/test_normalization/dedup_exchange_rate_ab1.sql @@ -20,5 +20,5 @@ select from {{ source('test_normalization', '_airbyte_raw_dedup_exchange_rate') }} as table_alias -- dedup_exchange_rate where 1 = 1 -{{ incremental_clause('_airbyte_emitted_at') }} +{{ incremental_clause('_airbyte_emitted_at', this) }} diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/mysql/test_simple_streams/models/generated/airbyte_ctes/test_normalization/dedup_exchange_rate_ab2.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/mysql/test_simple_streams/models/generated/airbyte_ctes/test_normalization/dedup_exchange_rate_ab2.sql index ca93b9a8d536..6ac42bbbe476 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/mysql/test_simple_streams/models/generated/airbyte_ctes/test_normalization/dedup_exchange_rate_ab2.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/mysql/test_simple_streams/models/generated/airbyte_ctes/test_normalization/dedup_exchange_rate_ab2.sql @@ -23,5 +23,5 @@ select from {{ ref('dedup_exchange_rate_ab1') }} -- 
dedup_exchange_rate where 1 = 1 -{{ incremental_clause('_airbyte_emitted_at') }} +{{ incremental_clause('_airbyte_emitted_at', this) }} diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/mysql/test_simple_streams/models/generated/airbyte_incremental/scd/test_normalization/dedup_exchange_rate_scd.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/mysql/test_simple_streams/models/generated/airbyte_incremental/scd/test_normalization/dedup_exchange_rate_scd.sql index 9bf09bdcaa8f..b1c2af62e4bf 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/mysql/test_simple_streams/models/generated/airbyte_incremental/scd/test_normalization/dedup_exchange_rate_scd.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/mysql/test_simple_streams/models/generated/airbyte_incremental/scd/test_normalization/dedup_exchange_rate_scd.sql @@ -1,7 +1,53 @@ {{ config( unique_key = "_airbyte_unique_key_scd", schema = "test_normalization", - post_hook = ["drop view _airbyte_test_normalization.dedup_exchange_rate_stg"], + post_hook = [" + {% + set final_table_relation = adapter.get_relation( + database=this.database, + schema=this.schema, + identifier='dedup_exchange_rate' + ) + %} + {# + If the final table doesn't exist, then obviously we can't delete anything from it. + Also, after a reset, the final table is created without the _airbyte_unique_key column (this column is created during the first sync) + So skip this deletion if the column doesn't exist. 
(in this case, the table is guaranteed to be empty anyway) + #} + {% + if final_table_relation is not none and '_airbyte_unique_key' in adapter.get_columns_in_relation(final_table_relation)|map(attribute='name') + %} + -- Delete records which are no longer active: + -- This query is equivalent, but the left join version is more performant: + -- delete from final_table where unique_key in ( + -- select unique_key from scd_table where 1 = 1 + -- ) and unique_key not in ( + -- select unique_key from scd_table where active_row = 1 + -- ) + -- We're incremental against normalized_at rather than emitted_at because we need to fetch the SCD + -- entries that were _updated_ recently. This is because a deleted record will have an SCD record + -- which was emitted a long time ago, but recently re-normalized to have active_row = 0. + delete from {{ final_table_relation }} where {{ final_table_relation }}._airbyte_unique_key in ( + select recent_records.unique_key + from ( + select distinct _airbyte_unique_key as unique_key + from {{ this }} + where 1=1 {{ incremental_clause('_airbyte_normalized_at', this.schema + '.' + adapter.quote('dedup_exchange_rate')) }} + ) recent_records + left join ( + select _airbyte_unique_key as unique_key, count(_airbyte_unique_key) as active_count + from {{ this }} + where _airbyte_active_row = 1 {{ incremental_clause('_airbyte_normalized_at', this.schema + '.' 
+ adapter.quote('dedup_exchange_rate')) }} + group by _airbyte_unique_key + ) active_counts + on recent_records.unique_key = active_counts.unique_key + where active_count is null or active_count = 0 + ) + {% else %} + -- We have to have a non-empty query, so just do a noop delete + delete from {{ this }} where 1=0 + {% endif %} + ","drop view _airbyte_test_normalization.dedup_exchange_rate_stg"], tags = [ "top-level" ] ) }} -- depends_on: ref('dedup_exchange_rate_stg') @@ -14,7 +60,7 @@ new_data as ( from {{ ref('dedup_exchange_rate_stg') }} -- dedup_exchange_rate from {{ source('test_normalization', '_airbyte_raw_dedup_exchange_rate') }} where 1 = 1 - {{ incremental_clause('_airbyte_emitted_at') }} + {{ incremental_clause('_airbyte_emitted_at', this) }} ), new_data_ids as ( -- build a subset of _airbyte_unique_key from rows that are new diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/mysql/test_simple_streams/models/generated/airbyte_incremental/test_normalization/dedup_exchange_rate.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/mysql/test_simple_streams/models/generated/airbyte_incremental/test_normalization/dedup_exchange_rate.sql index 07a2d8f3765c..dd4432bd60a5 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/mysql/test_simple_streams/models/generated/airbyte_incremental/test_normalization/dedup_exchange_rate.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/mysql/test_simple_streams/models/generated/airbyte_incremental/test_normalization/dedup_exchange_rate.sql @@ -23,5 +23,5 @@ from {{ ref('dedup_exchange_rate_scd') }} -- dedup_exchange_rate from {{ source('test_normalization', '_airbyte_raw_dedup_exchange_rate') }} where 1 = 1 and _airbyte_active_row = 1 -{{ incremental_clause('_airbyte_emitted_at') }} +{{ incremental_clause('_airbyte_emitted_at', 
this) }} diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/mysql/test_simple_streams/models/generated/airbyte_views/test_normalization/dedup_exchange_rate_stg.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/mysql/test_simple_streams/models/generated/airbyte_views/test_normalization/dedup_exchange_rate_stg.sql index 57c500151e06..86ec2c9e8b1b 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/mysql/test_simple_streams/models/generated/airbyte_views/test_normalization/dedup_exchange_rate_stg.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/mysql/test_simple_streams/models/generated/airbyte_views/test_normalization/dedup_exchange_rate_stg.sql @@ -20,5 +20,5 @@ select from {{ ref('dedup_exchange_rate_ab2') }} tmp -- dedup_exchange_rate where 1 = 1 -{{ incremental_clause('_airbyte_emitted_at') }} +{{ incremental_clause('_airbyte_emitted_at', this) }} diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/mysql/test_simple_streams/second_output/airbyte_tables/test_normalization/exchange_rate.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/mysql/test_simple_streams/second_output/airbyte_tables/test_normalization/exchange_rate.sql index 0ee59f50d8e7..fd770070d75f 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/mysql/test_simple_streams/second_output/airbyte_tables/test_normalization/exchange_rate.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/mysql/test_simple_streams/second_output/airbyte_tables/test_normalization/exchange_rate.sql @@ -4,7 +4,7 @@ test_normalization.`exchange_rate__dbt_tmp` as ( -with __dbt__CTE__exchange_rate_ab1 as ( +with __dbt__cte__exchange_rate_ab1 
as ( -- SQL model to parse JSON blob stored in a single column and extract into separated field columns as described by the JSON Schema -- depends_on: test_normalization._airbyte_raw_exchange_rate @@ -35,10 +35,10 @@ select from test_normalization._airbyte_raw_exchange_rate as table_alias -- exchange_rate where 1 = 1 -), __dbt__CTE__exchange_rate_ab2 as ( +), __dbt__cte__exchange_rate_ab2 as ( -- SQL model to cast each column to its adequate SQL type converted from the JSON schema type --- depends_on: __dbt__CTE__exchange_rate_ab1 +-- depends_on: __dbt__cte__exchange_rate_ab1 select cast(id as signed @@ -65,21 +65,21 @@ select CURRENT_TIMESTAMP as _airbyte_normalized_at -from __dbt__CTE__exchange_rate_ab1 +from __dbt__cte__exchange_rate_ab1 -- exchange_rate where 1 = 1 -), __dbt__CTE__exchange_rate_ab3 as ( +), __dbt__cte__exchange_rate_ab3 as ( -- SQL model to build a hash column based on the values of this record --- depends_on: __dbt__CTE__exchange_rate_ab2 +-- depends_on: __dbt__cte__exchange_rate_ab2 select md5(cast(concat(coalesce(cast(id as char), ''), '-', coalesce(cast(currency as char), ''), '-', coalesce(cast(`date` as char), ''), '-', coalesce(cast(timestamp_col as char), ''), '-', coalesce(cast(`HKD@spƩƧiƤl & characters` as char), ''), '-', coalesce(cast(hkd_special___characters as char), ''), '-', coalesce(cast(nzd as char), ''), '-', coalesce(cast(usd as char), ''), '-', coalesce(cast(`column__'with"_quotes` as char), '')) as char)) as _airbyte_exchange_rate_hashid, tmp.* -from __dbt__CTE__exchange_rate_ab2 tmp +from __dbt__cte__exchange_rate_ab2 tmp -- exchange_rate where 1 = 1 )-- Final base SQL model --- depends_on: __dbt__CTE__exchange_rate_ab3 +-- depends_on: __dbt__cte__exchange_rate_ab3 select id, currency, @@ -96,7 +96,7 @@ select CURRENT_TIMESTAMP as _airbyte_normalized_at, _airbyte_exchange_rate_hashid -from __dbt__CTE__exchange_rate_ab3 +from __dbt__cte__exchange_rate_ab3 -- exchange_rate from test_normalization._airbyte_raw_exchange_rate 
where 1 = 1 ) diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/mysql/test_simple_streams/second_output/airbyte_views/test_normalization/dedup_exchange_rate_stg.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/mysql/test_simple_streams/second_output/airbyte_views/test_normalization/dedup_exchange_rate_stg.sql index 00d7e578d293..c736f9f1ec46 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/mysql/test_simple_streams/second_output/airbyte_views/test_normalization/dedup_exchange_rate_stg.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/mysql/test_simple_streams/second_output/airbyte_views/test_normalization/dedup_exchange_rate_stg.sql @@ -1,7 +1,7 @@ create view _airbyte_test_normalization.`dedup_exchange_rate_stg__dbt_tmp` as ( -with __dbt__CTE__dedup_exchange_rate_ab1 as ( +with __dbt__cte__dedup_exchange_rate_ab1 as ( -- SQL model to parse JSON blob stored in a single column and extract into separated field columns as described by the JSON Schema -- depends_on: test_normalization._airbyte_raw_dedup_exchange_rate @@ -31,10 +31,10 @@ from test_normalization._airbyte_raw_dedup_exchange_rate as table_alias -- dedup_exchange_rate where 1 = 1 -), __dbt__CTE__dedup_exchange_rate_ab2 as ( +), __dbt__cte__dedup_exchange_rate_ab2 as ( -- SQL model to cast each column to its adequate SQL type converted from the JSON schema type --- depends_on: __dbt__CTE__dedup_exchange_rate_ab1 +-- depends_on: __dbt__cte__dedup_exchange_rate_ab1 select cast(id as signed @@ -60,17 +60,17 @@ select CURRENT_TIMESTAMP as _airbyte_normalized_at -from __dbt__CTE__dedup_exchange_rate_ab1 +from __dbt__cte__dedup_exchange_rate_ab1 -- dedup_exchange_rate where 1 = 1 )-- SQL model to build a hash column based on the values of this record --- depends_on: __dbt__CTE__dedup_exchange_rate_ab2 +-- 
depends_on: __dbt__cte__dedup_exchange_rate_ab2 select md5(cast(concat(coalesce(cast(id as char), ''), '-', coalesce(cast(currency as char), ''), '-', coalesce(cast(`date` as char), ''), '-', coalesce(cast(timestamp_col as char), ''), '-', coalesce(cast(`HKD@spƩƧiƤl & characters` as char), ''), '-', coalesce(cast(hkd_special___characters as char), ''), '-', coalesce(cast(nzd as char), ''), '-', coalesce(cast(usd as char), '')) as char)) as _airbyte_dedup_exchange_rate_hashid, tmp.* -from __dbt__CTE__dedup_exchange_rate_ab2 tmp +from __dbt__cte__dedup_exchange_rate_ab2 tmp -- dedup_exchange_rate where 1 = 1 - ); + ); \ No newline at end of file diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/oracle/test_simple_streams/dbt_project.yml b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/oracle/test_simple_streams/dbt_project.yml index 7ad95ea5f941..a696787c00ab 100755 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/oracle/test_simple_streams/dbt_project.yml +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/oracle/test_simple_streams/dbt_project.yml @@ -1,43 +1,29 @@ -# This file is necessary to install dbt-utils with dbt deps -# the content will be overwritten by the transform function - -# Name your package! Package names should contain only lowercase characters -# and underscores. A good package name should reflect your organization's -# name or the intended use of these models -name: "airbyte_utils" +name: airbyte_utils version: "1.0" config-version: 2 - -# This setting configures which "profile" dbt uses for this project. Profiles contain -# database connection information, and should be configured in the ~/.dbt/profiles.yml file -profile: "normalize" - -# These configurations specify where dbt should look for different types of files. 
-# The `source-paths` config, for example, states that source models can be found -# in the "models/" directory. You probably won't need to change these! -source-paths: ["models"] -docs-paths: ["docs"] -analysis-paths: ["analysis"] -test-paths: ["tests"] -data-paths: ["data"] -macro-paths: ["macros"] - -target-path: "../build" # directory which will store compiled SQL files -log-path: "../logs" # directory which will store DBT logs -modules-path: "/dbt" # directory which will store external DBT dependencies - -clean-targets: # directories to be removed by `dbt clean` - - "build" - - "dbt_modules" - +profile: normalize +source-paths: + - models +docs-paths: + - docs +analysis-paths: + - analysis +test-paths: + - tests +data-paths: + - data +macro-paths: + - macros +target-path: ../build +log-path: ../logs +modules-path: /dbt +clean-targets: + - build + - dbt_modules quoting: database: false schema: false identifier: false - -# You can define configurations for models in the `source-paths` directory here. -# Using these configurations, you can enable or disable models, change how they -# are materialized, and more! 
models: airbyte_utils: +materialized: table @@ -47,8 +33,6 @@ models: +materialized: ephemeral airbyte_incremental: +tags: incremental_tables - # incremental is not enabled for Oracle yet - #+materialized: incremental +materialized: table airbyte_tables: +tags: normalized_tables @@ -56,6 +40,42 @@ models: airbyte_views: +tags: airbyte_internal_views +materialized: view - vars: - dbt_utils_dispatch_list: ["airbyte_utils"] + dbt_utils_dispatch_list: + - airbyte_utils + json_column: _airbyte_data + models_to_source: + exchange_rate_ab1: test_normalization.airbyte_raw_exchange_rate + exchange_rate_ab2: test_normalization.airbyte_raw_exchange_rate + exchange_rate_ab3: test_normalization.airbyte_raw_exchange_rate + exchange_rate: test_normalization.airbyte_raw_exchange_rate + dedup_exchange_rate_ab1: test_normalization.airbyte_raw_dedup_exchange_rate + dedup_exchange_rate_ab2: test_normalization.airbyte_raw_dedup_exchange_rate + dedup_exchange_rate_stg: test_normalization.airbyte_raw_dedup_exchange_rate + dedup_exchange_rate_scd: test_normalization.airbyte_raw_dedup_exchange_rate + dedup_exchange_rate: test_normalization.airbyte_raw_dedup_exchange_rate + renamed_dedup_cdc_excluded_ab1: test_normalization.airbyte_raw_renamed_dedup_cdc_excluded + renamed_dedup_cdc_excluded_ab2: test_normalization.airbyte_raw_renamed_dedup_cdc_excluded + renamed_dedup_cdc_excluded_stg: test_normalization.airbyte_raw_renamed_dedup_cdc_excluded + renamed_dedup_cdc_excluded_scd: test_normalization.airbyte_raw_renamed_dedup_cdc_excluded + renamed_dedup_cdc_excluded: test_normalization.airbyte_raw_renamed_dedup_cdc_excluded + dedup_cdc_excluded_ab1: test_normalization.airbyte_raw_dedup_cdc_excluded + dedup_cdc_excluded_ab2: test_normalization.airbyte_raw_dedup_cdc_excluded + dedup_cdc_excluded_stg: test_normalization.airbyte_raw_dedup_cdc_excluded + dedup_cdc_excluded_scd: test_normalization.airbyte_raw_dedup_cdc_excluded + dedup_cdc_excluded: test_normalization.airbyte_raw_dedup_cdc_excluded + 
pos_dedup_cdcx_ab1: test_normalization.airbyte_raw_pos_dedup_cdcx + pos_dedup_cdcx_ab2: test_normalization.airbyte_raw_pos_dedup_cdcx + pos_dedup_cdcx_stg: test_normalization.airbyte_raw_pos_dedup_cdcx + pos_dedup_cdcx_scd: test_normalization.airbyte_raw_pos_dedup_cdcx + pos_dedup_cdcx: test_normalization.airbyte_raw_pos_dedup_cdcx + ab_1_prefix_startwith_number_ab1: test_normalization.airbyte_raw_1_prefix_startwith_number + ab_1_prefix_startwith_number_ab2: test_normalization.airbyte_raw_1_prefix_startwith_number + ab_1_prefix_startwith_number_stg: test_normalization.airbyte_raw_1_prefix_startwith_number + ab_1_prefix_startwith_number_scd: test_normalization.airbyte_raw_1_prefix_startwith_number + ab_1_prefix_startwith_number: test_normalization.airbyte_raw_1_prefix_startwith_number + multiple_column_names_conflicts_ab1: test_normalization.airbyte_raw_multiple_column_names_conflicts + multiple_column_names_conflicts_ab2: test_normalization.airbyte_raw_multiple_column_names_conflicts + multiple_column_names_conflicts_stg: test_normalization.airbyte_raw_multiple_column_names_conflicts + multiple_column_names_conflicts_scd: test_normalization.airbyte_raw_multiple_column_names_conflicts + multiple_column_names_conflicts: test_normalization.airbyte_raw_multiple_column_names_conflicts diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/oracle/test_simple_streams/models/generated/airbyte_ctes/test_normalization/dedup_exchange_rate_ab1.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/oracle/test_simple_streams/models/generated/airbyte_ctes/test_normalization/dedup_exchange_rate_ab1.sql index ccd95966bfc7..f6b2863d9c44 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/oracle/test_simple_streams/models/generated/airbyte_ctes/test_normalization/dedup_exchange_rate_ab1.sql +++ 
b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/oracle/test_simple_streams/models/generated/airbyte_ctes/test_normalization/dedup_exchange_rate_ab1.sql @@ -20,5 +20,5 @@ select from {{ source('test_normalization', 'airbyte_raw_dedup_exchange_rate') }} -- dedup_exchange_rate where 1 = 1 -{{ incremental_clause(quote('_AIRBYTE_EMITTED_AT')) }} +{{ incremental_clause(quote('_AIRBYTE_EMITTED_AT'), this) }} diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/oracle/test_simple_streams/models/generated/airbyte_ctes/test_normalization/dedup_exchange_rate_ab2.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/oracle/test_simple_streams/models/generated/airbyte_ctes/test_normalization/dedup_exchange_rate_ab2.sql index 97defa7b1ba2..f3158bc2e919 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/oracle/test_simple_streams/models/generated/airbyte_ctes/test_normalization/dedup_exchange_rate_ab2.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/oracle/test_simple_streams/models/generated/airbyte_ctes/test_normalization/dedup_exchange_rate_ab2.sql @@ -20,5 +20,5 @@ select from {{ ref('dedup_exchange_rate_ab1') }} -- dedup_exchange_rate where 1 = 1 -{{ incremental_clause(quote('_AIRBYTE_EMITTED_AT')) }} +{{ incremental_clause(quote('_AIRBYTE_EMITTED_AT'), this) }} diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/oracle/test_simple_streams/models/generated/airbyte_incremental/scd/test_normalization/dedup_exchange_rate_scd.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/oracle/test_simple_streams/models/generated/airbyte_incremental/scd/test_normalization/dedup_exchange_rate_scd.sql index 712f6bd74752..9320dbc51f60 100644 --- 
a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/oracle/test_simple_streams/models/generated/airbyte_incremental/scd/test_normalization/dedup_exchange_rate_scd.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/oracle/test_simple_streams/models/generated/airbyte_incremental/scd/test_normalization/dedup_exchange_rate_scd.sql @@ -1,7 +1,53 @@ {{ config( unique_key = "{{ quote('_AIRBYTE_UNIQUE_KEY_SCD') }}", schema = "test_normalization", - post_hook = ["drop view test_normalization.dedup_exchange_rate_stg"], + post_hook = [" + {% + set final_table_relation = adapter.get_relation( + database=this.database, + schema=this.schema, + identifier='dedup_exchange_rate' + ) + %} + {# + If the final table doesn't exist, then obviously we can't delete anything from it. + Also, after a reset, the final table is created without the _airbyte_unique_key column (this column is created during the first sync) + So skip this deletion if the column doesn't exist. (in this case, the table is guaranteed to be empty anyway) + #} + {% + if final_table_relation is not none and quote('_AIRBYTE_UNIQUE_KEY') in adapter.get_columns_in_relation(final_table_relation)|map(attribute='name') + %} + -- Delete records which are no longer active: + -- This query is equivalent, but the left join version is more performant: + -- delete from final_table where unique_key in ( + -- select unique_key from scd_table where 1 = 1 + -- ) and unique_key not in ( + -- select unique_key from scd_table where active_row = 1 + -- ) + -- We're incremental against normalized_at rather than emitted_at because we need to fetch the SCD + -- entries that were _updated_ recently. This is because a deleted record will have an SCD record + -- which was emitted a long time ago, but recently re-normalized to have active_row = 0. 
+ delete from {{ final_table_relation }} where {{ final_table_relation }}.{{ quote('_AIRBYTE_UNIQUE_KEY') }} in ( + select recent_records.unique_key + from ( + select distinct {{ quote('_AIRBYTE_UNIQUE_KEY') }} as unique_key + from {{ this }} + where 1=1 {{ incremental_clause(quote('_AIRBYTE_NORMALIZED_AT'), this.schema + '.' + quote('dedup_exchange_rate')) }} + ) recent_records + left join ( + select {{ quote('_AIRBYTE_UNIQUE_KEY') }} as unique_key, count({{ quote('_AIRBYTE_UNIQUE_KEY') }}) as active_count + from {{ this }} + where {{ quote('_AIRBYTE_ACTIVE_ROW') }} = 1 {{ incremental_clause(quote('_AIRBYTE_NORMALIZED_AT'), this.schema + '.' + quote('dedup_exchange_rate')) }} + group by {{ quote('_AIRBYTE_UNIQUE_KEY') }} + ) active_counts + on recent_records.unique_key = active_counts.unique_key + where active_count is null or active_count = 0 + ) + {% else %} + -- We have to have a non-empty query, so just do a noop delete + delete from {{ this }} where 1=0 + {% endif %} + ","drop view test_normalization.dedup_exchange_rate_stg"], tags = [ "top-level" ] ) }} -- depends_on: ref('dedup_exchange_rate_stg') @@ -14,7 +60,7 @@ new_data as ( from {{ ref('dedup_exchange_rate_stg') }} -- dedup_exchange_rate from {{ source('test_normalization', 'airbyte_raw_dedup_exchange_rate') }} where 1 = 1 - {{ incremental_clause(quote('_AIRBYTE_EMITTED_AT')) }} + {{ incremental_clause(quote('_AIRBYTE_EMITTED_AT'), this) }} ), new_data_ids as ( -- build a subset of {{ quote('_AIRBYTE_UNIQUE_KEY') }} from rows that are new diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/oracle/test_simple_streams/models/generated/airbyte_incremental/test_normalization/dedup_exchange_rate.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/oracle/test_simple_streams/models/generated/airbyte_incremental/test_normalization/dedup_exchange_rate.sql index e361c6364809..316e40041835 100644 --- 
a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/oracle/test_simple_streams/models/generated/airbyte_incremental/test_normalization/dedup_exchange_rate.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/oracle/test_simple_streams/models/generated/airbyte_incremental/test_normalization/dedup_exchange_rate.sql @@ -23,5 +23,5 @@ from {{ ref('dedup_exchange_rate_scd') }} -- dedup_exchange_rate from {{ source('test_normalization', 'airbyte_raw_dedup_exchange_rate') }} where 1 = 1 and {{ quote('_AIRBYTE_ACTIVE_ROW') }} = 1 -{{ incremental_clause(quote('_AIRBYTE_EMITTED_AT')) }} +{{ incremental_clause(quote('_AIRBYTE_EMITTED_AT'), this) }} diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/oracle/test_simple_streams/models/generated/airbyte_views/test_normalization/dedup_exchange_rate_stg.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/oracle/test_simple_streams/models/generated/airbyte_views/test_normalization/dedup_exchange_rate_stg.sql index c78f87fe59f1..15c9c07d71e9 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/oracle/test_simple_streams/models/generated/airbyte_views/test_normalization/dedup_exchange_rate_stg.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/oracle/test_simple_streams/models/generated/airbyte_views/test_normalization/dedup_exchange_rate_stg.sql @@ -20,5 +20,5 @@ select from {{ ref('dedup_exchange_rate_ab2') }} tmp -- dedup_exchange_rate where 1 = 1 -{{ incremental_clause(quote('_AIRBYTE_EMITTED_AT')) }} +{{ incremental_clause(quote('_AIRBYTE_EMITTED_AT'), this) }} diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/dbt_project.yml 
b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/dbt_project.yml index 7631ef356dc9..fa54af3b1a08 100755 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/dbt_project.yml +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/dbt_project.yml @@ -1,45 +1,29 @@ -# This file is necessary to install dbt-utils with dbt deps -# the content will be overwritten by the transform function - -# Name your package! Package names should contain only lowercase characters -# and underscores. A good package name should reflect your organization's -# name or the intended use of these models -name: "airbyte_utils" +name: airbyte_utils version: "1.0" config-version: 2 - -# This setting configures which "profile" dbt uses for this project. Profiles contain -# database connection information, and should be configured in the ~/.dbt/profiles.yml file -profile: "normalize" - -# These configurations specify where dbt should look for different types of files. -# The `model-paths` config, for example, states that source models can be found -# in the "models/" directory. You probably won't need to change these! 
-model-paths: ["models"] -docs-paths: ["docs"] -analysis-paths: ["analysis"] -test-paths: ["tests"] -seed-paths: ["data"] -macro-paths: ["macros"] - -target-path: "../build" # directory which will store compiled SQL files -log-path: "../logs" # directory which will store DBT logs -packages-install-path: "/dbt" # directory which will store external DBT dependencies - -clean-targets: # directories to be removed by `dbt clean` - - "build" - - "dbt_modules" - +profile: normalize +model-paths: + - models +docs-paths: + - docs +analysis-paths: + - analysis +test-paths: + - tests +seed-paths: + - data +macro-paths: + - macros +target-path: ../build +log-path: ../logs +packages-install-path: /dbt +clean-targets: + - build + - dbt_modules quoting: database: true - # Temporarily disabling the behavior of the ExtendedNameTransformer on table/schema names, see (issue #1785) - # all schemas should be unquoted schema: false identifier: true - -# You can define configurations for models in the `model-paths` directory here. -# Using these configurations, you can enable or disable models, change how they -# are materialized, and more! 
models: airbyte_utils: +materialized: table @@ -57,7 +41,77 @@ models: airbyte_views: +tags: airbyte_internal_views +materialized: view - dispatch: - macro_namespace: dbt_utils - search_order: ["airbyte_utils", "dbt_utils"] + search_order: + - airbyte_utils + - dbt_utils +vars: + json_column: _airbyte_data + models_to_source: + nested_stream_with_c__lting_into_long_names_ab1: test_normalization._airbyte_raw_nested_stream_with_complex_columns_resulting_into_long_names + nested_stream_with_c__lting_into_long_names_ab2: test_normalization._airbyte_raw_nested_stream_with_complex_columns_resulting_into_long_names + nested_stream_with_c__lting_into_long_names_stg: test_normalization._airbyte_raw_nested_stream_with_complex_columns_resulting_into_long_names + nested_stream_with_c__lting_into_long_names_scd: test_normalization._airbyte_raw_nested_stream_with_complex_columns_resulting_into_long_names + nested_stream_with_c__lting_into_long_names: test_normalization._airbyte_raw_nested_stream_with_complex_columns_resulting_into_long_names + non_nested_stream_wi__lting_into_long_names_ab1: test_normalization._airbyte_raw_non_nested_stream_without_namespace_resulting_into_long_names + non_nested_stream_wi__lting_into_long_names_ab2: test_normalization._airbyte_raw_non_nested_stream_without_namespace_resulting_into_long_names + non_nested_stream_wi__lting_into_long_names_ab3: test_normalization._airbyte_raw_non_nested_stream_without_namespace_resulting_into_long_names + non_nested_stream_wi__lting_into_long_names: test_normalization._airbyte_raw_non_nested_stream_without_namespace_resulting_into_long_names + some_stream_that_was_empty_ab1: test_normalization._airbyte_raw_some_stream_that_was_empty + some_stream_that_was_empty_ab2: test_normalization._airbyte_raw_some_stream_that_was_empty + some_stream_that_was_empty_stg: test_normalization._airbyte_raw_some_stream_that_was_empty + some_stream_that_was_empty_scd: test_normalization._airbyte_raw_some_stream_that_was_empty + 
some_stream_that_was_empty: test_normalization._airbyte_raw_some_stream_that_was_empty + simple_stream_with_n__lting_into_long_names_ab1: test_normalization_namespace._airbyte_raw_simple_stream_with_namespace_resulting_into_long_names + simple_stream_with_n__lting_into_long_names_ab2: test_normalization_namespace._airbyte_raw_simple_stream_with_namespace_resulting_into_long_names + simple_stream_with_n__lting_into_long_names_ab3: test_normalization_namespace._airbyte_raw_simple_stream_with_namespace_resulting_into_long_names + simple_stream_with_n__lting_into_long_names: test_normalization_namespace._airbyte_raw_simple_stream_with_namespace_resulting_into_long_names + conflict_stream_name_ab1: test_normalization._airbyte_raw_conflict_stream_name + conflict_stream_name_ab2: test_normalization._airbyte_raw_conflict_stream_name + conflict_stream_name_ab3: test_normalization._airbyte_raw_conflict_stream_name + conflict_stream_name: test_normalization._airbyte_raw_conflict_stream_name + conflict_stream_scalar_ab1: test_normalization._airbyte_raw_conflict_stream_scalar + conflict_stream_scalar_ab2: test_normalization._airbyte_raw_conflict_stream_scalar + conflict_stream_scalar_ab3: test_normalization._airbyte_raw_conflict_stream_scalar + conflict_stream_scalar: test_normalization._airbyte_raw_conflict_stream_scalar + conflict_stream_array_ab1: test_normalization._airbyte_raw_conflict_stream_array + conflict_stream_array_ab2: test_normalization._airbyte_raw_conflict_stream_array + conflict_stream_array_ab3: test_normalization._airbyte_raw_conflict_stream_array + conflict_stream_array: test_normalization._airbyte_raw_conflict_stream_array + unnest_alias_ab1: test_normalization._airbyte_raw_unnest_alias + unnest_alias_ab2: test_normalization._airbyte_raw_unnest_alias + unnest_alias_ab3: test_normalization._airbyte_raw_unnest_alias + unnest_alias: test_normalization._airbyte_raw_unnest_alias + nested_stream_with_c___long_names_partition_ab1: 
test_normalization._airbyte_raw_nested_stream_with_complex_columns_resulting_into_long_names + nested_stream_with_c___long_names_partition_ab2: test_normalization._airbyte_raw_nested_stream_with_complex_columns_resulting_into_long_names + nested_stream_with_c___long_names_partition_ab3: test_normalization._airbyte_raw_nested_stream_with_complex_columns_resulting_into_long_names + nested_stream_with_c___long_names_partition: test_normalization._airbyte_raw_nested_stream_with_complex_columns_resulting_into_long_names + conflict_stream_name_conflict_stream_name_ab1: test_normalization._airbyte_raw_conflict_stream_name + conflict_stream_name_conflict_stream_name_ab2: test_normalization._airbyte_raw_conflict_stream_name + conflict_stream_name_conflict_stream_name_ab3: test_normalization._airbyte_raw_conflict_stream_name + conflict_stream_name_conflict_stream_name: test_normalization._airbyte_raw_conflict_stream_name + unnest_alias_children_ab1: test_normalization._airbyte_raw_unnest_alias + unnest_alias_children_ab2: test_normalization._airbyte_raw_unnest_alias + unnest_alias_children_ab3: test_normalization._airbyte_raw_unnest_alias + unnest_alias_children: test_normalization._airbyte_raw_unnest_alias + nested_stream_with_c__ion_double_array_data_ab1: test_normalization._airbyte_raw_nested_stream_with_complex_columns_resulting_into_long_names + nested_stream_with_c__ion_double_array_data_ab2: test_normalization._airbyte_raw_nested_stream_with_complex_columns_resulting_into_long_names + nested_stream_with_c__ion_double_array_data_ab3: test_normalization._airbyte_raw_nested_stream_with_complex_columns_resulting_into_long_names + nested_stream_with_c__ion_double_array_data: test_normalization._airbyte_raw_nested_stream_with_complex_columns_resulting_into_long_names + nested_stream_with_c___names_partition_data_ab1: test_normalization._airbyte_raw_nested_stream_with_complex_columns_resulting_into_long_names + nested_stream_with_c___names_partition_data_ab2: 
test_normalization._airbyte_raw_nested_stream_with_complex_columns_resulting_into_long_names + nested_stream_with_c___names_partition_data_ab3: test_normalization._airbyte_raw_nested_stream_with_complex_columns_resulting_into_long_names + nested_stream_with_c___names_partition_data: test_normalization._airbyte_raw_nested_stream_with_complex_columns_resulting_into_long_names + conflict_stream_name___conflict_stream_name_ab1: test_normalization._airbyte_raw_conflict_stream_name + conflict_stream_name___conflict_stream_name_ab2: test_normalization._airbyte_raw_conflict_stream_name + conflict_stream_name___conflict_stream_name_ab3: test_normalization._airbyte_raw_conflict_stream_name + conflict_stream_name___conflict_stream_name: test_normalization._airbyte_raw_conflict_stream_name + unnest_alias_children_owner_ab1: test_normalization._airbyte_raw_unnest_alias + unnest_alias_children_owner_ab2: test_normalization._airbyte_raw_unnest_alias + unnest_alias_children_owner_ab3: test_normalization._airbyte_raw_unnest_alias + unnest_alias_children_owner: test_normalization._airbyte_raw_unnest_alias + unnest_alias_childre__column___with__quotes_ab1: test_normalization._airbyte_raw_unnest_alias + unnest_alias_childre__column___with__quotes_ab2: test_normalization._airbyte_raw_unnest_alias + unnest_alias_childre__column___with__quotes_ab3: test_normalization._airbyte_raw_unnest_alias + unnest_alias_childre__column___with__quotes: test_normalization._airbyte_raw_unnest_alias diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/first_output/airbyte_incremental/scd/test_normalization/nested_stream_with_c__lting_into_long_names_scd.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/first_output/airbyte_incremental/scd/test_normalization/nested_stream_with_c__lting_into_long_names_scd.sql index b5d7f740ba6e..150407b1fbdf 100644 --- 
a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/first_output/airbyte_incremental/scd/test_normalization/nested_stream_with_c__lting_into_long_names_scd.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/first_output/airbyte_incremental/scd/test_normalization/nested_stream_with_c__lting_into_long_names_scd.sql @@ -16,11 +16,7 @@ input_data as ( scd_data as ( -- SQL model to build a Type 2 Slowly Changing Dimension (SCD) table for each record identified by their primary key select - md5(cast(coalesce(cast("id" as - varchar -), '') as - varchar -)) as _airbyte_unique_key, + md5(cast(coalesce(cast("id" as text), '') as text)) as _airbyte_unique_key, "id", "date", "partition", @@ -55,15 +51,7 @@ dedup_data as ( _airbyte_emitted_at order by _airbyte_active_row desc, _airbyte_ab_id ) as _airbyte_row_num, - md5(cast(coalesce(cast(_airbyte_unique_key as - varchar -), '') || '-' || coalesce(cast(_airbyte_start_at as - varchar -), '') || '-' || coalesce(cast(_airbyte_emitted_at as - varchar -), '') as - varchar -)) as _airbyte_unique_key_scd, + md5(cast(coalesce(cast(_airbyte_unique_key as text), '') || '-' || coalesce(cast(_airbyte_start_at as text), '') || '-' || coalesce(cast(_airbyte_emitted_at as text), '') as text)) as _airbyte_unique_key_scd, scd_data.* from scd_data ) diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/first_output/airbyte_incremental/scd/test_normalization/some_stream_that_was_empty_scd.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/first_output/airbyte_incremental/scd/test_normalization/some_stream_that_was_empty_scd.sql index 53ef64cb928a..885ba6546326 100644 --- 
a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/first_output/airbyte_incremental/scd/test_normalization/some_stream_that_was_empty_scd.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/first_output/airbyte_incremental/scd/test_normalization/some_stream_that_was_empty_scd.sql @@ -16,11 +16,7 @@ input_data as ( scd_data as ( -- SQL model to build a Type 2 Slowly Changing Dimension (SCD) table for each record identified by their primary key select - md5(cast(coalesce(cast("id" as - varchar -), '') as - varchar -)) as _airbyte_unique_key, + md5(cast(coalesce(cast("id" as text), '') as text)) as _airbyte_unique_key, "id", "date", "date" as _airbyte_start_at, @@ -54,15 +50,7 @@ dedup_data as ( _airbyte_emitted_at order by _airbyte_active_row desc, _airbyte_ab_id ) as _airbyte_row_num, - md5(cast(coalesce(cast(_airbyte_unique_key as - varchar -), '') || '-' || coalesce(cast(_airbyte_start_at as - varchar -), '') || '-' || coalesce(cast(_airbyte_emitted_at as - varchar -), '') as - varchar -)) as _airbyte_unique_key_scd, + md5(cast(coalesce(cast(_airbyte_unique_key as text), '') || '-' || coalesce(cast(_airbyte_start_at as text), '') || '-' || coalesce(cast(_airbyte_emitted_at as text), '') as text)) as _airbyte_unique_key_scd, scd_data.* from scd_data ) diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/first_output/airbyte_incremental/test_normalization/nested_stream_with_c___long_names_partition.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/first_output/airbyte_incremental/test_normalization/nested_stream_with_c___long_names_partition.sql index bb7fbe5b2852..c2170eeb4df2 100644 --- 
a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/first_output/airbyte_incremental/test_normalization/nested_stream_with_c___long_names_partition.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/first_output/airbyte_incremental/test_normalization/nested_stream_with_c___long_names_partition.sql @@ -40,15 +40,7 @@ where 1 = 1 -- SQL model to build a hash column based on the values of this record -- depends_on: __dbt__cte__nested_stream_with_c___long_names_partition_ab2 select - md5(cast(coalesce(cast(_airbyte_nested_stre__nto_long_names_hashid as - varchar -), '') || '-' || coalesce(cast(double_array_data as - varchar -), '') || '-' || coalesce(cast("DATA" as - varchar -), '') as - varchar -)) as _airbyte_partition_hashid, + md5(cast(coalesce(cast(_airbyte_nested_stre__nto_long_names_hashid as text), '') || '-' || coalesce(cast(double_array_data as text), '') || '-' || coalesce(cast("DATA" as text), '') as text)) as _airbyte_partition_hashid, tmp.* from __dbt__cte__nested_stream_with_c___long_names_partition_ab2 tmp -- partition at nested_stream_with_complex_columns_resulting_into_long_names/partition diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/first_output/airbyte_incremental/test_normalization/nested_stream_with_c___names_partition_data.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/first_output/airbyte_incremental/test_normalization/nested_stream_with_c___names_partition_data.sql index 76d0f6c37973..36a8a151153a 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/first_output/airbyte_incremental/test_normalization/nested_stream_with_c___names_partition_data.sql +++ 
b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/first_output/airbyte_incremental/test_normalization/nested_stream_with_c___names_partition_data.sql @@ -31,9 +31,7 @@ and "DATA" is not null -- depends_on: __dbt__cte__nested_stream_with_c___names_partition_data_ab1 select _airbyte_partition_hashid, - cast(currency as - varchar -) as currency, + cast(currency as text) as currency, _airbyte_ab_id, _airbyte_emitted_at, now() as _airbyte_normalized_at @@ -46,13 +44,7 @@ where 1 = 1 -- SQL model to build a hash column based on the values of this record -- depends_on: __dbt__cte__nested_stream_with_c___names_partition_data_ab2 select - md5(cast(coalesce(cast(_airbyte_partition_hashid as - varchar -), '') || '-' || coalesce(cast(currency as - varchar -), '') as - varchar -)) as _airbyte_data_hashid, + md5(cast(coalesce(cast(_airbyte_partition_hashid as text), '') || '-' || coalesce(cast(currency as text), '') as text)) as _airbyte_data_hashid, tmp.* from __dbt__cte__nested_stream_with_c___names_partition_data_ab2 tmp -- DATA at nested_stream_with_complex_columns_resulting_into_long_names/partition/DATA diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/first_output/airbyte_incremental/test_normalization/nested_stream_with_c__ion_double_array_data.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/first_output/airbyte_incremental/test_normalization/nested_stream_with_c__ion_double_array_data.sql index 7ffecd5d71c9..4b6ec7808487 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/first_output/airbyte_incremental/test_normalization/nested_stream_with_c__ion_double_array_data.sql +++ 
b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/first_output/airbyte_incremental/test_normalization/nested_stream_with_c__ion_double_array_data.sql @@ -31,9 +31,7 @@ and double_array_data is not null -- depends_on: __dbt__cte__nested_stream_with_c__ion_double_array_data_ab1 select _airbyte_partition_hashid, - cast("id" as - varchar -) as "id", + cast("id" as text) as "id", _airbyte_ab_id, _airbyte_emitted_at, now() as _airbyte_normalized_at @@ -46,13 +44,7 @@ where 1 = 1 -- SQL model to build a hash column based on the values of this record -- depends_on: __dbt__cte__nested_stream_with_c__ion_double_array_data_ab2 select - md5(cast(coalesce(cast(_airbyte_partition_hashid as - varchar -), '') || '-' || coalesce(cast("id" as - varchar -), '') as - varchar -)) as _airbyte_double_array_data_hashid, + md5(cast(coalesce(cast(_airbyte_partition_hashid as text), '') || '-' || coalesce(cast("id" as text), '') as text)) as _airbyte_double_array_data_hashid, tmp.* from __dbt__cte__nested_stream_with_c__ion_double_array_data_ab2 tmp -- double_array_data at nested_stream_with_complex_columns_resulting_into_long_names/partition/double_array_data diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/first_output/airbyte_incremental/test_normalization/nested_stream_with_c__lting_into_long_names_stg.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/first_output/airbyte_incremental/test_normalization/nested_stream_with_c__lting_into_long_names_stg.sql index 0b9498b27453..9062ea955a07 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/first_output/airbyte_incremental/test_normalization/nested_stream_with_c__lting_into_long_names_stg.sql +++ 
b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/first_output/airbyte_incremental/test_normalization/nested_stream_with_c__lting_into_long_names_stg.sql @@ -26,12 +26,8 @@ where 1 = 1 -- SQL model to cast each column to its adequate SQL type converted from the JSON schema type -- depends_on: __dbt__cte__nested_stream_with_c__lting_into_long_names_ab1 select - cast("id" as - varchar -) as "id", - cast("date" as - varchar -) as "date", + cast("id" as text) as "id", + cast("date" as text) as "date", cast("partition" as jsonb ) as "partition", @@ -45,15 +41,7 @@ where 1 = 1 )-- SQL model to build a hash column based on the values of this record -- depends_on: __dbt__cte__nested_stream_with_c__lting_into_long_names_ab2 select - md5(cast(coalesce(cast("id" as - varchar -), '') || '-' || coalesce(cast("date" as - varchar -), '') || '-' || coalesce(cast("partition" as - varchar -), '') as - varchar -)) as _airbyte_nested_stre__nto_long_names_hashid, + md5(cast(coalesce(cast("id" as text), '') || '-' || coalesce(cast("date" as text), '') || '-' || coalesce(cast("partition" as text), '') as text)) as _airbyte_nested_stre__nto_long_names_hashid, tmp.* from __dbt__cte__nested_stream_with_c__lting_into_long_names_ab2 tmp -- nested_stream_with_c__lting_into_long_names diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/first_output/airbyte_incremental/test_normalization/some_stream_that_was_empty_stg.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/first_output/airbyte_incremental/test_normalization/some_stream_that_was_empty_stg.sql index a4af81ada08a..e473519de41a 100644 --- 
a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/first_output/airbyte_incremental/test_normalization/some_stream_that_was_empty_stg.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/first_output/airbyte_incremental/test_normalization/some_stream_that_was_empty_stg.sql @@ -23,12 +23,8 @@ where 1 = 1 -- SQL model to cast each column to its adequate SQL type converted from the JSON schema type -- depends_on: __dbt__cte__some_stream_that_was_empty_ab1 select - cast("id" as - varchar -) as "id", - cast("date" as - varchar -) as "date", + cast("id" as text) as "id", + cast("date" as text) as "date", _airbyte_ab_id, _airbyte_emitted_at, now() as _airbyte_normalized_at @@ -39,13 +35,7 @@ where 1 = 1 )-- SQL model to build a hash column based on the values of this record -- depends_on: __dbt__cte__some_stream_that_was_empty_ab2 select - md5(cast(coalesce(cast("id" as - varchar -), '') || '-' || coalesce(cast("date" as - varchar -), '') as - varchar -)) as _airbyte_some_stream_that_was_empty_hashid, + md5(cast(coalesce(cast("id" as text), '') || '-' || coalesce(cast("date" as text), '') as text)) as _airbyte_some_stream_that_was_empty_hashid, tmp.* from __dbt__cte__some_stream_that_was_empty_ab2 tmp -- some_stream_that_was_empty diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/first_output/airbyte_incremental/test_normalization_namespace/simple_stream_with_n__lting_into_long_names.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/first_output/airbyte_incremental/test_normalization_namespace/simple_stream_with_n__lting_into_long_names.sql index ba7fb3853707..aea94f43825c 100644 --- 
a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/first_output/airbyte_incremental/test_normalization_namespace/simple_stream_with_n__lting_into_long_names.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/first_output/airbyte_incremental/test_normalization_namespace/simple_stream_with_n__lting_into_long_names.sql @@ -23,12 +23,8 @@ where 1 = 1 -- SQL model to cast each column to its adequate SQL type converted from the JSON schema type -- depends_on: __dbt__cte__simple_stream_with_n__lting_into_long_names_ab1 select - cast("id" as - varchar -) as "id", - cast("date" as - varchar -) as "date", + cast("id" as text) as "id", + cast("date" as text) as "date", _airbyte_ab_id, _airbyte_emitted_at, now() as _airbyte_normalized_at @@ -41,13 +37,7 @@ where 1 = 1 -- SQL model to build a hash column based on the values of this record -- depends_on: __dbt__cte__simple_stream_with_n__lting_into_long_names_ab2 select - md5(cast(coalesce(cast("id" as - varchar -), '') || '-' || coalesce(cast("date" as - varchar -), '') as - varchar -)) as _airbyte_simple_stre__nto_long_names_hashid, + md5(cast(coalesce(cast("id" as text), '') || '-' || coalesce(cast("date" as text), '') as text)) as _airbyte_simple_stre__nto_long_names_hashid, tmp.* from __dbt__cte__simple_stream_with_n__lting_into_long_names_ab2 tmp -- simple_stream_with_n__lting_into_long_names diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/first_output/airbyte_tables/test_normalization/conflict_stream_array.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/first_output/airbyte_tables/test_normalization/conflict_stream_array.sql index d5c47531a891..c1c6ab12a7b7 100644 --- 
a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/first_output/airbyte_tables/test_normalization/conflict_stream_array.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/first_output/airbyte_tables/test_normalization/conflict_stream_array.sql @@ -21,9 +21,7 @@ where 1 = 1 -- SQL model to cast each column to its adequate SQL type converted from the JSON schema type -- depends_on: __dbt__cte__conflict_stream_array_ab1 select - cast("id" as - varchar -) as "id", + cast("id" as text) as "id", conflict_stream_array, _airbyte_ab_id, _airbyte_emitted_at, @@ -36,13 +34,7 @@ where 1 = 1 -- SQL model to build a hash column based on the values of this record -- depends_on: __dbt__cte__conflict_stream_array_ab2 select - md5(cast(coalesce(cast("id" as - varchar -), '') || '-' || coalesce(cast(conflict_stream_array as - varchar -), '') as - varchar -)) as _airbyte_conflict_stream_array_hashid, + md5(cast(coalesce(cast("id" as text), '') || '-' || coalesce(cast(conflict_stream_array as text), '') as text)) as _airbyte_conflict_stream_array_hashid, tmp.* from __dbt__cte__conflict_stream_array_ab2 tmp -- conflict_stream_array diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/first_output/airbyte_tables/test_normalization/conflict_stream_name.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/first_output/airbyte_tables/test_normalization/conflict_stream_name.sql index dba6f29e197c..ac5cffb8d00d 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/first_output/airbyte_tables/test_normalization/conflict_stream_name.sql +++ 
b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/first_output/airbyte_tables/test_normalization/conflict_stream_name.sql @@ -23,9 +23,7 @@ where 1 = 1 -- SQL model to cast each column to its adequate SQL type converted from the JSON schema type -- depends_on: __dbt__cte__conflict_stream_name_ab1 select - cast("id" as - varchar -) as "id", + cast("id" as text) as "id", cast(conflict_stream_name as jsonb ) as conflict_stream_name, @@ -40,13 +38,7 @@ where 1 = 1 -- SQL model to build a hash column based on the values of this record -- depends_on: __dbt__cte__conflict_stream_name_ab2 select - md5(cast(coalesce(cast("id" as - varchar -), '') || '-' || coalesce(cast(conflict_stream_name as - varchar -), '') as - varchar -)) as _airbyte_conflict_stream_name_hashid, + md5(cast(coalesce(cast("id" as text), '') || '-' || coalesce(cast(conflict_stream_name as text), '') as text)) as _airbyte_conflict_stream_name_hashid, tmp.* from __dbt__cte__conflict_stream_name_ab2 tmp -- conflict_stream_name diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/first_output/airbyte_tables/test_normalization/conflict_stream_name___conflict_stream_name.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/first_output/airbyte_tables/test_normalization/conflict_stream_name___conflict_stream_name.sql index 55404b797442..4aa2c420ed45 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/first_output/airbyte_tables/test_normalization/conflict_stream_name___conflict_stream_name.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/first_output/airbyte_tables/test_normalization/conflict_stream_name___conflict_stream_name.sql @@ 
-23,9 +23,7 @@ and conflict_stream_name is not null -- depends_on: __dbt__cte__conflict_stream_name___conflict_stream_name_ab1 select _airbyte_conflict_stream_name_2_hashid, - cast(groups as - varchar -) as groups, + cast(groups as text) as groups, _airbyte_ab_id, _airbyte_emitted_at, now() as _airbyte_normalized_at @@ -37,13 +35,7 @@ where 1 = 1 -- SQL model to build a hash column based on the values of this record -- depends_on: __dbt__cte__conflict_stream_name___conflict_stream_name_ab2 select - md5(cast(coalesce(cast(_airbyte_conflict_stream_name_2_hashid as - varchar -), '') || '-' || coalesce(cast(groups as - varchar -), '') as - varchar -)) as _airbyte_conflict_stream_name_3_hashid, + md5(cast(coalesce(cast(_airbyte_conflict_stream_name_2_hashid as text), '') || '-' || coalesce(cast(groups as text), '') as text)) as _airbyte_conflict_stream_name_3_hashid, tmp.* from __dbt__cte__conflict_stream_name___conflict_stream_name_ab2 tmp -- conflict_stream_name at conflict_stream_name/conflict_stream_name/conflict_stream_name diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/first_output/airbyte_tables/test_normalization/conflict_stream_name_conflict_stream_name.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/first_output/airbyte_tables/test_normalization/conflict_stream_name_conflict_stream_name.sql index ea9792be5a9f..82dfb023674e 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/first_output/airbyte_tables/test_normalization/conflict_stream_name_conflict_stream_name.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/first_output/airbyte_tables/test_normalization/conflict_stream_name_conflict_stream_name.sql @@ -39,13 +39,7 @@ where 1 = 1 -- SQL model 
to build a hash column based on the values of this record -- depends_on: __dbt__cte__conflict_stream_name_conflict_stream_name_ab2 select - md5(cast(coalesce(cast(_airbyte_conflict_stream_name_hashid as - varchar -), '') || '-' || coalesce(cast(conflict_stream_name as - varchar -), '') as - varchar -)) as _airbyte_conflict_stream_name_2_hashid, + md5(cast(coalesce(cast(_airbyte_conflict_stream_name_hashid as text), '') || '-' || coalesce(cast(conflict_stream_name as text), '') as text)) as _airbyte_conflict_stream_name_2_hashid, tmp.* from __dbt__cte__conflict_stream_name_conflict_stream_name_ab2 tmp -- conflict_stream_name at conflict_stream_name/conflict_stream_name diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/first_output/airbyte_tables/test_normalization/conflict_stream_scalar.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/first_output/airbyte_tables/test_normalization/conflict_stream_scalar.sql index fec20e8f1d5e..09a4fa01de97 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/first_output/airbyte_tables/test_normalization/conflict_stream_scalar.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/first_output/airbyte_tables/test_normalization/conflict_stream_scalar.sql @@ -21,9 +21,7 @@ where 1 = 1 -- SQL model to cast each column to its adequate SQL type converted from the JSON schema type -- depends_on: __dbt__cte__conflict_stream_scalar_ab1 select - cast("id" as - varchar -) as "id", + cast("id" as text) as "id", cast(conflict_stream_scalar as bigint ) as conflict_stream_scalar, @@ -38,13 +36,7 @@ where 1 = 1 -- SQL model to build a hash column based on the values of this record -- depends_on: __dbt__cte__conflict_stream_scalar_ab2 select - 
md5(cast(coalesce(cast("id" as - varchar -), '') || '-' || coalesce(cast(conflict_stream_scalar as - varchar -), '') as - varchar -)) as _airbyte_conflict_stream_scalar_hashid, + md5(cast(coalesce(cast("id" as text), '') || '-' || coalesce(cast(conflict_stream_scalar as text), '') as text)) as _airbyte_conflict_stream_scalar_hashid, tmp.* from __dbt__cte__conflict_stream_scalar_ab2 tmp -- conflict_stream_scalar diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/first_output/airbyte_tables/test_normalization/non_nested_stream_wi__lting_into_long_names.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/first_output/airbyte_tables/test_normalization/non_nested_stream_wi__lting_into_long_names.sql index 3b267eea4346..31d2176c3888 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/first_output/airbyte_tables/test_normalization/non_nested_stream_wi__lting_into_long_names.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/first_output/airbyte_tables/test_normalization/non_nested_stream_wi__lting_into_long_names.sql @@ -21,12 +21,8 @@ where 1 = 1 -- SQL model to cast each column to its adequate SQL type converted from the JSON schema type -- depends_on: __dbt__cte__non_nested_stream_wi__lting_into_long_names_ab1 select - cast("id" as - varchar -) as "id", - cast("date" as - varchar -) as "date", + cast("id" as text) as "id", + cast("date" as text) as "date", _airbyte_ab_id, _airbyte_emitted_at, now() as _airbyte_normalized_at @@ -38,13 +34,7 @@ where 1 = 1 -- SQL model to build a hash column based on the values of this record -- depends_on: __dbt__cte__non_nested_stream_wi__lting_into_long_names_ab2 select - md5(cast(coalesce(cast("id" as - varchar -), '') || '-' 
|| coalesce(cast("date" as - varchar -), '') as - varchar -)) as _airbyte_non_nested___nto_long_names_hashid, + md5(cast(coalesce(cast("id" as text), '') || '-' || coalesce(cast("date" as text), '') as text)) as _airbyte_non_nested___nto_long_names_hashid, tmp.* from __dbt__cte__non_nested_stream_wi__lting_into_long_names_ab2 tmp -- non_nested_stream_wi__lting_into_long_names diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/first_output/airbyte_tables/test_normalization/unnest_alias.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/first_output/airbyte_tables/test_normalization/unnest_alias.sql index 4a7cb02c98d0..7af2f04f81f8 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/first_output/airbyte_tables/test_normalization/unnest_alias.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/first_output/airbyte_tables/test_normalization/unnest_alias.sql @@ -36,13 +36,7 @@ where 1 = 1 -- SQL model to build a hash column based on the values of this record -- depends_on: __dbt__cte__unnest_alias_ab2 select - md5(cast(coalesce(cast("id" as - varchar -), '') || '-' || coalesce(cast(children as - varchar -), '') as - varchar -)) as _airbyte_unnest_alias_hashid, + md5(cast(coalesce(cast("id" as text), '') || '-' || coalesce(cast(children as text), '') as text)) as _airbyte_unnest_alias_hashid, tmp.* from __dbt__cte__unnest_alias_ab2 tmp -- unnest_alias diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/first_output/airbyte_tables/test_normalization/unnest_alias_childre__column___with__quotes.sql 
b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/first_output/airbyte_tables/test_normalization/unnest_alias_childre__column___with__quotes.sql index a3cbb5c562e7..6688069a62f0 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/first_output/airbyte_tables/test_normalization/unnest_alias_childre__column___with__quotes.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/first_output/airbyte_tables/test_normalization/unnest_alias_childre__column___with__quotes.sql @@ -29,9 +29,7 @@ and "column`_'with""_quotes" is not null -- depends_on: __dbt__cte__unnest_alias_childre__column___with__quotes_ab1 select _airbyte_owner_hashid, - cast(currency as - varchar -) as currency, + cast(currency as text) as currency, _airbyte_ab_id, _airbyte_emitted_at, now() as _airbyte_normalized_at @@ -43,13 +41,7 @@ where 1 = 1 -- SQL model to build a hash column based on the values of this record -- depends_on: __dbt__cte__unnest_alias_childre__column___with__quotes_ab2 select - md5(cast(coalesce(cast(_airbyte_owner_hashid as - varchar -), '') || '-' || coalesce(cast(currency as - varchar -), '') as - varchar -)) as _airbyte_column___with__quotes_hashid, + md5(cast(coalesce(cast(_airbyte_owner_hashid as text), '') || '-' || coalesce(cast(currency as text), '') as text)) as _airbyte_column___with__quotes_hashid, tmp.* from __dbt__cte__unnest_alias_childre__column___with__quotes_ab2 tmp -- column___with__quotes at unnest_alias/children/owner/column`_'with"_quotes diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/first_output/airbyte_tables/test_normalization/unnest_alias_children.sql 
b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/first_output/airbyte_tables/test_normalization/unnest_alias_children.sql index a67bbcdbc1ef..779394d5765d 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/first_output/airbyte_tables/test_normalization/unnest_alias_children.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/first_output/airbyte_tables/test_normalization/unnest_alias_children.sql @@ -49,15 +49,7 @@ where 1 = 1 -- SQL model to build a hash column based on the values of this record -- depends_on: __dbt__cte__unnest_alias_children_ab2 select - md5(cast(coalesce(cast(_airbyte_unnest_alias_hashid as - varchar -), '') || '-' || coalesce(cast(ab_id as - varchar -), '') || '-' || coalesce(cast("owner" as - varchar -), '') as - varchar -)) as _airbyte_children_hashid, + md5(cast(coalesce(cast(_airbyte_unnest_alias_hashid as text), '') || '-' || coalesce(cast(ab_id as text), '') || '-' || coalesce(cast("owner" as text), '') as text)) as _airbyte_children_hashid, tmp.* from __dbt__cte__unnest_alias_children_ab2 tmp -- children at unnest_alias/children diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/first_output/airbyte_tables/test_normalization/unnest_alias_children_owner.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/first_output/airbyte_tables/test_normalization/unnest_alias_children_owner.sql index 860b4d724bbb..651e1c11914e 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/first_output/airbyte_tables/test_normalization/unnest_alias_children_owner.sql +++ 
b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/first_output/airbyte_tables/test_normalization/unnest_alias_children_owner.sql @@ -39,15 +39,7 @@ where 1 = 1 -- SQL model to build a hash column based on the values of this record -- depends_on: __dbt__cte__unnest_alias_children_owner_ab2 select - md5(cast(coalesce(cast(_airbyte_children_hashid as - varchar -), '') || '-' || coalesce(cast(owner_id as - varchar -), '') || '-' || coalesce(cast("column`_'with""_quotes" as - varchar -), '') as - varchar -)) as _airbyte_owner_hashid, + md5(cast(coalesce(cast(_airbyte_children_hashid as text), '') || '-' || coalesce(cast(owner_id as text), '') || '-' || coalesce(cast("column`_'with""_quotes" as text), '') as text)) as _airbyte_owner_hashid, tmp.* from __dbt__cte__unnest_alias_children_owner_ab2 tmp -- owner at unnest_alias/children/owner diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/models/generated/airbyte_ctes/test_normalization/conflict_stream_name_ab3.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/models/generated/airbyte_ctes/test_normalization/conflict_stream_name_ab3.sql index 4e4705096dab..78f7cfe9bea5 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/models/generated/airbyte_ctes/test_normalization/conflict_stream_name_ab3.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/models/generated/airbyte_ctes/test_normalization/conflict_stream_name_ab3.sql @@ -9,7 +9,7 @@ select {{ dbt_utils.surrogate_key([ adapter.quote('id'), - 'conflict_stream_name', + object_to_string('conflict_stream_name'), ]) }} as _airbyte_conflict_stream_name_hashid, tmp.* from {{ 
ref('conflict_stream_name_ab2') }} tmp diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/models/generated/airbyte_ctes/test_normalization/conflict_stream_name_conflict_stream_name_ab3.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/models/generated/airbyte_ctes/test_normalization/conflict_stream_name_conflict_stream_name_ab3.sql index 0c8e2992b976..0892d6143276 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/models/generated/airbyte_ctes/test_normalization/conflict_stream_name_conflict_stream_name_ab3.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/models/generated/airbyte_ctes/test_normalization/conflict_stream_name_conflict_stream_name_ab3.sql @@ -8,7 +8,7 @@ select {{ dbt_utils.surrogate_key([ '_airbyte_conflict_stream_name_hashid', - 'conflict_stream_name', + object_to_string('conflict_stream_name'), ]) }} as _airbyte_conflict_stream_name_2_hashid, tmp.* from {{ ref('conflict_stream_name_conflict_stream_name_ab2') }} tmp diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/models/generated/airbyte_ctes/test_normalization/nested_stream_with_c___long_names_partition_ab1.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/models/generated/airbyte_ctes/test_normalization/nested_stream_with_c___long_names_partition_ab1.sql index 6be1492d1a76..fafabe2d9840 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/models/generated/airbyte_ctes/test_normalization/nested_stream_with_c___long_names_partition_ab1.sql +++ 
b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/models/generated/airbyte_ctes/test_normalization/nested_stream_with_c___long_names_partition_ab1.sql @@ -16,5 +16,5 @@ from {{ ref('nested_stream_with_c__lting_into_long_names_scd') }} as table_alias -- partition at nested_stream_with_complex_columns_resulting_into_long_names/partition where 1 = 1 and {{ adapter.quote('partition') }} is not null -{{ incremental_clause('_airbyte_emitted_at') }} +{{ incremental_clause('_airbyte_emitted_at', this) }} diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/models/generated/airbyte_ctes/test_normalization/nested_stream_with_c___long_names_partition_ab2.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/models/generated/airbyte_ctes/test_normalization/nested_stream_with_c___long_names_partition_ab2.sql index 34c79fa90c6a..a622952dbeff 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/models/generated/airbyte_ctes/test_normalization/nested_stream_with_c___long_names_partition_ab2.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/models/generated/airbyte_ctes/test_normalization/nested_stream_with_c___long_names_partition_ab2.sql @@ -15,5 +15,5 @@ select from {{ ref('nested_stream_with_c___long_names_partition_ab1') }} -- partition at nested_stream_with_complex_columns_resulting_into_long_names/partition where 1 = 1 -{{ incremental_clause('_airbyte_emitted_at') }} +{{ incremental_clause('_airbyte_emitted_at', this) }} diff --git 
a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/models/generated/airbyte_ctes/test_normalization/nested_stream_with_c___long_names_partition_ab3.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/models/generated/airbyte_ctes/test_normalization/nested_stream_with_c___long_names_partition_ab3.sql index 71d2f61739eb..3eb1b8183827 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/models/generated/airbyte_ctes/test_normalization/nested_stream_with_c___long_names_partition_ab3.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/models/generated/airbyte_ctes/test_normalization/nested_stream_with_c___long_names_partition_ab3.sql @@ -15,5 +15,5 @@ select from {{ ref('nested_stream_with_c___long_names_partition_ab2') }} tmp -- partition at nested_stream_with_complex_columns_resulting_into_long_names/partition where 1 = 1 -{{ incremental_clause('_airbyte_emitted_at') }} +{{ incremental_clause('_airbyte_emitted_at', this) }} diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/models/generated/airbyte_ctes/test_normalization/nested_stream_with_c___names_partition_data_ab1.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/models/generated/airbyte_ctes/test_normalization/nested_stream_with_c___names_partition_data_ab1.sql index 6f510faef59b..0aab8469aefd 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/models/generated/airbyte_ctes/test_normalization/nested_stream_with_c___names_partition_data_ab1.sql +++ 
b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/models/generated/airbyte_ctes/test_normalization/nested_stream_with_c___names_partition_data_ab1.sql @@ -17,5 +17,5 @@ from {{ ref('nested_stream_with_c___long_names_partition') }} as table_alias {{ cross_join_unnest('partition', adapter.quote('DATA')) }} where 1 = 1 and {{ adapter.quote('DATA') }} is not null -{{ incremental_clause('_airbyte_emitted_at') }} +{{ incremental_clause('_airbyte_emitted_at', this) }} diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/models/generated/airbyte_ctes/test_normalization/nested_stream_with_c___names_partition_data_ab2.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/models/generated/airbyte_ctes/test_normalization/nested_stream_with_c___names_partition_data_ab2.sql index 916726e052fd..f6cb35f7d406 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/models/generated/airbyte_ctes/test_normalization/nested_stream_with_c___names_partition_data_ab2.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/models/generated/airbyte_ctes/test_normalization/nested_stream_with_c___names_partition_data_ab2.sql @@ -14,5 +14,5 @@ select from {{ ref('nested_stream_with_c___names_partition_data_ab1') }} -- DATA at nested_stream_with_complex_columns_resulting_into_long_names/partition/DATA where 1 = 1 -{{ incremental_clause('_airbyte_emitted_at') }} +{{ incremental_clause('_airbyte_emitted_at', this) }} diff --git 
a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/models/generated/airbyte_ctes/test_normalization/nested_stream_with_c___names_partition_data_ab3.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/models/generated/airbyte_ctes/test_normalization/nested_stream_with_c___names_partition_data_ab3.sql index c50169f54ede..f06e21a1432e 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/models/generated/airbyte_ctes/test_normalization/nested_stream_with_c___names_partition_data_ab3.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/models/generated/airbyte_ctes/test_normalization/nested_stream_with_c___names_partition_data_ab3.sql @@ -14,5 +14,5 @@ select from {{ ref('nested_stream_with_c___names_partition_data_ab2') }} tmp -- DATA at nested_stream_with_complex_columns_resulting_into_long_names/partition/DATA where 1 = 1 -{{ incremental_clause('_airbyte_emitted_at') }} +{{ incremental_clause('_airbyte_emitted_at', this) }} diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/models/generated/airbyte_ctes/test_normalization/nested_stream_with_c__ion_double_array_data_ab1.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/models/generated/airbyte_ctes/test_normalization/nested_stream_with_c__ion_double_array_data_ab1.sql index 193f3ba04ddd..5f674cdcd1a6 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/models/generated/airbyte_ctes/test_normalization/nested_stream_with_c__ion_double_array_data_ab1.sql +++ 
b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/models/generated/airbyte_ctes/test_normalization/nested_stream_with_c__ion_double_array_data_ab1.sql @@ -17,5 +17,5 @@ from {{ ref('nested_stream_with_c___long_names_partition') }} as table_alias {{ cross_join_unnest('partition', 'double_array_data') }} where 1 = 1 and double_array_data is not null -{{ incremental_clause('_airbyte_emitted_at') }} +{{ incremental_clause('_airbyte_emitted_at', this) }} diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/models/generated/airbyte_ctes/test_normalization/nested_stream_with_c__ion_double_array_data_ab2.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/models/generated/airbyte_ctes/test_normalization/nested_stream_with_c__ion_double_array_data_ab2.sql index b66908781226..6d785589955d 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/models/generated/airbyte_ctes/test_normalization/nested_stream_with_c__ion_double_array_data_ab2.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/models/generated/airbyte_ctes/test_normalization/nested_stream_with_c__ion_double_array_data_ab2.sql @@ -14,5 +14,5 @@ select from {{ ref('nested_stream_with_c__ion_double_array_data_ab1') }} -- double_array_data at nested_stream_with_complex_columns_resulting_into_long_names/partition/double_array_data where 1 = 1 -{{ incremental_clause('_airbyte_emitted_at') }} +{{ incremental_clause('_airbyte_emitted_at', this) }} diff --git 
a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/models/generated/airbyte_ctes/test_normalization/nested_stream_with_c__ion_double_array_data_ab3.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/models/generated/airbyte_ctes/test_normalization/nested_stream_with_c__ion_double_array_data_ab3.sql index c586286df6d3..c83657e465f6 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/models/generated/airbyte_ctes/test_normalization/nested_stream_with_c__ion_double_array_data_ab3.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/models/generated/airbyte_ctes/test_normalization/nested_stream_with_c__ion_double_array_data_ab3.sql @@ -14,5 +14,5 @@ select from {{ ref('nested_stream_with_c__ion_double_array_data_ab2') }} tmp -- double_array_data at nested_stream_with_complex_columns_resulting_into_long_names/partition/double_array_data where 1 = 1 -{{ incremental_clause('_airbyte_emitted_at') }} +{{ incremental_clause('_airbyte_emitted_at', this) }} diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/models/generated/airbyte_ctes/test_normalization/nested_stream_with_c__lting_into_long_names_ab1.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/models/generated/airbyte_ctes/test_normalization/nested_stream_with_c__lting_into_long_names_ab1.sql index 49ae7cb8fc1f..767a1071f174 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/models/generated/airbyte_ctes/test_normalization/nested_stream_with_c__lting_into_long_names_ab1.sql +++ 
b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/models/generated/airbyte_ctes/test_normalization/nested_stream_with_c__lting_into_long_names_ab1.sql @@ -16,5 +16,5 @@ select from {{ source('test_normalization', '_airbyte_raw_nested_stream_with_complex_columns_resulting_into_long_names') }} as table_alias -- nested_stream_with_c__lting_into_long_names where 1 = 1 -{{ incremental_clause('_airbyte_emitted_at') }} +{{ incremental_clause('_airbyte_emitted_at', this) }} diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/models/generated/airbyte_ctes/test_normalization/nested_stream_with_c__lting_into_long_names_ab2.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/models/generated/airbyte_ctes/test_normalization/nested_stream_with_c__lting_into_long_names_ab2.sql index 9971fec8280c..6739cf914f38 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/models/generated/airbyte_ctes/test_normalization/nested_stream_with_c__lting_into_long_names_ab2.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/models/generated/airbyte_ctes/test_normalization/nested_stream_with_c__lting_into_long_names_ab2.sql @@ -16,5 +16,5 @@ select from {{ ref('nested_stream_with_c__lting_into_long_names_ab1') }} -- nested_stream_with_c__lting_into_long_names where 1 = 1 -{{ incremental_clause('_airbyte_emitted_at') }} +{{ incremental_clause('_airbyte_emitted_at', this) }} diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/models/generated/airbyte_ctes/test_normalization/some_stream_that_was_empty_ab1.sql 
b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/models/generated/airbyte_ctes/test_normalization/some_stream_that_was_empty_ab1.sql index 1f6710e4f97a..6862a6ac2688 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/models/generated/airbyte_ctes/test_normalization/some_stream_that_was_empty_ab1.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/models/generated/airbyte_ctes/test_normalization/some_stream_that_was_empty_ab1.sql @@ -15,5 +15,5 @@ select from {{ source('test_normalization', '_airbyte_raw_some_stream_that_was_empty') }} as table_alias -- some_stream_that_was_empty where 1 = 1 -{{ incremental_clause('_airbyte_emitted_at') }} +{{ incremental_clause('_airbyte_emitted_at', this) }} diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/models/generated/airbyte_ctes/test_normalization/some_stream_that_was_empty_ab2.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/models/generated/airbyte_ctes/test_normalization/some_stream_that_was_empty_ab2.sql index ab64cad9c732..258f8b697b56 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/models/generated/airbyte_ctes/test_normalization/some_stream_that_was_empty_ab2.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/models/generated/airbyte_ctes/test_normalization/some_stream_that_was_empty_ab2.sql @@ -15,5 +15,5 @@ select from {{ ref('some_stream_that_was_empty_ab1') }} -- some_stream_that_was_empty where 1 = 1 -{{ incremental_clause('_airbyte_emitted_at') }} +{{ 
incremental_clause('_airbyte_emitted_at', this) }} diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/models/generated/airbyte_ctes/test_normalization/unnest_alias_children_ab3.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/models/generated/airbyte_ctes/test_normalization/unnest_alias_children_ab3.sql index e262bd8da748..e5a3aa0268c5 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/models/generated/airbyte_ctes/test_normalization/unnest_alias_children_ab3.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/models/generated/airbyte_ctes/test_normalization/unnest_alias_children_ab3.sql @@ -9,7 +9,7 @@ select {{ dbt_utils.surrogate_key([ '_airbyte_unnest_alias_hashid', 'ab_id', - adapter.quote('owner'), + object_to_string(adapter.quote('owner')), ]) }} as _airbyte_children_hashid, tmp.* from {{ ref('unnest_alias_children_ab2') }} tmp diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/models/generated/airbyte_ctes/test_normalization_namespace/simple_stream_with_n__lting_into_long_names_ab1.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/models/generated/airbyte_ctes/test_normalization_namespace/simple_stream_with_n__lting_into_long_names_ab1.sql index a77b0f0ac727..b73287682765 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/models/generated/airbyte_ctes/test_normalization_namespace/simple_stream_with_n__lting_into_long_names_ab1.sql +++ 
b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/models/generated/airbyte_ctes/test_normalization_namespace/simple_stream_with_n__lting_into_long_names_ab1.sql @@ -15,5 +15,5 @@ select from {{ source('test_normalization_namespace', '_airbyte_raw_simple_stream_with_namespace_resulting_into_long_names') }} as table_alias -- simple_stream_with_n__lting_into_long_names where 1 = 1 -{{ incremental_clause('_airbyte_emitted_at') }} +{{ incremental_clause('_airbyte_emitted_at', this) }} diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/models/generated/airbyte_ctes/test_normalization_namespace/simple_stream_with_n__lting_into_long_names_ab2.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/models/generated/airbyte_ctes/test_normalization_namespace/simple_stream_with_n__lting_into_long_names_ab2.sql index b19efa39ea61..a2f35bfcefb1 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/models/generated/airbyte_ctes/test_normalization_namespace/simple_stream_with_n__lting_into_long_names_ab2.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/models/generated/airbyte_ctes/test_normalization_namespace/simple_stream_with_n__lting_into_long_names_ab2.sql @@ -15,5 +15,5 @@ select from {{ ref('simple_stream_with_n__lting_into_long_names_ab1') }} -- simple_stream_with_n__lting_into_long_names where 1 = 1 -{{ incremental_clause('_airbyte_emitted_at') }} +{{ incremental_clause('_airbyte_emitted_at', this) }} diff --git 
a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/models/generated/airbyte_ctes/test_normalization_namespace/simple_stream_with_n__lting_into_long_names_ab3.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/models/generated/airbyte_ctes/test_normalization_namespace/simple_stream_with_n__lting_into_long_names_ab3.sql index 3ab506bf52d1..231ba585f702 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/models/generated/airbyte_ctes/test_normalization_namespace/simple_stream_with_n__lting_into_long_names_ab3.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/models/generated/airbyte_ctes/test_normalization_namespace/simple_stream_with_n__lting_into_long_names_ab3.sql @@ -15,5 +15,5 @@ select from {{ ref('simple_stream_with_n__lting_into_long_names_ab2') }} tmp -- simple_stream_with_n__lting_into_long_names where 1 = 1 -{{ incremental_clause('_airbyte_emitted_at') }} +{{ incremental_clause('_airbyte_emitted_at', this) }} diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/models/generated/airbyte_incremental/scd/test_normalization/nested_stream_with_c__lting_into_long_names_scd.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/models/generated/airbyte_incremental/scd/test_normalization/nested_stream_with_c__lting_into_long_names_scd.sql index 03e7d58bbeab..5eaf6186aaab 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/models/generated/airbyte_incremental/scd/test_normalization/nested_stream_with_c__lting_into_long_names_scd.sql +++ 
b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/models/generated/airbyte_incremental/scd/test_normalization/nested_stream_with_c__lting_into_long_names_scd.sql @@ -2,7 +2,53 @@ indexes = [{'columns':['_airbyte_active_row','_airbyte_unique_key_scd','_airbyte_emitted_at'],'type': 'btree'}], unique_key = "_airbyte_unique_key_scd", schema = "test_normalization", - post_hook = ["delete from _airbyte_test_normalization.nested_stream_with_c__lting_into_long_names_stg where _airbyte_emitted_at != (select max(_airbyte_emitted_at) from _airbyte_test_normalization.nested_stream_with_c__lting_into_long_names_stg)"], + post_hook = [" + {% + set final_table_relation = adapter.get_relation( + database=this.database, + schema=this.schema, + identifier='nested_stream_with_c__lting_into_long_names' + ) + %} + {# + If the final table doesn't exist, then obviously we can't delete anything from it. + Also, after a reset, the final table is created without the _airbyte_unique_key column (this column is created during the first sync) + So skip this deletion if the column doesn't exist. (in this case, the table is guaranteed to be empty anyway) + #} + {% + if final_table_relation is not none and '_airbyte_unique_key' in adapter.get_columns_in_relation(final_table_relation)|map(attribute='name') + %} + -- Delete records which are no longer active: + -- This query is equivalent, but the left join version is more performant: + -- delete from final_table where unique_key in ( + -- select unique_key from scd_table where 1 = 1 + -- ) and unique_key not in ( + -- select unique_key from scd_table where active_row = 1 + -- ) + -- We're incremental against normalized_at rather than emitted_at because we need to fetch the SCD + -- entries that were _updated_ recently. This is because a deleted record will have an SCD record + -- which was emitted a long time ago, but recently re-normalized to have active_row = 0. 
+ delete from {{ final_table_relation }} where {{ final_table_relation }}._airbyte_unique_key in ( + select recent_records.unique_key + from ( + select distinct _airbyte_unique_key as unique_key + from {{ this }} + where 1=1 {{ incremental_clause('_airbyte_normalized_at', this.schema + '.' + adapter.quote('nested_stream_with_c__lting_into_long_names')) }} + ) recent_records + left join ( + select _airbyte_unique_key as unique_key, count(_airbyte_unique_key) as active_count + from {{ this }} + where _airbyte_active_row = 1 {{ incremental_clause('_airbyte_normalized_at', this.schema + '.' + adapter.quote('nested_stream_with_c__lting_into_long_names')) }} + group by _airbyte_unique_key + ) active_counts + on recent_records.unique_key = active_counts.unique_key + where active_count is null or active_count = 0 + ) + {% else %} + -- We have to have a non-empty query, so just do a noop delete + delete from {{ this }} where 1=0 + {% endif %} + ","delete from _airbyte_test_normalization.nested_stream_with_c__lting_into_long_names_stg where _airbyte_emitted_at != (select max(_airbyte_emitted_at) from _airbyte_test_normalization.nested_stream_with_c__lting_into_long_names_stg)"], tags = [ "top-level" ] ) }} -- depends_on: ref('nested_stream_with_c__lting_into_long_names_stg') @@ -15,7 +61,7 @@ new_data as ( from {{ ref('nested_stream_with_c__lting_into_long_names_stg') }} -- nested_stream_with_c__lting_into_long_names from {{ source('test_normalization', '_airbyte_raw_nested_stream_with_complex_columns_resulting_into_long_names') }} where 1 = 1 - {{ incremental_clause('_airbyte_emitted_at') }} + {{ incremental_clause('_airbyte_emitted_at', this) }} ), new_data_ids as ( -- build a subset of _airbyte_unique_key from rows that are new diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/models/generated/airbyte_incremental/scd/test_normalization/some_stream_that_was_empty_scd.sql 
b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/models/generated/airbyte_incremental/scd/test_normalization/some_stream_that_was_empty_scd.sql index 0caa4d9bfc65..c35233d432cb 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/models/generated/airbyte_incremental/scd/test_normalization/some_stream_that_was_empty_scd.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/models/generated/airbyte_incremental/scd/test_normalization/some_stream_that_was_empty_scd.sql @@ -2,7 +2,53 @@ indexes = [{'columns':['_airbyte_active_row','_airbyte_unique_key_scd','_airbyte_emitted_at'],'type': 'btree'}], unique_key = "_airbyte_unique_key_scd", schema = "test_normalization", - post_hook = ["delete from _airbyte_test_normalization.some_stream_that_was_empty_stg where _airbyte_emitted_at != (select max(_airbyte_emitted_at) from _airbyte_test_normalization.some_stream_that_was_empty_stg)"], + post_hook = [" + {% + set final_table_relation = adapter.get_relation( + database=this.database, + schema=this.schema, + identifier='some_stream_that_was_empty' + ) + %} + {# + If the final table doesn't exist, then obviously we can't delete anything from it. + Also, after a reset, the final table is created without the _airbyte_unique_key column (this column is created during the first sync) + So skip this deletion if the column doesn't exist. 
(in this case, the table is guaranteed to be empty anyway) + #} + {% + if final_table_relation is not none and '_airbyte_unique_key' in adapter.get_columns_in_relation(final_table_relation)|map(attribute='name') + %} + -- Delete records which are no longer active: + -- This query is equivalent, but the left join version is more performant: + -- delete from final_table where unique_key in ( + -- select unique_key from scd_table where 1 = 1 + -- ) and unique_key not in ( + -- select unique_key from scd_table where active_row = 1 + -- ) + -- We're incremental against normalized_at rather than emitted_at because we need to fetch the SCD + -- entries that were _updated_ recently. This is because a deleted record will have an SCD record + -- which was emitted a long time ago, but recently re-normalized to have active_row = 0. + delete from {{ final_table_relation }} where {{ final_table_relation }}._airbyte_unique_key in ( + select recent_records.unique_key + from ( + select distinct _airbyte_unique_key as unique_key + from {{ this }} + where 1=1 {{ incremental_clause('_airbyte_normalized_at', this.schema + '.' + adapter.quote('some_stream_that_was_empty')) }} + ) recent_records + left join ( + select _airbyte_unique_key as unique_key, count(_airbyte_unique_key) as active_count + from {{ this }} + where _airbyte_active_row = 1 {{ incremental_clause('_airbyte_normalized_at', this.schema + '.' 
+ adapter.quote('some_stream_that_was_empty')) }} + group by _airbyte_unique_key + ) active_counts + on recent_records.unique_key = active_counts.unique_key + where active_count is null or active_count = 0 + ) + {% else %} + -- We have to have a non-empty query, so just do a noop delete + delete from {{ this }} where 1=0 + {% endif %} + ","delete from _airbyte_test_normalization.some_stream_that_was_empty_stg where _airbyte_emitted_at != (select max(_airbyte_emitted_at) from _airbyte_test_normalization.some_stream_that_was_empty_stg)"], tags = [ "top-level" ] ) }} -- depends_on: ref('some_stream_that_was_empty_stg') @@ -15,7 +61,7 @@ new_data as ( from {{ ref('some_stream_that_was_empty_stg') }} -- some_stream_that_was_empty from {{ source('test_normalization', '_airbyte_raw_some_stream_that_was_empty') }} where 1 = 1 - {{ incremental_clause('_airbyte_emitted_at') }} + {{ incremental_clause('_airbyte_emitted_at', this) }} ), new_data_ids as ( -- build a subset of _airbyte_unique_key from rows that are new diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/models/generated/airbyte_incremental/test_normalization/nested_stream_with_c___long_names_partition.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/models/generated/airbyte_incremental/test_normalization/nested_stream_with_c___long_names_partition.sql index c79a3b8f56cb..92e9c5d4fe08 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/models/generated/airbyte_incremental/test_normalization/nested_stream_with_c___long_names_partition.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/models/generated/airbyte_incremental/test_normalization/nested_stream_with_c___long_names_partition.sql @@ -16,5 +16,5 @@ 
select from {{ ref('nested_stream_with_c___long_names_partition_ab3') }} -- partition at nested_stream_with_complex_columns_resulting_into_long_names/partition from {{ ref('nested_stream_with_c__lting_into_long_names_scd') }} where 1 = 1 -{{ incremental_clause('_airbyte_emitted_at') }} +{{ incremental_clause('_airbyte_emitted_at', this) }} diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/models/generated/airbyte_incremental/test_normalization/nested_stream_with_c___names_partition_data.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/models/generated/airbyte_incremental/test_normalization/nested_stream_with_c___names_partition_data.sql index 428b290262c6..f453cd838e21 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/models/generated/airbyte_incremental/test_normalization/nested_stream_with_c___names_partition_data.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/models/generated/airbyte_incremental/test_normalization/nested_stream_with_c___names_partition_data.sql @@ -15,5 +15,5 @@ select from {{ ref('nested_stream_with_c___names_partition_data_ab3') }} -- DATA at nested_stream_with_complex_columns_resulting_into_long_names/partition/DATA from {{ ref('nested_stream_with_c___long_names_partition') }} where 1 = 1 -{{ incremental_clause('_airbyte_emitted_at') }} +{{ incremental_clause('_airbyte_emitted_at', this) }} diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/models/generated/airbyte_incremental/test_normalization/nested_stream_with_c__ion_double_array_data.sql 
b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/models/generated/airbyte_incremental/test_normalization/nested_stream_with_c__ion_double_array_data.sql index db604519f873..ea7bc2e78095 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/models/generated/airbyte_incremental/test_normalization/nested_stream_with_c__ion_double_array_data.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/models/generated/airbyte_incremental/test_normalization/nested_stream_with_c__ion_double_array_data.sql @@ -15,5 +15,5 @@ select from {{ ref('nested_stream_with_c__ion_double_array_data_ab3') }} -- double_array_data at nested_stream_with_complex_columns_resulting_into_long_names/partition/double_array_data from {{ ref('nested_stream_with_c___long_names_partition') }} where 1 = 1 -{{ incremental_clause('_airbyte_emitted_at') }} +{{ incremental_clause('_airbyte_emitted_at', this) }} diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/models/generated/airbyte_incremental/test_normalization/nested_stream_with_c__lting_into_long_names.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/models/generated/airbyte_incremental/test_normalization/nested_stream_with_c__lting_into_long_names.sql index 83d89faed2fa..26c3aded7063 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/models/generated/airbyte_incremental/test_normalization/nested_stream_with_c__lting_into_long_names.sql +++ 
b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/models/generated/airbyte_incremental/test_normalization/nested_stream_with_c__lting_into_long_names.sql @@ -19,5 +19,5 @@ from {{ ref('nested_stream_with_c__lting_into_long_names_scd') }} -- nested_stream_with_c__lting_into_long_names from {{ source('test_normalization', '_airbyte_raw_nested_stream_with_complex_columns_resulting_into_long_names') }} where 1 = 1 and _airbyte_active_row = 1 -{{ incremental_clause('_airbyte_emitted_at') }} +{{ incremental_clause('_airbyte_emitted_at', this) }} diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/models/generated/airbyte_incremental/test_normalization/nested_stream_with_c__lting_into_long_names_stg.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/models/generated/airbyte_incremental/test_normalization/nested_stream_with_c__lting_into_long_names_stg.sql index 69a21c2c6bff..8249fe95741a 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/models/generated/airbyte_incremental/test_normalization/nested_stream_with_c__lting_into_long_names_stg.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/models/generated/airbyte_incremental/test_normalization/nested_stream_with_c__lting_into_long_names_stg.sql @@ -10,11 +10,11 @@ select {{ dbt_utils.surrogate_key([ adapter.quote('id'), adapter.quote('date'), - adapter.quote('partition'), + object_to_string(adapter.quote('partition')), ]) }} as _airbyte_nested_stre__nto_long_names_hashid, tmp.* from {{ ref('nested_stream_with_c__lting_into_long_names_ab2') }} tmp -- nested_stream_with_c__lting_into_long_names where 1 = 1 -{{ 
incremental_clause('_airbyte_emitted_at') }} +{{ incremental_clause('_airbyte_emitted_at', this) }} diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/models/generated/airbyte_incremental/test_normalization/some_stream_that_was_empty.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/models/generated/airbyte_incremental/test_normalization/some_stream_that_was_empty.sql index 4c66d140893a..23bcd85bcf91 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/models/generated/airbyte_incremental/test_normalization/some_stream_that_was_empty.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/models/generated/airbyte_incremental/test_normalization/some_stream_that_was_empty.sql @@ -18,5 +18,5 @@ from {{ ref('some_stream_that_was_empty_scd') }} -- some_stream_that_was_empty from {{ source('test_normalization', '_airbyte_raw_some_stream_that_was_empty') }} where 1 = 1 and _airbyte_active_row = 1 -{{ incremental_clause('_airbyte_emitted_at') }} +{{ incremental_clause('_airbyte_emitted_at', this) }} diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/models/generated/airbyte_incremental/test_normalization/some_stream_that_was_empty_stg.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/models/generated/airbyte_incremental/test_normalization/some_stream_that_was_empty_stg.sql index a5849d296b63..ca645527eca8 100644 --- 
a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/models/generated/airbyte_incremental/test_normalization/some_stream_that_was_empty_stg.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/models/generated/airbyte_incremental/test_normalization/some_stream_that_was_empty_stg.sql @@ -15,5 +15,5 @@ select from {{ ref('some_stream_that_was_empty_ab2') }} tmp -- some_stream_that_was_empty where 1 = 1 -{{ incremental_clause('_airbyte_emitted_at') }} +{{ incremental_clause('_airbyte_emitted_at', this) }} diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/models/generated/airbyte_incremental/test_normalization_namespace/simple_stream_with_n__lting_into_long_names.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/models/generated/airbyte_incremental/test_normalization_namespace/simple_stream_with_n__lting_into_long_names.sql index e0900f1be28e..7f70fc83c616 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/models/generated/airbyte_incremental/test_normalization_namespace/simple_stream_with_n__lting_into_long_names.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/models/generated/airbyte_incremental/test_normalization_namespace/simple_stream_with_n__lting_into_long_names.sql @@ -16,5 +16,5 @@ select from {{ ref('simple_stream_with_n__lting_into_long_names_ab3') }} -- simple_stream_with_n__lting_into_long_names from {{ source('test_normalization_namespace', '_airbyte_raw_simple_stream_with_namespace_resulting_into_long_names') }} where 1 = 1 -{{ incremental_clause('_airbyte_emitted_at') }} +{{ 
incremental_clause('_airbyte_emitted_at', this) }} diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/second_output/airbyte_tables/test_normalization/conflict_stream_array.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/second_output/airbyte_tables/test_normalization/conflict_stream_array.sql index d5c47531a891..c1c6ab12a7b7 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/second_output/airbyte_tables/test_normalization/conflict_stream_array.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/second_output/airbyte_tables/test_normalization/conflict_stream_array.sql @@ -21,9 +21,7 @@ where 1 = 1 -- SQL model to cast each column to its adequate SQL type converted from the JSON schema type -- depends_on: __dbt__cte__conflict_stream_array_ab1 select - cast("id" as - varchar -) as "id", + cast("id" as text) as "id", conflict_stream_array, _airbyte_ab_id, _airbyte_emitted_at, @@ -36,13 +34,7 @@ where 1 = 1 -- SQL model to build a hash column based on the values of this record -- depends_on: __dbt__cte__conflict_stream_array_ab2 select - md5(cast(coalesce(cast("id" as - varchar -), '') || '-' || coalesce(cast(conflict_stream_array as - varchar -), '') as - varchar -)) as _airbyte_conflict_stream_array_hashid, + md5(cast(coalesce(cast("id" as text), '') || '-' || coalesce(cast(conflict_stream_array as text), '') as text)) as _airbyte_conflict_stream_array_hashid, tmp.* from __dbt__cte__conflict_stream_array_ab2 tmp -- conflict_stream_array diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/second_output/airbyte_tables/test_normalization/conflict_stream_name.sql 
b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/second_output/airbyte_tables/test_normalization/conflict_stream_name.sql index dba6f29e197c..ac5cffb8d00d 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/second_output/airbyte_tables/test_normalization/conflict_stream_name.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/second_output/airbyte_tables/test_normalization/conflict_stream_name.sql @@ -23,9 +23,7 @@ where 1 = 1 -- SQL model to cast each column to its adequate SQL type converted from the JSON schema type -- depends_on: __dbt__cte__conflict_stream_name_ab1 select - cast("id" as - varchar -) as "id", + cast("id" as text) as "id", cast(conflict_stream_name as jsonb ) as conflict_stream_name, @@ -40,13 +38,7 @@ where 1 = 1 -- SQL model to build a hash column based on the values of this record -- depends_on: __dbt__cte__conflict_stream_name_ab2 select - md5(cast(coalesce(cast("id" as - varchar -), '') || '-' || coalesce(cast(conflict_stream_name as - varchar -), '') as - varchar -)) as _airbyte_conflict_stream_name_hashid, + md5(cast(coalesce(cast("id" as text), '') || '-' || coalesce(cast(conflict_stream_name as text), '') as text)) as _airbyte_conflict_stream_name_hashid, tmp.* from __dbt__cte__conflict_stream_name_ab2 tmp -- conflict_stream_name diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/second_output/airbyte_tables/test_normalization/conflict_stream_name___conflict_stream_name.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/second_output/airbyte_tables/test_normalization/conflict_stream_name___conflict_stream_name.sql index 55404b797442..4aa2c420ed45 100644 --- 
a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/second_output/airbyte_tables/test_normalization/conflict_stream_name___conflict_stream_name.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/second_output/airbyte_tables/test_normalization/conflict_stream_name___conflict_stream_name.sql @@ -23,9 +23,7 @@ and conflict_stream_name is not null -- depends_on: __dbt__cte__conflict_stream_name___conflict_stream_name_ab1 select _airbyte_conflict_stream_name_2_hashid, - cast(groups as - varchar -) as groups, + cast(groups as text) as groups, _airbyte_ab_id, _airbyte_emitted_at, now() as _airbyte_normalized_at @@ -37,13 +35,7 @@ where 1 = 1 -- SQL model to build a hash column based on the values of this record -- depends_on: __dbt__cte__conflict_stream_name___conflict_stream_name_ab2 select - md5(cast(coalesce(cast(_airbyte_conflict_stream_name_2_hashid as - varchar -), '') || '-' || coalesce(cast(groups as - varchar -), '') as - varchar -)) as _airbyte_conflict_stream_name_3_hashid, + md5(cast(coalesce(cast(_airbyte_conflict_stream_name_2_hashid as text), '') || '-' || coalesce(cast(groups as text), '') as text)) as _airbyte_conflict_stream_name_3_hashid, tmp.* from __dbt__cte__conflict_stream_name___conflict_stream_name_ab2 tmp -- conflict_stream_name at conflict_stream_name/conflict_stream_name/conflict_stream_name diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/second_output/airbyte_tables/test_normalization/conflict_stream_name_conflict_stream_name.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/second_output/airbyte_tables/test_normalization/conflict_stream_name_conflict_stream_name.sql index ea9792be5a9f..82dfb023674e 100644 --- 
a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/second_output/airbyte_tables/test_normalization/conflict_stream_name_conflict_stream_name.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/second_output/airbyte_tables/test_normalization/conflict_stream_name_conflict_stream_name.sql @@ -39,13 +39,7 @@ where 1 = 1 -- SQL model to build a hash column based on the values of this record -- depends_on: __dbt__cte__conflict_stream_name_conflict_stream_name_ab2 select - md5(cast(coalesce(cast(_airbyte_conflict_stream_name_hashid as - varchar -), '') || '-' || coalesce(cast(conflict_stream_name as - varchar -), '') as - varchar -)) as _airbyte_conflict_stream_name_2_hashid, + md5(cast(coalesce(cast(_airbyte_conflict_stream_name_hashid as text), '') || '-' || coalesce(cast(conflict_stream_name as text), '') as text)) as _airbyte_conflict_stream_name_2_hashid, tmp.* from __dbt__cte__conflict_stream_name_conflict_stream_name_ab2 tmp -- conflict_stream_name at conflict_stream_name/conflict_stream_name diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/second_output/airbyte_tables/test_normalization/conflict_stream_scalar.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/second_output/airbyte_tables/test_normalization/conflict_stream_scalar.sql index fec20e8f1d5e..09a4fa01de97 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/second_output/airbyte_tables/test_normalization/conflict_stream_scalar.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/second_output/airbyte_tables/test_normalization/conflict_stream_scalar.sql 
@@ -21,9 +21,7 @@ where 1 = 1 -- SQL model to cast each column to its adequate SQL type converted from the JSON schema type -- depends_on: __dbt__cte__conflict_stream_scalar_ab1 select - cast("id" as - varchar -) as "id", + cast("id" as text) as "id", cast(conflict_stream_scalar as bigint ) as conflict_stream_scalar, @@ -38,13 +36,7 @@ where 1 = 1 -- SQL model to build a hash column based on the values of this record -- depends_on: __dbt__cte__conflict_stream_scalar_ab2 select - md5(cast(coalesce(cast("id" as - varchar -), '') || '-' || coalesce(cast(conflict_stream_scalar as - varchar -), '') as - varchar -)) as _airbyte_conflict_stream_scalar_hashid, + md5(cast(coalesce(cast("id" as text), '') || '-' || coalesce(cast(conflict_stream_scalar as text), '') as text)) as _airbyte_conflict_stream_scalar_hashid, tmp.* from __dbt__cte__conflict_stream_scalar_ab2 tmp -- conflict_stream_scalar diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/second_output/airbyte_tables/test_normalization/non_nested_stream_wi__lting_into_long_names.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/second_output/airbyte_tables/test_normalization/non_nested_stream_wi__lting_into_long_names.sql index 3b267eea4346..31d2176c3888 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/second_output/airbyte_tables/test_normalization/non_nested_stream_wi__lting_into_long_names.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/second_output/airbyte_tables/test_normalization/non_nested_stream_wi__lting_into_long_names.sql @@ -21,12 +21,8 @@ where 1 = 1 -- SQL model to cast each column to its adequate SQL type converted from the JSON schema type -- depends_on: 
__dbt__cte__non_nested_stream_wi__lting_into_long_names_ab1 select - cast("id" as - varchar -) as "id", - cast("date" as - varchar -) as "date", + cast("id" as text) as "id", + cast("date" as text) as "date", _airbyte_ab_id, _airbyte_emitted_at, now() as _airbyte_normalized_at @@ -38,13 +34,7 @@ where 1 = 1 -- SQL model to build a hash column based on the values of this record -- depends_on: __dbt__cte__non_nested_stream_wi__lting_into_long_names_ab2 select - md5(cast(coalesce(cast("id" as - varchar -), '') || '-' || coalesce(cast("date" as - varchar -), '') as - varchar -)) as _airbyte_non_nested___nto_long_names_hashid, + md5(cast(coalesce(cast("id" as text), '') || '-' || coalesce(cast("date" as text), '') as text)) as _airbyte_non_nested___nto_long_names_hashid, tmp.* from __dbt__cte__non_nested_stream_wi__lting_into_long_names_ab2 tmp -- non_nested_stream_wi__lting_into_long_names diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/second_output/airbyte_tables/test_normalization/unnest_alias.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/second_output/airbyte_tables/test_normalization/unnest_alias.sql index 4a7cb02c98d0..7af2f04f81f8 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/second_output/airbyte_tables/test_normalization/unnest_alias.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/second_output/airbyte_tables/test_normalization/unnest_alias.sql @@ -36,13 +36,7 @@ where 1 = 1 -- SQL model to build a hash column based on the values of this record -- depends_on: __dbt__cte__unnest_alias_ab2 select - md5(cast(coalesce(cast("id" as - varchar -), '') || '-' || coalesce(cast(children as - varchar -), '') as - varchar -)) as 
_airbyte_unnest_alias_hashid, + md5(cast(coalesce(cast("id" as text), '') || '-' || coalesce(cast(children as text), '') as text)) as _airbyte_unnest_alias_hashid, tmp.* from __dbt__cte__unnest_alias_ab2 tmp -- unnest_alias diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/second_output/airbyte_tables/test_normalization/unnest_alias_childre__column___with__quotes.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/second_output/airbyte_tables/test_normalization/unnest_alias_childre__column___with__quotes.sql index a3cbb5c562e7..6688069a62f0 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/second_output/airbyte_tables/test_normalization/unnest_alias_childre__column___with__quotes.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/second_output/airbyte_tables/test_normalization/unnest_alias_childre__column___with__quotes.sql @@ -29,9 +29,7 @@ and "column`_'with""_quotes" is not null -- depends_on: __dbt__cte__unnest_alias_childre__column___with__quotes_ab1 select _airbyte_owner_hashid, - cast(currency as - varchar -) as currency, + cast(currency as text) as currency, _airbyte_ab_id, _airbyte_emitted_at, now() as _airbyte_normalized_at @@ -43,13 +41,7 @@ where 1 = 1 -- SQL model to build a hash column based on the values of this record -- depends_on: __dbt__cte__unnest_alias_childre__column___with__quotes_ab2 select - md5(cast(coalesce(cast(_airbyte_owner_hashid as - varchar -), '') || '-' || coalesce(cast(currency as - varchar -), '') as - varchar -)) as _airbyte_column___with__quotes_hashid, + md5(cast(coalesce(cast(_airbyte_owner_hashid as text), '') || '-' || coalesce(cast(currency as text), '') as text)) as _airbyte_column___with__quotes_hashid, tmp.* 
from __dbt__cte__unnest_alias_childre__column___with__quotes_ab2 tmp -- column___with__quotes at unnest_alias/children/owner/column`_'with"_quotes diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/second_output/airbyte_tables/test_normalization/unnest_alias_children.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/second_output/airbyte_tables/test_normalization/unnest_alias_children.sql index a67bbcdbc1ef..779394d5765d 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/second_output/airbyte_tables/test_normalization/unnest_alias_children.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/second_output/airbyte_tables/test_normalization/unnest_alias_children.sql @@ -49,15 +49,7 @@ where 1 = 1 -- SQL model to build a hash column based on the values of this record -- depends_on: __dbt__cte__unnest_alias_children_ab2 select - md5(cast(coalesce(cast(_airbyte_unnest_alias_hashid as - varchar -), '') || '-' || coalesce(cast(ab_id as - varchar -), '') || '-' || coalesce(cast("owner" as - varchar -), '') as - varchar -)) as _airbyte_children_hashid, + md5(cast(coalesce(cast(_airbyte_unnest_alias_hashid as text), '') || '-' || coalesce(cast(ab_id as text), '') || '-' || coalesce(cast("owner" as text), '') as text)) as _airbyte_children_hashid, tmp.* from __dbt__cte__unnest_alias_children_ab2 tmp -- children at unnest_alias/children diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/second_output/airbyte_tables/test_normalization/unnest_alias_children_owner.sql 
b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/second_output/airbyte_tables/test_normalization/unnest_alias_children_owner.sql index 860b4d724bbb..651e1c11914e 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/second_output/airbyte_tables/test_normalization/unnest_alias_children_owner.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/second_output/airbyte_tables/test_normalization/unnest_alias_children_owner.sql @@ -39,15 +39,7 @@ where 1 = 1 -- SQL model to build a hash column based on the values of this record -- depends_on: __dbt__cte__unnest_alias_children_owner_ab2 select - md5(cast(coalesce(cast(_airbyte_children_hashid as - varchar -), '') || '-' || coalesce(cast(owner_id as - varchar -), '') || '-' || coalesce(cast("column`_'with""_quotes" as - varchar -), '') as - varchar -)) as _airbyte_owner_hashid, + md5(cast(coalesce(cast(_airbyte_children_hashid as text), '') || '-' || coalesce(cast(owner_id as text), '') || '-' || coalesce(cast("column`_'with""_quotes" as text), '') as text)) as _airbyte_owner_hashid, tmp.* from __dbt__cte__unnest_alias_children_owner_ab2 tmp -- owner at unnest_alias/children/owner diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/dbt_project.yml b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/dbt_project.yml index 88dde818dd4d..77cd51053747 100755 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/dbt_project.yml +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/dbt_project.yml @@ -1,45 +1,29 @@ -# This 
file is necessary to install dbt-utils with dbt deps -# the content will be overwritten by the transform function - -# Name your package! Package names should contain only lowercase characters -# and underscores. A good package name should reflect your organization's -# name or the intended use of these models -name: "airbyte_utils" +name: airbyte_utils version: "1.0" config-version: 2 - -# This setting configures which "profile" dbt uses for this project. Profiles contain -# database connection information, and should be configured in the ~/.dbt/profiles.yml file -profile: "normalize" - -# These configurations specify where dbt should look for different types of files. -# The `model-paths` config, for example, states that source models can be found -# in the "models/" directory. You probably won't need to change these! -model-paths: ["modified_models"] -docs-paths: ["docs"] -analysis-paths: ["analysis"] -test-paths: ["tests"] -seed-paths: ["data"] -macro-paths: ["macros"] - -target-path: "../build" # directory which will store compiled SQL files -log-path: "../logs" # directory which will store DBT logs -packages-install-path: "/dbt" # directory which will store external DBT dependencies - -clean-targets: # directories to be removed by `dbt clean` - - "build" - - "dbt_modules" - +profile: normalize +model-paths: + - modified_models +docs-paths: + - docs +analysis-paths: + - analysis +test-paths: + - tests +seed-paths: + - data +macro-paths: + - macros +target-path: ../build +log-path: ../logs +packages-install-path: /dbt +clean-targets: + - build + - dbt_modules quoting: database: true - # Temporarily disabling the behavior of the ExtendedNameTransformer on table/schema names, see (issue #1785) - # all schemas should be unquoted schema: false identifier: true - -# You can define configurations for models in the `model-paths` directory here. -# Using these configurations, you can enable or disable models, change how they -# are materialized, and more! 
models: airbyte_utils: +materialized: table @@ -57,7 +41,30 @@ models: airbyte_views: +tags: airbyte_internal_views +materialized: view - dispatch: - macro_namespace: dbt_utils - search_order: ["airbyte_utils", "dbt_utils"] + search_order: + - airbyte_utils + - dbt_utils +vars: + json_column: _airbyte_data + models_to_source: + exchange_rate_ab1: test_normalization._airbyte_raw_exchange_rate + exchange_rate_ab2: test_normalization._airbyte_raw_exchange_rate + exchange_rate_ab3: test_normalization._airbyte_raw_exchange_rate + exchange_rate: test_normalization._airbyte_raw_exchange_rate + dedup_exchange_rate_ab1: test_normalization._airbyte_raw_dedup_exchange_rate + dedup_exchange_rate_ab2: test_normalization._airbyte_raw_dedup_exchange_rate + dedup_exchange_rate_stg: test_normalization._airbyte_raw_dedup_exchange_rate + dedup_exchange_rate_scd: test_normalization._airbyte_raw_dedup_exchange_rate + dedup_exchange_rate: test_normalization._airbyte_raw_dedup_exchange_rate + renamed_dedup_cdc_excluded_ab1: test_normalization._airbyte_raw_renamed_dedup_cdc_excluded + renamed_dedup_cdc_excluded_ab2: test_normalization._airbyte_raw_renamed_dedup_cdc_excluded + renamed_dedup_cdc_excluded_stg: test_normalization._airbyte_raw_renamed_dedup_cdc_excluded + renamed_dedup_cdc_excluded_scd: test_normalization._airbyte_raw_renamed_dedup_cdc_excluded + renamed_dedup_cdc_excluded: test_normalization._airbyte_raw_renamed_dedup_cdc_excluded + dedup_cdc_excluded_ab1: test_normalization._airbyte_raw_dedup_cdc_excluded + dedup_cdc_excluded_ab2: test_normalization._airbyte_raw_dedup_cdc_excluded + dedup_cdc_excluded_stg: test_normalization._airbyte_raw_dedup_cdc_excluded + dedup_cdc_excluded_scd: test_normalization._airbyte_raw_dedup_cdc_excluded + dedup_cdc_excluded: test_normalization._airbyte_raw_dedup_cdc_excluded diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/first_dbt_project.yml 
b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/first_dbt_project.yml index 7631ef356dc9..200e87ca5ea7 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/first_dbt_project.yml +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/first_dbt_project.yml @@ -1,45 +1,29 @@ -# This file is necessary to install dbt-utils with dbt deps -# the content will be overwritten by the transform function - -# Name your package! Package names should contain only lowercase characters -# and underscores. A good package name should reflect your organization's -# name or the intended use of these models -name: "airbyte_utils" +name: airbyte_utils version: "1.0" config-version: 2 - -# This setting configures which "profile" dbt uses for this project. Profiles contain -# database connection information, and should be configured in the ~/.dbt/profiles.yml file -profile: "normalize" - -# These configurations specify where dbt should look for different types of files. -# The `model-paths` config, for example, states that source models can be found -# in the "models/" directory. You probably won't need to change these! 
-model-paths: ["models"] -docs-paths: ["docs"] -analysis-paths: ["analysis"] -test-paths: ["tests"] -seed-paths: ["data"] -macro-paths: ["macros"] - -target-path: "../build" # directory which will store compiled SQL files -log-path: "../logs" # directory which will store DBT logs -packages-install-path: "/dbt" # directory which will store external DBT dependencies - -clean-targets: # directories to be removed by `dbt clean` - - "build" - - "dbt_modules" - +profile: normalize +model-paths: + - models +docs-paths: + - docs +analysis-paths: + - analysis +test-paths: + - tests +seed-paths: + - data +macro-paths: + - macros +target-path: ../build +log-path: ../logs +packages-install-path: /dbt +clean-targets: + - build + - dbt_modules quoting: database: true - # Temporarily disabling the behavior of the ExtendedNameTransformer on table/schema names, see (issue #1785) - # all schemas should be unquoted schema: false identifier: true - -# You can define configurations for models in the `model-paths` directory here. -# Using these configurations, you can enable or disable models, change how they -# are materialized, and more! 
models: airbyte_utils: +materialized: table @@ -57,7 +41,45 @@ models: airbyte_views: +tags: airbyte_internal_views +materialized: view - dispatch: - macro_namespace: dbt_utils - search_order: ["airbyte_utils", "dbt_utils"] + search_order: + - airbyte_utils + - dbt_utils +vars: + json_column: _airbyte_data + models_to_source: + exchange_rate_ab1: test_normalization._airbyte_raw_exchange_rate + exchange_rate_ab2: test_normalization._airbyte_raw_exchange_rate + exchange_rate_ab3: test_normalization._airbyte_raw_exchange_rate + exchange_rate: test_normalization._airbyte_raw_exchange_rate + dedup_exchange_rate_ab1: test_normalization._airbyte_raw_dedup_exchange_rate + dedup_exchange_rate_ab2: test_normalization._airbyte_raw_dedup_exchange_rate + dedup_exchange_rate_stg: test_normalization._airbyte_raw_dedup_exchange_rate + dedup_exchange_rate_scd: test_normalization._airbyte_raw_dedup_exchange_rate + dedup_exchange_rate: test_normalization._airbyte_raw_dedup_exchange_rate + renamed_dedup_cdc_excluded_ab1: test_normalization._airbyte_raw_renamed_dedup_cdc_excluded + renamed_dedup_cdc_excluded_ab2: test_normalization._airbyte_raw_renamed_dedup_cdc_excluded + renamed_dedup_cdc_excluded_stg: test_normalization._airbyte_raw_renamed_dedup_cdc_excluded + renamed_dedup_cdc_excluded_scd: test_normalization._airbyte_raw_renamed_dedup_cdc_excluded + renamed_dedup_cdc_excluded: test_normalization._airbyte_raw_renamed_dedup_cdc_excluded + dedup_cdc_excluded_ab1: test_normalization._airbyte_raw_dedup_cdc_excluded + dedup_cdc_excluded_ab2: test_normalization._airbyte_raw_dedup_cdc_excluded + dedup_cdc_excluded_stg: test_normalization._airbyte_raw_dedup_cdc_excluded + dedup_cdc_excluded_scd: test_normalization._airbyte_raw_dedup_cdc_excluded + dedup_cdc_excluded: test_normalization._airbyte_raw_dedup_cdc_excluded + pos_dedup_cdcx_ab1: test_normalization._airbyte_raw_pos_dedup_cdcx + pos_dedup_cdcx_ab2: test_normalization._airbyte_raw_pos_dedup_cdcx + pos_dedup_cdcx_stg: 
test_normalization._airbyte_raw_pos_dedup_cdcx + pos_dedup_cdcx_scd: test_normalization._airbyte_raw_pos_dedup_cdcx + pos_dedup_cdcx: test_normalization._airbyte_raw_pos_dedup_cdcx + 1_prefix_startwith_number_ab1: test_normalization._airbyte_raw_1_prefix_startwith_number + 1_prefix_startwith_number_ab2: test_normalization._airbyte_raw_1_prefix_startwith_number + 1_prefix_startwith_number_stg: test_normalization._airbyte_raw_1_prefix_startwith_number + 1_prefix_startwith_number_scd: test_normalization._airbyte_raw_1_prefix_startwith_number + 1_prefix_startwith_number: test_normalization._airbyte_raw_1_prefix_startwith_number + multiple_column_names_conflicts_ab1: test_normalization._airbyte_raw_multiple_column_names_conflicts + multiple_column_names_conflicts_ab2: test_normalization._airbyte_raw_multiple_column_names_conflicts + multiple_column_names_conflicts_stg: test_normalization._airbyte_raw_multiple_column_names_conflicts + multiple_column_names_conflicts_scd: test_normalization._airbyte_raw_multiple_column_names_conflicts + multiple_column_names_conflicts: test_normalization._airbyte_raw_multiple_column_names_conflicts diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/first_output/airbyte_incremental/scd/test_normalization/1_prefix_startwith_number_scd.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/first_output/airbyte_incremental/scd/test_normalization/1_prefix_startwith_number_scd.sql index 203534b3d53b..dac6628377db 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/first_output/airbyte_incremental/scd/test_normalization/1_prefix_startwith_number_scd.sql +++ 
b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/first_output/airbyte_incremental/scd/test_normalization/1_prefix_startwith_number_scd.sql @@ -16,11 +16,7 @@ input_data as ( scd_data as ( -- SQL model to build a Type 2 Slowly Changing Dimension (SCD) table for each record identified by their primary key select - md5(cast(coalesce(cast("id" as - varchar -), '') as - varchar -)) as _airbyte_unique_key, + md5(cast(coalesce(cast("id" as text), '') as text)) as _airbyte_unique_key, "id", "date", "text", @@ -55,15 +51,7 @@ dedup_data as ( _airbyte_emitted_at order by _airbyte_active_row desc, _airbyte_ab_id ) as _airbyte_row_num, - md5(cast(coalesce(cast(_airbyte_unique_key as - varchar -), '') || '-' || coalesce(cast(_airbyte_start_at as - varchar -), '') || '-' || coalesce(cast(_airbyte_emitted_at as - varchar -), '') as - varchar -)) as _airbyte_unique_key_scd, + md5(cast(coalesce(cast(_airbyte_unique_key as text), '') || '-' || coalesce(cast(_airbyte_start_at as text), '') || '-' || coalesce(cast(_airbyte_emitted_at as text), '') as text)) as _airbyte_unique_key_scd, scd_data.* from scd_data ) diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/first_output/airbyte_incremental/scd/test_normalization/dedup_cdc_excluded_scd.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/first_output/airbyte_incremental/scd/test_normalization/dedup_cdc_excluded_scd.sql index c9c2e087d956..ba66363a77f5 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/first_output/airbyte_incremental/scd/test_normalization/dedup_cdc_excluded_scd.sql +++ 
b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/first_output/airbyte_incremental/scd/test_normalization/dedup_cdc_excluded_scd.sql @@ -16,11 +16,7 @@ input_data as ( scd_data as ( -- SQL model to build a Type 2 Slowly Changing Dimension (SCD) table for each record identified by their primary key select - md5(cast(coalesce(cast("id" as - varchar -), '') as - varchar -)) as _airbyte_unique_key, + md5(cast(coalesce(cast("id" as text), '') as text)) as _airbyte_unique_key, "id", "name", _ab_cdc_lsn, @@ -56,26 +52,10 @@ dedup_data as ( partition by _airbyte_unique_key, _airbyte_start_at, - _airbyte_emitted_at, cast(_ab_cdc_deleted_at as - varchar -), cast(_ab_cdc_updated_at as - varchar -) + _airbyte_emitted_at, cast(_ab_cdc_deleted_at as text), cast(_ab_cdc_updated_at as text) order by _airbyte_active_row desc, _airbyte_ab_id ) as _airbyte_row_num, - md5(cast(coalesce(cast(_airbyte_unique_key as - varchar -), '') || '-' || coalesce(cast(_airbyte_start_at as - varchar -), '') || '-' || coalesce(cast(_airbyte_emitted_at as - varchar -), '') || '-' || coalesce(cast(_ab_cdc_deleted_at as - varchar -), '') || '-' || coalesce(cast(_ab_cdc_updated_at as - varchar -), '') as - varchar -)) as _airbyte_unique_key_scd, + md5(cast(coalesce(cast(_airbyte_unique_key as text), '') || '-' || coalesce(cast(_airbyte_start_at as text), '') || '-' || coalesce(cast(_airbyte_emitted_at as text), '') || '-' || coalesce(cast(_ab_cdc_deleted_at as text), '') || '-' || coalesce(cast(_ab_cdc_updated_at as text), '') as text)) as _airbyte_unique_key_scd, scd_data.* from scd_data ) diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/first_output/airbyte_incremental/scd/test_normalization/dedup_exchange_rate_scd.sql 
b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/first_output/airbyte_incremental/scd/test_normalization/dedup_exchange_rate_scd.sql index 3db3150ff276..c9440958247d 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/first_output/airbyte_incremental/scd/test_normalization/dedup_exchange_rate_scd.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/first_output/airbyte_incremental/scd/test_normalization/dedup_exchange_rate_scd.sql @@ -16,15 +16,7 @@ input_data as ( scd_data as ( -- SQL model to build a Type 2 Slowly Changing Dimension (SCD) table for each record identified by their primary key select - md5(cast(coalesce(cast("id" as - varchar -), '') || '-' || coalesce(cast(currency as - varchar -), '') || '-' || coalesce(cast(nzd as - varchar -), '') as - varchar -)) as _airbyte_unique_key, + md5(cast(coalesce(cast("id" as text), '') || '-' || coalesce(cast(currency as text), '') || '-' || coalesce(cast(nzd as text), '') as text)) as _airbyte_unique_key, "id", currency, "date", @@ -35,18 +27,14 @@ scd_data as ( usd, "date" as _airbyte_start_at, lag("date") over ( - partition by "id", currency, cast(nzd as - varchar -) + partition by "id", currency, cast(nzd as text) order by "date" is null asc, "date" desc, _airbyte_emitted_at desc ) as _airbyte_end_at, case when row_number() over ( - partition by "id", currency, cast(nzd as - varchar -) + partition by "id", currency, cast(nzd as text) order by "date" is null asc, "date" desc, @@ -68,15 +56,7 @@ dedup_data as ( _airbyte_emitted_at order by _airbyte_active_row desc, _airbyte_ab_id ) as _airbyte_row_num, - md5(cast(coalesce(cast(_airbyte_unique_key as - varchar -), '') || '-' || coalesce(cast(_airbyte_start_at as - varchar -), '') || '-' || coalesce(cast(_airbyte_emitted_at as - varchar -), 
'') as - varchar -)) as _airbyte_unique_key_scd, + md5(cast(coalesce(cast(_airbyte_unique_key as text), '') || '-' || coalesce(cast(_airbyte_start_at as text), '') || '-' || coalesce(cast(_airbyte_emitted_at as text), '') as text)) as _airbyte_unique_key_scd, scd_data.* from scd_data ) diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/first_output/airbyte_incremental/scd/test_normalization/multiple_column_names_conflicts_scd.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/first_output/airbyte_incremental/scd/test_normalization/multiple_column_names_conflicts_scd.sql index e94644c18a17..9eb7e6e349ab 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/first_output/airbyte_incremental/scd/test_normalization/multiple_column_names_conflicts_scd.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/first_output/airbyte_incremental/scd/test_normalization/multiple_column_names_conflicts_scd.sql @@ -16,11 +16,7 @@ input_data as ( scd_data as ( -- SQL model to build a Type 2 Slowly Changing Dimension (SCD) table for each record identified by their primary key select - md5(cast(coalesce(cast("id" as - varchar -), '') as - varchar -)) as _airbyte_unique_key, + md5(cast(coalesce(cast("id" as text), '') as text)) as _airbyte_unique_key, "id", "User Id", user_id, @@ -59,15 +55,7 @@ dedup_data as ( _airbyte_emitted_at order by _airbyte_active_row desc, _airbyte_ab_id ) as _airbyte_row_num, - md5(cast(coalesce(cast(_airbyte_unique_key as - varchar -), '') || '-' || coalesce(cast(_airbyte_start_at as - varchar -), '') || '-' || coalesce(cast(_airbyte_emitted_at as - varchar -), '') as - varchar -)) as _airbyte_unique_key_scd, + md5(cast(coalesce(cast(_airbyte_unique_key as 
text), '') || '-' || coalesce(cast(_airbyte_start_at as text), '') || '-' || coalesce(cast(_airbyte_emitted_at as text), '') as text)) as _airbyte_unique_key_scd, scd_data.* from scd_data ) diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/first_output/airbyte_incremental/scd/test_normalization/pos_dedup_cdcx_scd.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/first_output/airbyte_incremental/scd/test_normalization/pos_dedup_cdcx_scd.sql index 438b303238b5..450815d1ccc5 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/first_output/airbyte_incremental/scd/test_normalization/pos_dedup_cdcx_scd.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/first_output/airbyte_incremental/scd/test_normalization/pos_dedup_cdcx_scd.sql @@ -16,11 +16,7 @@ input_data as ( scd_data as ( -- SQL model to build a Type 2 Slowly Changing Dimension (SCD) table for each record identified by their primary key select - md5(cast(coalesce(cast("id" as - varchar -), '') as - varchar -)) as _airbyte_unique_key, + md5(cast(coalesce(cast("id" as text), '') as text)) as _airbyte_unique_key, "id", "name", _ab_cdc_lsn, @@ -59,30 +55,10 @@ dedup_data as ( partition by _airbyte_unique_key, _airbyte_start_at, - _airbyte_emitted_at, cast(_ab_cdc_deleted_at as - varchar -), cast(_ab_cdc_updated_at as - varchar -), cast(_ab_cdc_log_pos as - varchar -) + _airbyte_emitted_at, cast(_ab_cdc_deleted_at as text), cast(_ab_cdc_updated_at as text), cast(_ab_cdc_log_pos as text) order by _airbyte_active_row desc, _airbyte_ab_id ) as _airbyte_row_num, - md5(cast(coalesce(cast(_airbyte_unique_key as - varchar -), '') || '-' || coalesce(cast(_airbyte_start_at as - varchar -), '') || '-' || 
coalesce(cast(_airbyte_emitted_at as - varchar -), '') || '-' || coalesce(cast(_ab_cdc_deleted_at as - varchar -), '') || '-' || coalesce(cast(_ab_cdc_updated_at as - varchar -), '') || '-' || coalesce(cast(_ab_cdc_log_pos as - varchar -), '') as - varchar -)) as _airbyte_unique_key_scd, + md5(cast(coalesce(cast(_airbyte_unique_key as text), '') || '-' || coalesce(cast(_airbyte_start_at as text), '') || '-' || coalesce(cast(_airbyte_emitted_at as text), '') || '-' || coalesce(cast(_ab_cdc_deleted_at as text), '') || '-' || coalesce(cast(_ab_cdc_updated_at as text), '') || '-' || coalesce(cast(_ab_cdc_log_pos as text), '') as text)) as _airbyte_unique_key_scd, scd_data.* from scd_data ) diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/first_output/airbyte_incremental/scd/test_normalization/renamed_dedup_cdc_excluded_scd.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/first_output/airbyte_incremental/scd/test_normalization/renamed_dedup_cdc_excluded_scd.sql index 414ed447cc0b..31e25e700b60 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/first_output/airbyte_incremental/scd/test_normalization/renamed_dedup_cdc_excluded_scd.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/first_output/airbyte_incremental/scd/test_normalization/renamed_dedup_cdc_excluded_scd.sql @@ -16,11 +16,7 @@ input_data as ( scd_data as ( -- SQL model to build a Type 2 Slowly Changing Dimension (SCD) table for each record identified by their primary key select - md5(cast(coalesce(cast("id" as - varchar -), '') as - varchar -)) as _airbyte_unique_key, + md5(cast(coalesce(cast("id" as text), '') as text)) as _airbyte_unique_key, "id", _ab_cdc_updated_at, _ab_cdc_updated_at as 
_airbyte_start_at, @@ -54,15 +50,7 @@ dedup_data as ( _airbyte_emitted_at order by _airbyte_active_row desc, _airbyte_ab_id ) as _airbyte_row_num, - md5(cast(coalesce(cast(_airbyte_unique_key as - varchar -), '') || '-' || coalesce(cast(_airbyte_start_at as - varchar -), '') || '-' || coalesce(cast(_airbyte_emitted_at as - varchar -), '') as - varchar -)) as _airbyte_unique_key_scd, + md5(cast(coalesce(cast(_airbyte_unique_key as text), '') || '-' || coalesce(cast(_airbyte_start_at as text), '') || '-' || coalesce(cast(_airbyte_emitted_at as text), '') as text)) as _airbyte_unique_key_scd, scd_data.* from scd_data ) diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/first_output/airbyte_incremental/test_normalization/1_prefix_startwith_number_stg.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/first_output/airbyte_incremental/test_normalization/1_prefix_startwith_number_stg.sql index 1d6a4096615f..94b51fa8be0b 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/first_output/airbyte_incremental/test_normalization/1_prefix_startwith_number_stg.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/first_output/airbyte_incremental/test_normalization/1_prefix_startwith_number_stg.sql @@ -30,9 +30,7 @@ select cast(nullif("date", '') as date ) as "date", - cast("text" as - varchar -) as "text", + cast("text" as text) as "text", _airbyte_ab_id, _airbyte_emitted_at, now() as _airbyte_normalized_at @@ -43,15 +41,7 @@ where 1 = 1 )-- SQL model to build a hash column based on the values of this record -- depends_on: __dbt__cte__1_prefix_startwith_number_ab2 select - md5(cast(coalesce(cast("id" as - varchar -), '') || '-' || coalesce(cast("date" as - varchar -), '') || 
'-' || coalesce(cast("text" as - varchar -), '') as - varchar -)) as _airbyte_1_prefix_startwith_number_hashid, + md5(cast(coalesce(cast("id" as text), '') || '-' || coalesce(cast("date" as text), '') || '-' || coalesce(cast("text" as text), '') as text)) as _airbyte_1_prefix_startwith_number_hashid, tmp.* from __dbt__cte__1_prefix_startwith_number_ab2 tmp -- 1_prefix_startwith_number diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/first_output/airbyte_incremental/test_normalization/dedup_cdc_excluded_stg.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/first_output/airbyte_incremental/test_normalization/dedup_cdc_excluded_stg.sql index 6eaa134afe67..1c688fb2faa5 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/first_output/airbyte_incremental/test_normalization/dedup_cdc_excluded_stg.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/first_output/airbyte_incremental/test_normalization/dedup_cdc_excluded_stg.sql @@ -29,9 +29,7 @@ select cast("id" as bigint ) as "id", - cast("name" as - varchar -) as "name", + cast("name" as text) as "name", cast(_ab_cdc_lsn as float ) as _ab_cdc_lsn, @@ -51,19 +49,7 @@ where 1 = 1 )-- SQL model to build a hash column based on the values of this record -- depends_on: __dbt__cte__dedup_cdc_excluded_ab2 select - md5(cast(coalesce(cast("id" as - varchar -), '') || '-' || coalesce(cast("name" as - varchar -), '') || '-' || coalesce(cast(_ab_cdc_lsn as - varchar -), '') || '-' || coalesce(cast(_ab_cdc_updated_at as - varchar -), '') || '-' || coalesce(cast(_ab_cdc_deleted_at as - varchar -), '') as - varchar -)) as _airbyte_dedup_cdc_excluded_hashid, + md5(cast(coalesce(cast("id" as text), '') || '-' || 
coalesce(cast("name" as text), '') || '-' || coalesce(cast(_ab_cdc_lsn as text), '') || '-' || coalesce(cast(_ab_cdc_updated_at as text), '') || '-' || coalesce(cast(_ab_cdc_deleted_at as text), '') as text)) as _airbyte_dedup_cdc_excluded_hashid, tmp.* from __dbt__cte__dedup_cdc_excluded_ab2 tmp -- dedup_cdc_excluded diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/first_output/airbyte_incremental/test_normalization/dedup_exchange_rate_stg.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/first_output/airbyte_incremental/test_normalization/dedup_exchange_rate_stg.sql index 55fe38117c0d..128ec051327d 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/first_output/airbyte_incremental/test_normalization/dedup_exchange_rate_stg.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/first_output/airbyte_incremental/test_normalization/dedup_exchange_rate_stg.sql @@ -32,9 +32,7 @@ select cast("id" as bigint ) as "id", - cast(currency as - varchar -) as currency, + cast(currency as text) as currency, cast(nullif("date", '') as date ) as "date", @@ -44,9 +42,7 @@ select cast("HKD@spƩƧiƤl & characters" as float ) as "HKD@spƩƧiƤl & characters", - cast(hkd_special___characters as - varchar -) as hkd_special___characters, + cast(hkd_special___characters as text) as hkd_special___characters, cast(nzd as float ) as nzd, @@ -63,25 +59,7 @@ where 1 = 1 )-- SQL model to build a hash column based on the values of this record -- depends_on: __dbt__cte__dedup_exchange_rate_ab2 select - md5(cast(coalesce(cast("id" as - varchar -), '') || '-' || coalesce(cast(currency as - varchar -), '') || '-' || coalesce(cast("date" as - varchar -), '') || '-' || coalesce(cast(timestamp_col as 
- varchar -), '') || '-' || coalesce(cast("HKD@spƩƧiƤl & characters" as - varchar -), '') || '-' || coalesce(cast(hkd_special___characters as - varchar -), '') || '-' || coalesce(cast(nzd as - varchar -), '') || '-' || coalesce(cast(usd as - varchar -), '') as - varchar -)) as _airbyte_dedup_exchange_rate_hashid, + md5(cast(coalesce(cast("id" as text), '') || '-' || coalesce(cast(currency as text), '') || '-' || coalesce(cast("date" as text), '') || '-' || coalesce(cast(timestamp_col as text), '') || '-' || coalesce(cast("HKD@spƩƧiƤl & characters" as text), '') || '-' || coalesce(cast(hkd_special___characters as text), '') || '-' || coalesce(cast(nzd as text), '') || '-' || coalesce(cast(usd as text), '') as text)) as _airbyte_dedup_exchange_rate_hashid, tmp.* from __dbt__cte__dedup_exchange_rate_ab2 tmp -- dedup_exchange_rate diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/first_output/airbyte_incremental/test_normalization/multiple_column_names_conflicts_stg.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/first_output/airbyte_incremental/test_normalization/multiple_column_names_conflicts_stg.sql index 7a2c133f995f..dbb4726faf8f 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/first_output/airbyte_incremental/test_normalization/multiple_column_names_conflicts_stg.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/first_output/airbyte_incremental/test_normalization/multiple_column_names_conflicts_stg.sql @@ -31,9 +31,7 @@ select cast("id" as bigint ) as "id", - cast("User Id" as - varchar -) as "User Id", + cast("User Id" as text) as "User Id", cast(user_id as float ) as user_id, @@ -43,9 +41,7 @@ select cast("user id" as float ) as "user id", - 
cast("User@Id" as - varchar -) as "User@Id", + cast("User@Id" as text) as "User@Id", cast(userid as float ) as userid, @@ -59,23 +55,7 @@ where 1 = 1 )-- SQL model to build a hash column based on the values of this record -- depends_on: __dbt__cte__multiple_column_names_conflicts_ab2 select - md5(cast(coalesce(cast("id" as - varchar -), '') || '-' || coalesce(cast("User Id" as - varchar -), '') || '-' || coalesce(cast(user_id as - varchar -), '') || '-' || coalesce(cast("User id" as - varchar -), '') || '-' || coalesce(cast("user id" as - varchar -), '') || '-' || coalesce(cast("User@Id" as - varchar -), '') || '-' || coalesce(cast(userid as - varchar -), '') as - varchar -)) as _airbyte_multiple_co__ames_conflicts_hashid, + md5(cast(coalesce(cast("id" as text), '') || '-' || coalesce(cast("User Id" as text), '') || '-' || coalesce(cast(user_id as text), '') || '-' || coalesce(cast("User id" as text), '') || '-' || coalesce(cast("user id" as text), '') || '-' || coalesce(cast("User@Id" as text), '') || '-' || coalesce(cast(userid as text), '') as text)) as _airbyte_multiple_co__ames_conflicts_hashid, tmp.* from __dbt__cte__multiple_column_names_conflicts_ab2 tmp -- multiple_column_names_conflicts diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/first_output/airbyte_incremental/test_normalization/pos_dedup_cdcx_stg.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/first_output/airbyte_incremental/test_normalization/pos_dedup_cdcx_stg.sql index d3cbb9433c93..1b28a6bd09dd 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/first_output/airbyte_incremental/test_normalization/pos_dedup_cdcx_stg.sql +++ 
b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/first_output/airbyte_incremental/test_normalization/pos_dedup_cdcx_stg.sql @@ -30,9 +30,7 @@ select cast("id" as bigint ) as "id", - cast("name" as - varchar -) as "name", + cast("name" as text) as "name", cast(_ab_cdc_lsn as float ) as _ab_cdc_lsn, @@ -55,21 +53,7 @@ where 1 = 1 )-- SQL model to build a hash column based on the values of this record -- depends_on: __dbt__cte__pos_dedup_cdcx_ab2 select - md5(cast(coalesce(cast("id" as - varchar -), '') || '-' || coalesce(cast("name" as - varchar -), '') || '-' || coalesce(cast(_ab_cdc_lsn as - varchar -), '') || '-' || coalesce(cast(_ab_cdc_updated_at as - varchar -), '') || '-' || coalesce(cast(_ab_cdc_deleted_at as - varchar -), '') || '-' || coalesce(cast(_ab_cdc_log_pos as - varchar -), '') as - varchar -)) as _airbyte_pos_dedup_cdcx_hashid, + md5(cast(coalesce(cast("id" as text), '') || '-' || coalesce(cast("name" as text), '') || '-' || coalesce(cast(_ab_cdc_lsn as text), '') || '-' || coalesce(cast(_ab_cdc_updated_at as text), '') || '-' || coalesce(cast(_ab_cdc_deleted_at as text), '') || '-' || coalesce(cast(_ab_cdc_log_pos as text), '') as text)) as _airbyte_pos_dedup_cdcx_hashid, tmp.* from __dbt__cte__pos_dedup_cdcx_ab2 tmp -- pos_dedup_cdcx diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/first_output/airbyte_incremental/test_normalization/renamed_dedup_cdc_excluded_stg.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/first_output/airbyte_incremental/test_normalization/renamed_dedup_cdc_excluded_stg.sql index 8fb3cb3a5c34..7fba3805f396 100644 --- 
a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/first_output/airbyte_incremental/test_normalization/renamed_dedup_cdc_excluded_stg.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/first_output/airbyte_incremental/test_normalization/renamed_dedup_cdc_excluded_stg.sql @@ -39,13 +39,7 @@ where 1 = 1 )-- SQL model to build a hash column based on the values of this record -- depends_on: __dbt__cte__renamed_dedup_cdc_excluded_ab2 select - md5(cast(coalesce(cast("id" as - varchar -), '') || '-' || coalesce(cast(_ab_cdc_updated_at as - varchar -), '') as - varchar -)) as _airbyte_renamed_dedup_cdc_excluded_hashid, + md5(cast(coalesce(cast("id" as text), '') || '-' || coalesce(cast(_ab_cdc_updated_at as text), '') as text)) as _airbyte_renamed_dedup_cdc_excluded_hashid, tmp.* from __dbt__cte__renamed_dedup_cdc_excluded_ab2 tmp -- renamed_dedup_cdc_excluded diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/first_output/airbyte_tables/test_normalization/exchange_rate.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/first_output/airbyte_tables/test_normalization/exchange_rate.sql index 7d795f97e67e..2a24e704fda2 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/first_output/airbyte_tables/test_normalization/exchange_rate.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/first_output/airbyte_tables/test_normalization/exchange_rate.sql @@ -31,9 +31,7 @@ select cast("id" as bigint ) as "id", - cast(currency as - varchar -) as currency, + cast(currency as text) as currency, cast(nullif("date", '') as date ) as "date", 
@@ -43,18 +41,14 @@ select cast("HKD@spƩƧiƤl & characters" as float ) as "HKD@spƩƧiƤl & characters", - cast(hkd_special___characters as - varchar -) as hkd_special___characters, + cast(hkd_special___characters as text) as hkd_special___characters, cast(nzd as float ) as nzd, cast(usd as float ) as usd, - cast("column`_'with""_quotes" as - varchar -) as "column`_'with""_quotes", + cast("column`_'with""_quotes" as text) as "column`_'with""_quotes", _airbyte_ab_id, _airbyte_emitted_at, now() as _airbyte_normalized_at @@ -66,27 +60,7 @@ where 1 = 1 -- SQL model to build a hash column based on the values of this record -- depends_on: __dbt__cte__exchange_rate_ab2 select - md5(cast(coalesce(cast("id" as - varchar -), '') || '-' || coalesce(cast(currency as - varchar -), '') || '-' || coalesce(cast("date" as - varchar -), '') || '-' || coalesce(cast(timestamp_col as - varchar -), '') || '-' || coalesce(cast("HKD@spƩƧiƤl & characters" as - varchar -), '') || '-' || coalesce(cast(hkd_special___characters as - varchar -), '') || '-' || coalesce(cast(nzd as - varchar -), '') || '-' || coalesce(cast(usd as - varchar -), '') || '-' || coalesce(cast("column`_'with""_quotes" as - varchar -), '') as - varchar -)) as _airbyte_exchange_rate_hashid, + md5(cast(coalesce(cast("id" as text), '') || '-' || coalesce(cast(currency as text), '') || '-' || coalesce(cast("date" as text), '') || '-' || coalesce(cast(timestamp_col as text), '') || '-' || coalesce(cast("HKD@spƩƧiƤl & characters" as text), '') || '-' || coalesce(cast(hkd_special___characters as text), '') || '-' || coalesce(cast(nzd as text), '') || '-' || coalesce(cast(usd as text), '') || '-' || coalesce(cast("column`_'with""_quotes" as text), '') as text)) as _airbyte_exchange_rate_hashid, tmp.* from __dbt__cte__exchange_rate_ab2 tmp -- exchange_rate diff --git 
a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/models/generated/airbyte_ctes/test_normalization/1_prefix_startwith_number_ab1.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/models/generated/airbyte_ctes/test_normalization/1_prefix_startwith_number_ab1.sql index 080ffcc0b14c..f6697dcec757 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/models/generated/airbyte_ctes/test_normalization/1_prefix_startwith_number_ab1.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/models/generated/airbyte_ctes/test_normalization/1_prefix_startwith_number_ab1.sql @@ -16,5 +16,5 @@ select from {{ source('test_normalization', '_airbyte_raw_1_prefix_startwith_number') }} as table_alias -- 1_prefix_startwith_number where 1 = 1 -{{ incremental_clause('_airbyte_emitted_at') }} +{{ incremental_clause('_airbyte_emitted_at', this) }} diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/models/generated/airbyte_ctes/test_normalization/1_prefix_startwith_number_ab2.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/models/generated/airbyte_ctes/test_normalization/1_prefix_startwith_number_ab2.sql index 5402072233ba..a9dd51672585 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/models/generated/airbyte_ctes/test_normalization/1_prefix_startwith_number_ab2.sql +++ 
b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/models/generated/airbyte_ctes/test_normalization/1_prefix_startwith_number_ab2.sql @@ -16,5 +16,5 @@ select from {{ ref('1_prefix_startwith_number_ab1') }} -- 1_prefix_startwith_number where 1 = 1 -{{ incremental_clause('_airbyte_emitted_at') }} +{{ incremental_clause('_airbyte_emitted_at', this) }} diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/models/generated/airbyte_ctes/test_normalization/dedup_cdc_excluded_ab1.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/models/generated/airbyte_ctes/test_normalization/dedup_cdc_excluded_ab1.sql index 5f212003c29f..99a03831a8ba 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/models/generated/airbyte_ctes/test_normalization/dedup_cdc_excluded_ab1.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/models/generated/airbyte_ctes/test_normalization/dedup_cdc_excluded_ab1.sql @@ -18,5 +18,5 @@ select from {{ source('test_normalization', '_airbyte_raw_dedup_cdc_excluded') }} as table_alias -- dedup_cdc_excluded where 1 = 1 -{{ incremental_clause('_airbyte_emitted_at') }} +{{ incremental_clause('_airbyte_emitted_at', this) }} diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/models/generated/airbyte_ctes/test_normalization/dedup_cdc_excluded_ab2.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/models/generated/airbyte_ctes/test_normalization/dedup_cdc_excluded_ab2.sql index fb5d23a430df..3d8803e27a66 100644 --- 
a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/models/generated/airbyte_ctes/test_normalization/dedup_cdc_excluded_ab2.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/models/generated/airbyte_ctes/test_normalization/dedup_cdc_excluded_ab2.sql @@ -18,5 +18,5 @@ select from {{ ref('dedup_cdc_excluded_ab1') }} -- dedup_cdc_excluded where 1 = 1 -{{ incremental_clause('_airbyte_emitted_at') }} +{{ incremental_clause('_airbyte_emitted_at', this) }} diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/models/generated/airbyte_ctes/test_normalization/dedup_exchange_rate_ab1.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/models/generated/airbyte_ctes/test_normalization/dedup_exchange_rate_ab1.sql index 49d750afb636..5009554c3391 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/models/generated/airbyte_ctes/test_normalization/dedup_exchange_rate_ab1.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/models/generated/airbyte_ctes/test_normalization/dedup_exchange_rate_ab1.sql @@ -21,5 +21,5 @@ select from {{ source('test_normalization', '_airbyte_raw_dedup_exchange_rate') }} as table_alias -- dedup_exchange_rate where 1 = 1 -{{ incremental_clause('_airbyte_emitted_at') }} +{{ incremental_clause('_airbyte_emitted_at', this) }} diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/models/generated/airbyte_ctes/test_normalization/dedup_exchange_rate_ab2.sql 
b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/models/generated/airbyte_ctes/test_normalization/dedup_exchange_rate_ab2.sql index a6f5b4c6fda6..187fc05ccc6f 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/models/generated/airbyte_ctes/test_normalization/dedup_exchange_rate_ab2.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/models/generated/airbyte_ctes/test_normalization/dedup_exchange_rate_ab2.sql @@ -21,5 +21,5 @@ select from {{ ref('dedup_exchange_rate_ab1') }} -- dedup_exchange_rate where 1 = 1 -{{ incremental_clause('_airbyte_emitted_at') }} +{{ incremental_clause('_airbyte_emitted_at', this) }} diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/models/generated/airbyte_ctes/test_normalization/multiple_column_names_conflicts_ab1.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/models/generated/airbyte_ctes/test_normalization/multiple_column_names_conflicts_ab1.sql index 7268a550c156..3444e2fe46f9 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/models/generated/airbyte_ctes/test_normalization/multiple_column_names_conflicts_ab1.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/models/generated/airbyte_ctes/test_normalization/multiple_column_names_conflicts_ab1.sql @@ -20,5 +20,5 @@ select from {{ source('test_normalization', '_airbyte_raw_multiple_column_names_conflicts') }} as table_alias -- multiple_column_names_conflicts where 1 = 1 -{{ incremental_clause('_airbyte_emitted_at') }} +{{ 
incremental_clause('_airbyte_emitted_at', this) }} diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/models/generated/airbyte_ctes/test_normalization/multiple_column_names_conflicts_ab2.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/models/generated/airbyte_ctes/test_normalization/multiple_column_names_conflicts_ab2.sql index afed155ffbd8..263d011d1bde 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/models/generated/airbyte_ctes/test_normalization/multiple_column_names_conflicts_ab2.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/models/generated/airbyte_ctes/test_normalization/multiple_column_names_conflicts_ab2.sql @@ -20,5 +20,5 @@ select from {{ ref('multiple_column_names_conflicts_ab1') }} -- multiple_column_names_conflicts where 1 = 1 -{{ incremental_clause('_airbyte_emitted_at') }} +{{ incremental_clause('_airbyte_emitted_at', this) }} diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/models/generated/airbyte_ctes/test_normalization/pos_dedup_cdcx_ab1.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/models/generated/airbyte_ctes/test_normalization/pos_dedup_cdcx_ab1.sql index bb2d814a0620..ee8f1538acb4 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/models/generated/airbyte_ctes/test_normalization/pos_dedup_cdcx_ab1.sql +++ 
b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/models/generated/airbyte_ctes/test_normalization/pos_dedup_cdcx_ab1.sql @@ -19,5 +19,5 @@ select from {{ source('test_normalization', '_airbyte_raw_pos_dedup_cdcx') }} as table_alias -- pos_dedup_cdcx where 1 = 1 -{{ incremental_clause('_airbyte_emitted_at') }} +{{ incremental_clause('_airbyte_emitted_at', this) }} diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/models/generated/airbyte_ctes/test_normalization/pos_dedup_cdcx_ab2.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/models/generated/airbyte_ctes/test_normalization/pos_dedup_cdcx_ab2.sql index ec0e36dbec13..96c252758b6d 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/models/generated/airbyte_ctes/test_normalization/pos_dedup_cdcx_ab2.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/models/generated/airbyte_ctes/test_normalization/pos_dedup_cdcx_ab2.sql @@ -19,5 +19,5 @@ select from {{ ref('pos_dedup_cdcx_ab1') }} -- pos_dedup_cdcx where 1 = 1 -{{ incremental_clause('_airbyte_emitted_at') }} +{{ incremental_clause('_airbyte_emitted_at', this) }} diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/models/generated/airbyte_ctes/test_normalization/renamed_dedup_cdc_excluded_ab1.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/models/generated/airbyte_ctes/test_normalization/renamed_dedup_cdc_excluded_ab1.sql index e75261bd70a4..fbe40aebf3c7 100644 --- 
a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/models/generated/airbyte_ctes/test_normalization/renamed_dedup_cdc_excluded_ab1.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/models/generated/airbyte_ctes/test_normalization/renamed_dedup_cdc_excluded_ab1.sql @@ -15,5 +15,5 @@ select from {{ source('test_normalization', '_airbyte_raw_renamed_dedup_cdc_excluded') }} as table_alias -- renamed_dedup_cdc_excluded where 1 = 1 -{{ incremental_clause('_airbyte_emitted_at') }} +{{ incremental_clause('_airbyte_emitted_at', this) }} diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/models/generated/airbyte_ctes/test_normalization/renamed_dedup_cdc_excluded_ab2.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/models/generated/airbyte_ctes/test_normalization/renamed_dedup_cdc_excluded_ab2.sql index f7a91a73a73c..f0b99802de8b 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/models/generated/airbyte_ctes/test_normalization/renamed_dedup_cdc_excluded_ab2.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/models/generated/airbyte_ctes/test_normalization/renamed_dedup_cdc_excluded_ab2.sql @@ -15,5 +15,5 @@ select from {{ ref('renamed_dedup_cdc_excluded_ab1') }} -- renamed_dedup_cdc_excluded where 1 = 1 -{{ incremental_clause('_airbyte_emitted_at') }} +{{ incremental_clause('_airbyte_emitted_at', this) }} diff --git 
a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/models/generated/airbyte_incremental/scd/test_normalization/1_prefix_startwith_number_scd.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/models/generated/airbyte_incremental/scd/test_normalization/1_prefix_startwith_number_scd.sql index e2ade95cd401..01e0c49d1c7c 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/models/generated/airbyte_incremental/scd/test_normalization/1_prefix_startwith_number_scd.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/models/generated/airbyte_incremental/scd/test_normalization/1_prefix_startwith_number_scd.sql @@ -2,7 +2,53 @@ indexes = [{'columns':['_airbyte_active_row','_airbyte_unique_key_scd','_airbyte_emitted_at'],'type': 'btree'}], unique_key = "_airbyte_unique_key_scd", schema = "test_normalization", - post_hook = ["delete from _airbyte_test_normalization.{{ adapter.quote('1_prefix_startwith_number_stg') }} where _airbyte_emitted_at != (select max(_airbyte_emitted_at) from _airbyte_test_normalization.{{ adapter.quote('1_prefix_startwith_number_stg') }})"], + post_hook = [" + {% + set final_table_relation = adapter.get_relation( + database=this.database, + schema=this.schema, + identifier='1_prefix_startwith_number' + ) + %} + {# + If the final table doesn't exist, then obviously we can't delete anything from it. + Also, after a reset, the final table is created without the _airbyte_unique_key column (this column is created during the first sync) + So skip this deletion if the column doesn't exist. 
(in this case, the table is guaranteed to be empty anyway) + #} + {% + if final_table_relation is not none and '_airbyte_unique_key' in adapter.get_columns_in_relation(final_table_relation)|map(attribute='name') + %} + -- Delete records which are no longer active: + -- This query is equivalent, but the left join version is more performant: + -- delete from final_table where unique_key in ( + -- select unique_key from scd_table where 1 = 1 + -- ) and unique_key not in ( + -- select unique_key from scd_table where active_row = 1 + -- ) + -- We're incremental against normalized_at rather than emitted_at because we need to fetch the SCD + -- entries that were _updated_ recently. This is because a deleted record will have an SCD record + -- which was emitted a long time ago, but recently re-normalized to have active_row = 0. + delete from {{ final_table_relation }} where {{ final_table_relation }}._airbyte_unique_key in ( + select recent_records.unique_key + from ( + select distinct _airbyte_unique_key as unique_key + from {{ this }} + where 1=1 {{ incremental_clause('_airbyte_normalized_at', this.schema + '.' + adapter.quote('1_prefix_startwith_number')) }} + ) recent_records + left join ( + select _airbyte_unique_key as unique_key, count(_airbyte_unique_key) as active_count + from {{ this }} + where _airbyte_active_row = 1 {{ incremental_clause('_airbyte_normalized_at', this.schema + '.' 
+ adapter.quote('1_prefix_startwith_number')) }} + group by _airbyte_unique_key + ) active_counts + on recent_records.unique_key = active_counts.unique_key + where active_count is null or active_count = 0 + ) + {% else %} + -- We have to have a non-empty query, so just do a noop delete + delete from {{ this }} where 1=0 + {% endif %} + ","delete from _airbyte_test_normalization.{{ adapter.quote('1_prefix_startwith_number_stg') }} where _airbyte_emitted_at != (select max(_airbyte_emitted_at) from _airbyte_test_normalization.{{ adapter.quote('1_prefix_startwith_number_stg') }})"], tags = [ "top-level" ] ) }} -- depends_on: ref('1_prefix_startwith_number_stg') @@ -15,7 +61,7 @@ new_data as ( from {{ ref('1_prefix_startwith_number_stg') }} -- 1_prefix_startwith_number from {{ source('test_normalization', '_airbyte_raw_1_prefix_startwith_number') }} where 1 = 1 - {{ incremental_clause('_airbyte_emitted_at') }} + {{ incremental_clause('_airbyte_emitted_at', this) }} ), new_data_ids as ( -- build a subset of _airbyte_unique_key from rows that are new diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/models/generated/airbyte_incremental/scd/test_normalization/dedup_cdc_excluded_scd.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/models/generated/airbyte_incremental/scd/test_normalization/dedup_cdc_excluded_scd.sql index 2fb3816fb87f..5affe9825e3b 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/models/generated/airbyte_incremental/scd/test_normalization/dedup_cdc_excluded_scd.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/models/generated/airbyte_incremental/scd/test_normalization/dedup_cdc_excluded_scd.sql @@ -2,7 +2,53 @@ indexes = 
[{'columns':['_airbyte_active_row','_airbyte_unique_key_scd','_airbyte_emitted_at'],'type': 'btree'}], unique_key = "_airbyte_unique_key_scd", schema = "test_normalization", - post_hook = ["delete from _airbyte_test_normalization.dedup_cdc_excluded_stg where _airbyte_emitted_at != (select max(_airbyte_emitted_at) from _airbyte_test_normalization.dedup_cdc_excluded_stg)"], + post_hook = [" + {% + set final_table_relation = adapter.get_relation( + database=this.database, + schema=this.schema, + identifier='dedup_cdc_excluded' + ) + %} + {# + If the final table doesn't exist, then obviously we can't delete anything from it. + Also, after a reset, the final table is created without the _airbyte_unique_key column (this column is created during the first sync) + So skip this deletion if the column doesn't exist. (in this case, the table is guaranteed to be empty anyway) + #} + {% + if final_table_relation is not none and '_airbyte_unique_key' in adapter.get_columns_in_relation(final_table_relation)|map(attribute='name') + %} + -- Delete records which are no longer active: + -- This query is equivalent, but the left join version is more performant: + -- delete from final_table where unique_key in ( + -- select unique_key from scd_table where 1 = 1 + -- ) and unique_key not in ( + -- select unique_key from scd_table where active_row = 1 + -- ) + -- We're incremental against normalized_at rather than emitted_at because we need to fetch the SCD + -- entries that were _updated_ recently. This is because a deleted record will have an SCD record + -- which was emitted a long time ago, but recently re-normalized to have active_row = 0. + delete from {{ final_table_relation }} where {{ final_table_relation }}._airbyte_unique_key in ( + select recent_records.unique_key + from ( + select distinct _airbyte_unique_key as unique_key + from {{ this }} + where 1=1 {{ incremental_clause('_airbyte_normalized_at', this.schema + '.' 
+ adapter.quote('dedup_cdc_excluded')) }} + ) recent_records + left join ( + select _airbyte_unique_key as unique_key, count(_airbyte_unique_key) as active_count + from {{ this }} + where _airbyte_active_row = 1 {{ incremental_clause('_airbyte_normalized_at', this.schema + '.' + adapter.quote('dedup_cdc_excluded')) }} + group by _airbyte_unique_key + ) active_counts + on recent_records.unique_key = active_counts.unique_key + where active_count is null or active_count = 0 + ) + {% else %} + -- We have to have a non-empty query, so just do a noop delete + delete from {{ this }} where 1=0 + {% endif %} + ","delete from _airbyte_test_normalization.dedup_cdc_excluded_stg where _airbyte_emitted_at != (select max(_airbyte_emitted_at) from _airbyte_test_normalization.dedup_cdc_excluded_stg)"], tags = [ "top-level" ] ) }} -- depends_on: ref('dedup_cdc_excluded_stg') @@ -15,7 +61,7 @@ new_data as ( from {{ ref('dedup_cdc_excluded_stg') }} -- dedup_cdc_excluded from {{ source('test_normalization', '_airbyte_raw_dedup_cdc_excluded') }} where 1 = 1 - {{ incremental_clause('_airbyte_emitted_at') }} + {{ incremental_clause('_airbyte_emitted_at', this) }} ), new_data_ids as ( -- build a subset of _airbyte_unique_key from rows that are new diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/models/generated/airbyte_incremental/scd/test_normalization/dedup_exchange_rate_scd.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/models/generated/airbyte_incremental/scd/test_normalization/dedup_exchange_rate_scd.sql index 7234b26c0f81..ef0cf7e1e95f 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/models/generated/airbyte_incremental/scd/test_normalization/dedup_exchange_rate_scd.sql +++ 
b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/models/generated/airbyte_incremental/scd/test_normalization/dedup_exchange_rate_scd.sql @@ -2,7 +2,53 @@ indexes = [{'columns':['_airbyte_active_row','_airbyte_unique_key_scd','_airbyte_emitted_at'],'type': 'btree'}], unique_key = "_airbyte_unique_key_scd", schema = "test_normalization", - post_hook = ["delete from _airbyte_test_normalization.dedup_exchange_rate_stg where _airbyte_emitted_at != (select max(_airbyte_emitted_at) from _airbyte_test_normalization.dedup_exchange_rate_stg)"], + post_hook = [" + {% + set final_table_relation = adapter.get_relation( + database=this.database, + schema=this.schema, + identifier='dedup_exchange_rate' + ) + %} + {# + If the final table doesn't exist, then obviously we can't delete anything from it. + Also, after a reset, the final table is created without the _airbyte_unique_key column (this column is created during the first sync) + So skip this deletion if the column doesn't exist. (in this case, the table is guaranteed to be empty anyway) + #} + {% + if final_table_relation is not none and '_airbyte_unique_key' in adapter.get_columns_in_relation(final_table_relation)|map(attribute='name') + %} + -- Delete records which are no longer active: + -- This query is equivalent, but the left join version is more performant: + -- delete from final_table where unique_key in ( + -- select unique_key from scd_table where 1 = 1 + -- ) and unique_key not in ( + -- select unique_key from scd_table where active_row = 1 + -- ) + -- We're incremental against normalized_at rather than emitted_at because we need to fetch the SCD + -- entries that were _updated_ recently. This is because a deleted record will have an SCD record + -- which was emitted a long time ago, but recently re-normalized to have active_row = 0. 
+ delete from {{ final_table_relation }} where {{ final_table_relation }}._airbyte_unique_key in ( + select recent_records.unique_key + from ( + select distinct _airbyte_unique_key as unique_key + from {{ this }} + where 1=1 {{ incremental_clause('_airbyte_normalized_at', this.schema + '.' + adapter.quote('dedup_exchange_rate')) }} + ) recent_records + left join ( + select _airbyte_unique_key as unique_key, count(_airbyte_unique_key) as active_count + from {{ this }} + where _airbyte_active_row = 1 {{ incremental_clause('_airbyte_normalized_at', this.schema + '.' + adapter.quote('dedup_exchange_rate')) }} + group by _airbyte_unique_key + ) active_counts + on recent_records.unique_key = active_counts.unique_key + where active_count is null or active_count = 0 + ) + {% else %} + -- We have to have a non-empty query, so just do a noop delete + delete from {{ this }} where 1=0 + {% endif %} + ","delete from _airbyte_test_normalization.dedup_exchange_rate_stg where _airbyte_emitted_at != (select max(_airbyte_emitted_at) from _airbyte_test_normalization.dedup_exchange_rate_stg)"], tags = [ "top-level" ] ) }} -- depends_on: ref('dedup_exchange_rate_stg') @@ -15,7 +61,7 @@ new_data as ( from {{ ref('dedup_exchange_rate_stg') }} -- dedup_exchange_rate from {{ source('test_normalization', '_airbyte_raw_dedup_exchange_rate') }} where 1 = 1 - {{ incremental_clause('_airbyte_emitted_at') }} + {{ incremental_clause('_airbyte_emitted_at', this) }} ), new_data_ids as ( -- build a subset of _airbyte_unique_key from rows that are new diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/models/generated/airbyte_incremental/scd/test_normalization/multiple_column_names_conflicts_scd.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/models/generated/airbyte_incremental/scd/test_normalization/multiple_column_names_conflicts_scd.sql 
index 736e25452ae3..77d393c85689 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/models/generated/airbyte_incremental/scd/test_normalization/multiple_column_names_conflicts_scd.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/models/generated/airbyte_incremental/scd/test_normalization/multiple_column_names_conflicts_scd.sql @@ -2,7 +2,53 @@ indexes = [{'columns':['_airbyte_active_row','_airbyte_unique_key_scd','_airbyte_emitted_at'],'type': 'btree'}], unique_key = "_airbyte_unique_key_scd", schema = "test_normalization", - post_hook = ["delete from _airbyte_test_normalization.multiple_column_names_conflicts_stg where _airbyte_emitted_at != (select max(_airbyte_emitted_at) from _airbyte_test_normalization.multiple_column_names_conflicts_stg)"], + post_hook = [" + {% + set final_table_relation = adapter.get_relation( + database=this.database, + schema=this.schema, + identifier='multiple_column_names_conflicts' + ) + %} + {# + If the final table doesn't exist, then obviously we can't delete anything from it. + Also, after a reset, the final table is created without the _airbyte_unique_key column (this column is created during the first sync) + So skip this deletion if the column doesn't exist. 
(in this case, the table is guaranteed to be empty anyway) + #} + {% + if final_table_relation is not none and '_airbyte_unique_key' in adapter.get_columns_in_relation(final_table_relation)|map(attribute='name') + %} + -- Delete records which are no longer active: + -- This query is equivalent, but the left join version is more performant: + -- delete from final_table where unique_key in ( + -- select unique_key from scd_table where 1 = 1 + -- ) and unique_key not in ( + -- select unique_key from scd_table where active_row = 1 + -- ) + -- We're incremental against normalized_at rather than emitted_at because we need to fetch the SCD + -- entries that were _updated_ recently. This is because a deleted record will have an SCD record + -- which was emitted a long time ago, but recently re-normalized to have active_row = 0. + delete from {{ final_table_relation }} where {{ final_table_relation }}._airbyte_unique_key in ( + select recent_records.unique_key + from ( + select distinct _airbyte_unique_key as unique_key + from {{ this }} + where 1=1 {{ incremental_clause('_airbyte_normalized_at', this.schema + '.' + adapter.quote('multiple_column_names_conflicts')) }} + ) recent_records + left join ( + select _airbyte_unique_key as unique_key, count(_airbyte_unique_key) as active_count + from {{ this }} + where _airbyte_active_row = 1 {{ incremental_clause('_airbyte_normalized_at', this.schema + '.' 
+ adapter.quote('multiple_column_names_conflicts')) }} + group by _airbyte_unique_key + ) active_counts + on recent_records.unique_key = active_counts.unique_key + where active_count is null or active_count = 0 + ) + {% else %} + -- We have to have a non-empty query, so just do a noop delete + delete from {{ this }} where 1=0 + {% endif %} + ","delete from _airbyte_test_normalization.multiple_column_names_conflicts_stg where _airbyte_emitted_at != (select max(_airbyte_emitted_at) from _airbyte_test_normalization.multiple_column_names_conflicts_stg)"], tags = [ "top-level" ] ) }} -- depends_on: ref('multiple_column_names_conflicts_stg') @@ -15,7 +61,7 @@ new_data as ( from {{ ref('multiple_column_names_conflicts_stg') }} -- multiple_column_names_conflicts from {{ source('test_normalization', '_airbyte_raw_multiple_column_names_conflicts') }} where 1 = 1 - {{ incremental_clause('_airbyte_emitted_at') }} + {{ incremental_clause('_airbyte_emitted_at', this) }} ), new_data_ids as ( -- build a subset of _airbyte_unique_key from rows that are new diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/models/generated/airbyte_incremental/scd/test_normalization/pos_dedup_cdcx_scd.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/models/generated/airbyte_incremental/scd/test_normalization/pos_dedup_cdcx_scd.sql index 1512b6fe8546..ff471c6abaab 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/models/generated/airbyte_incremental/scd/test_normalization/pos_dedup_cdcx_scd.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/models/generated/airbyte_incremental/scd/test_normalization/pos_dedup_cdcx_scd.sql @@ -2,7 +2,53 @@ indexes = 
[{'columns':['_airbyte_active_row','_airbyte_unique_key_scd','_airbyte_emitted_at'],'type': 'btree'}], unique_key = "_airbyte_unique_key_scd", schema = "test_normalization", - post_hook = ["delete from _airbyte_test_normalization.pos_dedup_cdcx_stg where _airbyte_emitted_at != (select max(_airbyte_emitted_at) from _airbyte_test_normalization.pos_dedup_cdcx_stg)"], + post_hook = [" + {% + set final_table_relation = adapter.get_relation( + database=this.database, + schema=this.schema, + identifier='pos_dedup_cdcx' + ) + %} + {# + If the final table doesn't exist, then obviously we can't delete anything from it. + Also, after a reset, the final table is created without the _airbyte_unique_key column (this column is created during the first sync) + So skip this deletion if the column doesn't exist. (in this case, the table is guaranteed to be empty anyway) + #} + {% + if final_table_relation is not none and '_airbyte_unique_key' in adapter.get_columns_in_relation(final_table_relation)|map(attribute='name') + %} + -- Delete records which are no longer active: + -- This query is equivalent, but the left join version is more performant: + -- delete from final_table where unique_key in ( + -- select unique_key from scd_table where 1 = 1 + -- ) and unique_key not in ( + -- select unique_key from scd_table where active_row = 1 + -- ) + -- We're incremental against normalized_at rather than emitted_at because we need to fetch the SCD + -- entries that were _updated_ recently. This is because a deleted record will have an SCD record + -- which was emitted a long time ago, but recently re-normalized to have active_row = 0. + delete from {{ final_table_relation }} where {{ final_table_relation }}._airbyte_unique_key in ( + select recent_records.unique_key + from ( + select distinct _airbyte_unique_key as unique_key + from {{ this }} + where 1=1 {{ incremental_clause('_airbyte_normalized_at', this.schema + '.' 
+ adapter.quote('pos_dedup_cdcx')) }} + ) recent_records + left join ( + select _airbyte_unique_key as unique_key, count(_airbyte_unique_key) as active_count + from {{ this }} + where _airbyte_active_row = 1 {{ incremental_clause('_airbyte_normalized_at', this.schema + '.' + adapter.quote('pos_dedup_cdcx')) }} + group by _airbyte_unique_key + ) active_counts + on recent_records.unique_key = active_counts.unique_key + where active_count is null or active_count = 0 + ) + {% else %} + -- We have to have a non-empty query, so just do a noop delete + delete from {{ this }} where 1=0 + {% endif %} + ","delete from _airbyte_test_normalization.pos_dedup_cdcx_stg where _airbyte_emitted_at != (select max(_airbyte_emitted_at) from _airbyte_test_normalization.pos_dedup_cdcx_stg)"], tags = [ "top-level" ] ) }} -- depends_on: ref('pos_dedup_cdcx_stg') @@ -15,7 +61,7 @@ new_data as ( from {{ ref('pos_dedup_cdcx_stg') }} -- pos_dedup_cdcx from {{ source('test_normalization', '_airbyte_raw_pos_dedup_cdcx') }} where 1 = 1 - {{ incremental_clause('_airbyte_emitted_at') }} + {{ incremental_clause('_airbyte_emitted_at', this) }} ), new_data_ids as ( -- build a subset of _airbyte_unique_key from rows that are new diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/models/generated/airbyte_incremental/scd/test_normalization/renamed_dedup_cdc_excluded_scd.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/models/generated/airbyte_incremental/scd/test_normalization/renamed_dedup_cdc_excluded_scd.sql index 4fbd681d8ee6..d8da713c6871 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/models/generated/airbyte_incremental/scd/test_normalization/renamed_dedup_cdc_excluded_scd.sql +++ 
b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/models/generated/airbyte_incremental/scd/test_normalization/renamed_dedup_cdc_excluded_scd.sql @@ -2,7 +2,53 @@ indexes = [{'columns':['_airbyte_active_row','_airbyte_unique_key_scd','_airbyte_emitted_at'],'type': 'btree'}], unique_key = "_airbyte_unique_key_scd", schema = "test_normalization", - post_hook = ["delete from _airbyte_test_normalization.renamed_dedup_cdc_excluded_stg where _airbyte_emitted_at != (select max(_airbyte_emitted_at) from _airbyte_test_normalization.renamed_dedup_cdc_excluded_stg)"], + post_hook = [" + {% + set final_table_relation = adapter.get_relation( + database=this.database, + schema=this.schema, + identifier='renamed_dedup_cdc_excluded' + ) + %} + {# + If the final table doesn't exist, then obviously we can't delete anything from it. + Also, after a reset, the final table is created without the _airbyte_unique_key column (this column is created during the first sync) + So skip this deletion if the column doesn't exist. (in this case, the table is guaranteed to be empty anyway) + #} + {% + if final_table_relation is not none and '_airbyte_unique_key' in adapter.get_columns_in_relation(final_table_relation)|map(attribute='name') + %} + -- Delete records which are no longer active: + -- This query is equivalent, but the left join version is more performant: + -- delete from final_table where unique_key in ( + -- select unique_key from scd_table where 1 = 1 + -- ) and unique_key not in ( + -- select unique_key from scd_table where active_row = 1 + -- ) + -- We're incremental against normalized_at rather than emitted_at because we need to fetch the SCD + -- entries that were _updated_ recently. This is because a deleted record will have an SCD record + -- which was emitted a long time ago, but recently re-normalized to have active_row = 0. 
+ delete from {{ final_table_relation }} where {{ final_table_relation }}._airbyte_unique_key in ( + select recent_records.unique_key + from ( + select distinct _airbyte_unique_key as unique_key + from {{ this }} + where 1=1 {{ incremental_clause('_airbyte_normalized_at', this.schema + '.' + adapter.quote('renamed_dedup_cdc_excluded')) }} + ) recent_records + left join ( + select _airbyte_unique_key as unique_key, count(_airbyte_unique_key) as active_count + from {{ this }} + where _airbyte_active_row = 1 {{ incremental_clause('_airbyte_normalized_at', this.schema + '.' + adapter.quote('renamed_dedup_cdc_excluded')) }} + group by _airbyte_unique_key + ) active_counts + on recent_records.unique_key = active_counts.unique_key + where active_count is null or active_count = 0 + ) + {% else %} + -- We have to have a non-empty query, so just do a noop delete + delete from {{ this }} where 1=0 + {% endif %} + ","delete from _airbyte_test_normalization.renamed_dedup_cdc_excluded_stg where _airbyte_emitted_at != (select max(_airbyte_emitted_at) from _airbyte_test_normalization.renamed_dedup_cdc_excluded_stg)"], tags = [ "top-level" ] ) }} -- depends_on: ref('renamed_dedup_cdc_excluded_stg') @@ -15,7 +61,7 @@ new_data as ( from {{ ref('renamed_dedup_cdc_excluded_stg') }} -- renamed_dedup_cdc_excluded from {{ source('test_normalization', '_airbyte_raw_renamed_dedup_cdc_excluded') }} where 1 = 1 - {{ incremental_clause('_airbyte_emitted_at') }} + {{ incremental_clause('_airbyte_emitted_at', this) }} ), new_data_ids as ( -- build a subset of _airbyte_unique_key from rows that are new diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/models/generated/airbyte_incremental/test_normalization/1_prefix_startwith_number.sql 
b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/models/generated/airbyte_incremental/test_normalization/1_prefix_startwith_number.sql index 77aba25edc2a..f3ea9897b65a 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/models/generated/airbyte_incremental/test_normalization/1_prefix_startwith_number.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/models/generated/airbyte_incremental/test_normalization/1_prefix_startwith_number.sql @@ -19,5 +19,5 @@ from {{ ref('1_prefix_startwith_number_scd') }} -- 1_prefix_startwith_number from {{ source('test_normalization', '_airbyte_raw_1_prefix_startwith_number') }} where 1 = 1 and _airbyte_active_row = 1 -{{ incremental_clause('_airbyte_emitted_at') }} +{{ incremental_clause('_airbyte_emitted_at', this) }} diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/models/generated/airbyte_incremental/test_normalization/1_prefix_startwith_number_stg.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/models/generated/airbyte_incremental/test_normalization/1_prefix_startwith_number_stg.sql index 69bff1d44aaa..c387201c974c 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/models/generated/airbyte_incremental/test_normalization/1_prefix_startwith_number_stg.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/models/generated/airbyte_incremental/test_normalization/1_prefix_startwith_number_stg.sql @@ -16,5 +16,5 @@ select from {{ ref('1_prefix_startwith_number_ab2') }} tmp -- 1_prefix_startwith_number 
where 1 = 1 -{{ incremental_clause('_airbyte_emitted_at') }} +{{ incremental_clause('_airbyte_emitted_at', this) }} diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/models/generated/airbyte_incremental/test_normalization/dedup_cdc_excluded.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/models/generated/airbyte_incremental/test_normalization/dedup_cdc_excluded.sql index 2de38510bde8..32d70c680aa9 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/models/generated/airbyte_incremental/test_normalization/dedup_cdc_excluded.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/models/generated/airbyte_incremental/test_normalization/dedup_cdc_excluded.sql @@ -21,5 +21,5 @@ from {{ ref('dedup_cdc_excluded_scd') }} -- dedup_cdc_excluded from {{ source('test_normalization', '_airbyte_raw_dedup_cdc_excluded') }} where 1 = 1 and _airbyte_active_row = 1 -{{ incremental_clause('_airbyte_emitted_at') }} +{{ incremental_clause('_airbyte_emitted_at', this) }} diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/models/generated/airbyte_incremental/test_normalization/dedup_cdc_excluded_stg.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/models/generated/airbyte_incremental/test_normalization/dedup_cdc_excluded_stg.sql index 4b95e21267db..b0cd4bf7cb13 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/models/generated/airbyte_incremental/test_normalization/dedup_cdc_excluded_stg.sql +++ 
b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/models/generated/airbyte_incremental/test_normalization/dedup_cdc_excluded_stg.sql @@ -18,5 +18,5 @@ select from {{ ref('dedup_cdc_excluded_ab2') }} tmp -- dedup_cdc_excluded where 1 = 1 -{{ incremental_clause('_airbyte_emitted_at') }} +{{ incremental_clause('_airbyte_emitted_at', this) }} diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/models/generated/airbyte_incremental/test_normalization/dedup_exchange_rate.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/models/generated/airbyte_incremental/test_normalization/dedup_exchange_rate.sql index 1ee7d74d027e..42f7540dc6b9 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/models/generated/airbyte_incremental/test_normalization/dedup_exchange_rate.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/models/generated/airbyte_incremental/test_normalization/dedup_exchange_rate.sql @@ -24,5 +24,5 @@ from {{ ref('dedup_exchange_rate_scd') }} -- dedup_exchange_rate from {{ source('test_normalization', '_airbyte_raw_dedup_exchange_rate') }} where 1 = 1 and _airbyte_active_row = 1 -{{ incremental_clause('_airbyte_emitted_at') }} +{{ incremental_clause('_airbyte_emitted_at', this) }} diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/models/generated/airbyte_incremental/test_normalization/dedup_exchange_rate_stg.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/models/generated/airbyte_incremental/test_normalization/dedup_exchange_rate_stg.sql 
index 62126d7b7c4e..f892feed3fe7 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/models/generated/airbyte_incremental/test_normalization/dedup_exchange_rate_stg.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/models/generated/airbyte_incremental/test_normalization/dedup_exchange_rate_stg.sql @@ -21,5 +21,5 @@ select from {{ ref('dedup_exchange_rate_ab2') }} tmp -- dedup_exchange_rate where 1 = 1 -{{ incremental_clause('_airbyte_emitted_at') }} +{{ incremental_clause('_airbyte_emitted_at', this) }} diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/models/generated/airbyte_incremental/test_normalization/multiple_column_names_conflicts.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/models/generated/airbyte_incremental/test_normalization/multiple_column_names_conflicts.sql index 9aa1f765c0c8..3451ce406b4d 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/models/generated/airbyte_incremental/test_normalization/multiple_column_names_conflicts.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/models/generated/airbyte_incremental/test_normalization/multiple_column_names_conflicts.sql @@ -23,5 +23,5 @@ from {{ ref('multiple_column_names_conflicts_scd') }} -- multiple_column_names_conflicts from {{ source('test_normalization', '_airbyte_raw_multiple_column_names_conflicts') }} where 1 = 1 and _airbyte_active_row = 1 -{{ incremental_clause('_airbyte_emitted_at') }} +{{ incremental_clause('_airbyte_emitted_at', this) }} diff --git 
a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/models/generated/airbyte_incremental/test_normalization/multiple_column_names_conflicts_stg.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/models/generated/airbyte_incremental/test_normalization/multiple_column_names_conflicts_stg.sql index 85ac75357597..c549b49128a6 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/models/generated/airbyte_incremental/test_normalization/multiple_column_names_conflicts_stg.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/models/generated/airbyte_incremental/test_normalization/multiple_column_names_conflicts_stg.sql @@ -20,5 +20,5 @@ select from {{ ref('multiple_column_names_conflicts_ab2') }} tmp -- multiple_column_names_conflicts where 1 = 1 -{{ incremental_clause('_airbyte_emitted_at') }} +{{ incremental_clause('_airbyte_emitted_at', this) }} diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/models/generated/airbyte_incremental/test_normalization/pos_dedup_cdcx.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/models/generated/airbyte_incremental/test_normalization/pos_dedup_cdcx.sql index 1d95d8a50338..57ddb1908b9d 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/models/generated/airbyte_incremental/test_normalization/pos_dedup_cdcx.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/models/generated/airbyte_incremental/test_normalization/pos_dedup_cdcx.sql @@ 
-22,5 +22,5 @@ from {{ ref('pos_dedup_cdcx_scd') }} -- pos_dedup_cdcx from {{ source('test_normalization', '_airbyte_raw_pos_dedup_cdcx') }} where 1 = 1 and _airbyte_active_row = 1 -{{ incremental_clause('_airbyte_emitted_at') }} +{{ incremental_clause('_airbyte_emitted_at', this) }} diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/models/generated/airbyte_incremental/test_normalization/pos_dedup_cdcx_stg.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/models/generated/airbyte_incremental/test_normalization/pos_dedup_cdcx_stg.sql index 8fdd8e7d07f0..692867ceaf4e 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/models/generated/airbyte_incremental/test_normalization/pos_dedup_cdcx_stg.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/models/generated/airbyte_incremental/test_normalization/pos_dedup_cdcx_stg.sql @@ -19,5 +19,5 @@ select from {{ ref('pos_dedup_cdcx_ab2') }} tmp -- pos_dedup_cdcx where 1 = 1 -{{ incremental_clause('_airbyte_emitted_at') }} +{{ incremental_clause('_airbyte_emitted_at', this) }} diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/models/generated/airbyte_incremental/test_normalization/renamed_dedup_cdc_excluded.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/models/generated/airbyte_incremental/test_normalization/renamed_dedup_cdc_excluded.sql index ca5093eb3e17..603af9d4f80c 100644 --- 
a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/models/generated/airbyte_incremental/test_normalization/renamed_dedup_cdc_excluded.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/models/generated/airbyte_incremental/test_normalization/renamed_dedup_cdc_excluded.sql @@ -18,5 +18,5 @@ from {{ ref('renamed_dedup_cdc_excluded_scd') }} -- renamed_dedup_cdc_excluded from {{ source('test_normalization', '_airbyte_raw_renamed_dedup_cdc_excluded') }} where 1 = 1 and _airbyte_active_row = 1 -{{ incremental_clause('_airbyte_emitted_at') }} +{{ incremental_clause('_airbyte_emitted_at', this) }} diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/models/generated/airbyte_incremental/test_normalization/renamed_dedup_cdc_excluded_stg.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/models/generated/airbyte_incremental/test_normalization/renamed_dedup_cdc_excluded_stg.sql index be9bbfcd8675..96371bb4931a 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/models/generated/airbyte_incremental/test_normalization/renamed_dedup_cdc_excluded_stg.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/models/generated/airbyte_incremental/test_normalization/renamed_dedup_cdc_excluded_stg.sql @@ -15,5 +15,5 @@ select from {{ ref('renamed_dedup_cdc_excluded_ab2') }} tmp -- renamed_dedup_cdc_excluded where 1 = 1 -{{ incremental_clause('_airbyte_emitted_at') }} +{{ incremental_clause('_airbyte_emitted_at', this) }} diff --git 
a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/modified_models/generated/airbyte_ctes/test_normalization/dedup_cdc_excluded_ab1.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/modified_models/generated/airbyte_ctes/test_normalization/dedup_cdc_excluded_ab1.sql new file mode 100644 index 000000000000..99a03831a8ba --- /dev/null +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/modified_models/generated/airbyte_ctes/test_normalization/dedup_cdc_excluded_ab1.sql @@ -0,0 +1,22 @@ +{{ config( + indexes = [{'columns':['_airbyte_emitted_at'],'type':'btree'}], + unique_key = '_airbyte_ab_id', + schema = "_airbyte_test_normalization", + tags = [ "top-level-intermediate" ] +) }} +-- SQL model to parse JSON blob stored in a single column and extract into separated field columns as described by the JSON Schema +-- depends_on: {{ source('test_normalization', '_airbyte_raw_dedup_cdc_excluded') }} +select + {{ json_extract_scalar('_airbyte_data', ['id'], ['id']) }} as {{ adapter.quote('id') }}, + {{ json_extract_scalar('_airbyte_data', ['name'], ['name']) }} as {{ adapter.quote('name') }}, + {{ json_extract_scalar('_airbyte_data', ['_ab_cdc_lsn'], ['_ab_cdc_lsn']) }} as _ab_cdc_lsn, + {{ json_extract_scalar('_airbyte_data', ['_ab_cdc_updated_at'], ['_ab_cdc_updated_at']) }} as _ab_cdc_updated_at, + {{ json_extract_scalar('_airbyte_data', ['_ab_cdc_deleted_at'], ['_ab_cdc_deleted_at']) }} as _ab_cdc_deleted_at, + _airbyte_ab_id, + _airbyte_emitted_at, + {{ current_timestamp() }} as _airbyte_normalized_at +from {{ source('test_normalization', '_airbyte_raw_dedup_cdc_excluded') }} as table_alias +-- dedup_cdc_excluded +where 1 = 1 +{{ incremental_clause('_airbyte_emitted_at', this) }} + diff --git 
a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/modified_models/generated/airbyte_ctes/test_normalization/dedup_cdc_excluded_ab2.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/modified_models/generated/airbyte_ctes/test_normalization/dedup_cdc_excluded_ab2.sql new file mode 100644 index 000000000000..3d8803e27a66 --- /dev/null +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/modified_models/generated/airbyte_ctes/test_normalization/dedup_cdc_excluded_ab2.sql @@ -0,0 +1,22 @@ +{{ config( + indexes = [{'columns':['_airbyte_emitted_at'],'type':'btree'}], + unique_key = '_airbyte_ab_id', + schema = "_airbyte_test_normalization", + tags = [ "top-level-intermediate" ] +) }} +-- SQL model to cast each column to its adequate SQL type converted from the JSON schema type +-- depends_on: {{ ref('dedup_cdc_excluded_ab1') }} +select + cast({{ adapter.quote('id') }} as {{ dbt_utils.type_bigint() }}) as {{ adapter.quote('id') }}, + cast({{ adapter.quote('name') }} as {{ dbt_utils.type_string() }}) as {{ adapter.quote('name') }}, + cast(_ab_cdc_lsn as {{ dbt_utils.type_float() }}) as _ab_cdc_lsn, + cast(_ab_cdc_updated_at as {{ dbt_utils.type_float() }}) as _ab_cdc_updated_at, + cast(_ab_cdc_deleted_at as {{ dbt_utils.type_float() }}) as _ab_cdc_deleted_at, + _airbyte_ab_id, + _airbyte_emitted_at, + {{ current_timestamp() }} as _airbyte_normalized_at +from {{ ref('dedup_cdc_excluded_ab1') }} +-- dedup_cdc_excluded +where 1 = 1 +{{ incremental_clause('_airbyte_emitted_at', this) }} + diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/modified_models/generated/airbyte_ctes/test_normalization/dedup_exchange_rate_ab1.sql 
b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/modified_models/generated/airbyte_ctes/test_normalization/dedup_exchange_rate_ab1.sql index 23e1bb70c587..8dd3aff00d2c 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/modified_models/generated/airbyte_ctes/test_normalization/dedup_exchange_rate_ab1.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/modified_models/generated/airbyte_ctes/test_normalization/dedup_exchange_rate_ab1.sql @@ -21,5 +21,5 @@ select from {{ source('test_normalization', '_airbyte_raw_dedup_exchange_rate') }} as table_alias -- dedup_exchange_rate where 1 = 1 -{{ incremental_clause('_airbyte_emitted_at') }} +{{ incremental_clause('_airbyte_emitted_at', this) }} diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/modified_models/generated/airbyte_ctes/test_normalization/dedup_exchange_rate_ab2.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/modified_models/generated/airbyte_ctes/test_normalization/dedup_exchange_rate_ab2.sql index b43312b67ebf..b5e700b36aa6 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/modified_models/generated/airbyte_ctes/test_normalization/dedup_exchange_rate_ab2.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/modified_models/generated/airbyte_ctes/test_normalization/dedup_exchange_rate_ab2.sql @@ -21,5 +21,5 @@ select from {{ ref('dedup_exchange_rate_ab1') }} -- dedup_exchange_rate where 1 = 1 -{{ incremental_clause('_airbyte_emitted_at') }} +{{ 
incremental_clause('_airbyte_emitted_at', this) }} diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/modified_models/generated/airbyte_ctes/test_normalization/renamed_dedup_cdc_excluded_ab1.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/modified_models/generated/airbyte_ctes/test_normalization/renamed_dedup_cdc_excluded_ab1.sql index 590e1e755b5c..dfa39c2a71eb 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/modified_models/generated/airbyte_ctes/test_normalization/renamed_dedup_cdc_excluded_ab1.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/modified_models/generated/airbyte_ctes/test_normalization/renamed_dedup_cdc_excluded_ab1.sql @@ -18,5 +18,5 @@ select from {{ source('test_normalization', '_airbyte_raw_renamed_dedup_cdc_excluded') }} as table_alias -- renamed_dedup_cdc_excluded where 1 = 1 -{{ incremental_clause('_airbyte_emitted_at') }} +{{ incremental_clause('_airbyte_emitted_at', this) }} diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/modified_models/generated/airbyte_ctes/test_normalization/renamed_dedup_cdc_excluded_ab2.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/modified_models/generated/airbyte_ctes/test_normalization/renamed_dedup_cdc_excluded_ab2.sql index 0718ac05fcbf..72f80140e007 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/modified_models/generated/airbyte_ctes/test_normalization/renamed_dedup_cdc_excluded_ab2.sql +++ 
b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/modified_models/generated/airbyte_ctes/test_normalization/renamed_dedup_cdc_excluded_ab2.sql @@ -18,5 +18,5 @@ select from {{ ref('renamed_dedup_cdc_excluded_ab1') }} -- renamed_dedup_cdc_excluded where 1 = 1 -{{ incremental_clause('_airbyte_emitted_at') }} +{{ incremental_clause('_airbyte_emitted_at', this) }} diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/modified_models/generated/airbyte_incremental/scd/test_normalization/dedup_cdc_excluded_scd.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/modified_models/generated/airbyte_incremental/scd/test_normalization/dedup_cdc_excluded_scd.sql new file mode 100644 index 000000000000..5affe9825e3b --- /dev/null +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/modified_models/generated/airbyte_incremental/scd/test_normalization/dedup_cdc_excluded_scd.sql @@ -0,0 +1,169 @@ +{{ config( + indexes = [{'columns':['_airbyte_active_row','_airbyte_unique_key_scd','_airbyte_emitted_at'],'type': 'btree'}], + unique_key = "_airbyte_unique_key_scd", + schema = "test_normalization", + post_hook = [" + {% + set final_table_relation = adapter.get_relation( + database=this.database, + schema=this.schema, + identifier='dedup_cdc_excluded' + ) + %} + {# + If the final table doesn't exist, then obviously we can't delete anything from it. + Also, after a reset, the final table is created without the _airbyte_unique_key column (this column is created during the first sync) + So skip this deletion if the column doesn't exist. 
(in this case, the table is guaranteed to be empty anyway) + #} + {% + if final_table_relation is not none and '_airbyte_unique_key' in adapter.get_columns_in_relation(final_table_relation)|map(attribute='name') + %} + -- Delete records which are no longer active: + -- This query is equivalent, but the left join version is more performant: + -- delete from final_table where unique_key in ( + -- select unique_key from scd_table where 1 = 1 + -- ) and unique_key not in ( + -- select unique_key from scd_table where active_row = 1 + -- ) + -- We're incremental against normalized_at rather than emitted_at because we need to fetch the SCD + -- entries that were _updated_ recently. This is because a deleted record will have an SCD record + -- which was emitted a long time ago, but recently re-normalized to have active_row = 0. + delete from {{ final_table_relation }} where {{ final_table_relation }}._airbyte_unique_key in ( + select recent_records.unique_key + from ( + select distinct _airbyte_unique_key as unique_key + from {{ this }} + where 1=1 {{ incremental_clause('_airbyte_normalized_at', this.schema + '.' + adapter.quote('dedup_cdc_excluded')) }} + ) recent_records + left join ( + select _airbyte_unique_key as unique_key, count(_airbyte_unique_key) as active_count + from {{ this }} + where _airbyte_active_row = 1 {{ incremental_clause('_airbyte_normalized_at', this.schema + '.' 
+ adapter.quote('dedup_cdc_excluded')) }} + group by _airbyte_unique_key + ) active_counts + on recent_records.unique_key = active_counts.unique_key + where active_count is null or active_count = 0 + ) + {% else %} + -- We have to have a non-empty query, so just do a noop delete + delete from {{ this }} where 1=0 + {% endif %} + ","delete from _airbyte_test_normalization.dedup_cdc_excluded_stg where _airbyte_emitted_at != (select max(_airbyte_emitted_at) from _airbyte_test_normalization.dedup_cdc_excluded_stg)"], + tags = [ "top-level" ] +) }} +-- depends_on: ref('dedup_cdc_excluded_stg') +with +{% if is_incremental() %} +new_data as ( + -- retrieve incremental "new" data + select + * + from {{ ref('dedup_cdc_excluded_stg') }} + -- dedup_cdc_excluded from {{ source('test_normalization', '_airbyte_raw_dedup_cdc_excluded') }} + where 1 = 1 + {{ incremental_clause('_airbyte_emitted_at', this) }} +), +new_data_ids as ( + -- build a subset of _airbyte_unique_key from rows that are new + select distinct + {{ dbt_utils.surrogate_key([ + adapter.quote('id'), + ]) }} as _airbyte_unique_key + from new_data +), +empty_new_data as ( + -- build an empty table to only keep the table's column types + select * from new_data where 1 = 0 +), +previous_active_scd_data as ( + -- retrieve "incomplete old" data that needs to be updated with an end date because of new changes + select + {{ star_intersect(ref('dedup_cdc_excluded_stg'), this, from_alias='inc_data', intersect_alias='this_data') }} + from {{ this }} as this_data + -- make a join with new_data using primary key to filter active data that need to be updated only + join new_data_ids on this_data._airbyte_unique_key = new_data_ids._airbyte_unique_key + -- force left join to NULL values (we just need to transfer column types only for the star_intersect macro on schema changes) + left join empty_new_data as inc_data on this_data._airbyte_ab_id = inc_data._airbyte_ab_id + where _airbyte_active_row = 1 +), +input_data as ( + select 
{{ dbt_utils.star(ref('dedup_cdc_excluded_stg')) }} from new_data + union all + select {{ dbt_utils.star(ref('dedup_cdc_excluded_stg')) }} from previous_active_scd_data +), +{% else %} +input_data as ( + select * + from {{ ref('dedup_cdc_excluded_stg') }} + -- dedup_cdc_excluded from {{ source('test_normalization', '_airbyte_raw_dedup_cdc_excluded') }} +), +{% endif %} +scd_data as ( + -- SQL model to build a Type 2 Slowly Changing Dimension (SCD) table for each record identified by their primary key + select + {{ dbt_utils.surrogate_key([ + adapter.quote('id'), + ]) }} as _airbyte_unique_key, + {{ adapter.quote('id') }}, + {{ adapter.quote('name') }}, + _ab_cdc_lsn, + _ab_cdc_updated_at, + _ab_cdc_deleted_at, + _ab_cdc_lsn as _airbyte_start_at, + lag(_ab_cdc_lsn) over ( + partition by {{ adapter.quote('id') }} + order by + _ab_cdc_lsn is null asc, + _ab_cdc_lsn desc, + _ab_cdc_updated_at desc, + _airbyte_emitted_at desc + ) as _airbyte_end_at, + case when row_number() over ( + partition by {{ adapter.quote('id') }} + order by + _ab_cdc_lsn is null asc, + _ab_cdc_lsn desc, + _ab_cdc_updated_at desc, + _airbyte_emitted_at desc + ) = 1 and _ab_cdc_deleted_at is null then 1 else 0 end as _airbyte_active_row, + _airbyte_ab_id, + _airbyte_emitted_at, + _airbyte_dedup_cdc_excluded_hashid + from input_data +), +dedup_data as ( + select + -- we need to ensure de-duplicated rows for merge/update queries + -- additionally, we generate a unique key for the scd table + row_number() over ( + partition by + _airbyte_unique_key, + _airbyte_start_at, + _airbyte_emitted_at, cast(_ab_cdc_deleted_at as {{ dbt_utils.type_string() }}), cast(_ab_cdc_updated_at as {{ dbt_utils.type_string() }}) + order by _airbyte_active_row desc, _airbyte_ab_id + ) as _airbyte_row_num, + {{ dbt_utils.surrogate_key([ + '_airbyte_unique_key', + '_airbyte_start_at', + '_airbyte_emitted_at', '_ab_cdc_deleted_at', '_ab_cdc_updated_at' + ]) }} as _airbyte_unique_key_scd, + scd_data.* + from scd_data +) 
+select + _airbyte_unique_key, + _airbyte_unique_key_scd, + {{ adapter.quote('id') }}, + {{ adapter.quote('name') }}, + _ab_cdc_lsn, + _ab_cdc_updated_at, + _ab_cdc_deleted_at, + _airbyte_start_at, + _airbyte_end_at, + _airbyte_active_row, + _airbyte_ab_id, + _airbyte_emitted_at, + {{ current_timestamp() }} as _airbyte_normalized_at, + _airbyte_dedup_cdc_excluded_hashid +from dedup_data where _airbyte_row_num = 1 + diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/modified_models/generated/airbyte_incremental/scd/test_normalization/dedup_exchange_rate_scd.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/modified_models/generated/airbyte_incremental/scd/test_normalization/dedup_exchange_rate_scd.sql index bf5adb993db9..7e6225fb7cfc 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/modified_models/generated/airbyte_incremental/scd/test_normalization/dedup_exchange_rate_scd.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/modified_models/generated/airbyte_incremental/scd/test_normalization/dedup_exchange_rate_scd.sql @@ -2,7 +2,53 @@ indexes = [{'columns':['_airbyte_active_row','_airbyte_unique_key_scd','_airbyte_emitted_at'],'type': 'btree'}], unique_key = "_airbyte_unique_key_scd", schema = "test_normalization", - post_hook = ["delete from _airbyte_test_normalization.dedup_exchange_rate_stg where _airbyte_emitted_at != (select max(_airbyte_emitted_at) from _airbyte_test_normalization.dedup_exchange_rate_stg)"], + post_hook = [" + {% + set final_table_relation = adapter.get_relation( + database=this.database, + schema=this.schema, + identifier='dedup_exchange_rate' + ) + %} + {# + If the final table doesn't exist, then obviously we can't delete anything 
from it. + Also, after a reset, the final table is created without the _airbyte_unique_key column (this column is created during the first sync) + So skip this deletion if the column doesn't exist. (in this case, the table is guaranteed to be empty anyway) + #} + {% + if final_table_relation is not none and '_airbyte_unique_key' in adapter.get_columns_in_relation(final_table_relation)|map(attribute='name') + %} + -- Delete records which are no longer active: + -- This query is equivalent, but the left join version is more performant: + -- delete from final_table where unique_key in ( + -- select unique_key from scd_table where 1 = 1 + -- ) and unique_key not in ( + -- select unique_key from scd_table where active_row = 1 + -- ) + -- We're incremental against normalized_at rather than emitted_at because we need to fetch the SCD + -- entries that were _updated_ recently. This is because a deleted record will have an SCD record + -- which was emitted a long time ago, but recently re-normalized to have active_row = 0. + delete from {{ final_table_relation }} where {{ final_table_relation }}._airbyte_unique_key in ( + select recent_records.unique_key + from ( + select distinct _airbyte_unique_key as unique_key + from {{ this }} + where 1=1 {{ incremental_clause('_airbyte_normalized_at', this.schema + '.' + adapter.quote('dedup_exchange_rate')) }} + ) recent_records + left join ( + select _airbyte_unique_key as unique_key, count(_airbyte_unique_key) as active_count + from {{ this }} + where _airbyte_active_row = 1 {{ incremental_clause('_airbyte_normalized_at', this.schema + '.' 
+ adapter.quote('dedup_exchange_rate')) }} + group by _airbyte_unique_key + ) active_counts + on recent_records.unique_key = active_counts.unique_key + where active_count is null or active_count = 0 + ) + {% else %} + -- We have to have a non-empty query, so just do a noop delete + delete from {{ this }} where 1=0 + {% endif %} + ","delete from _airbyte_test_normalization.dedup_exchange_rate_stg where _airbyte_emitted_at != (select max(_airbyte_emitted_at) from _airbyte_test_normalization.dedup_exchange_rate_stg)"], tags = [ "top-level" ] ) }} -- depends_on: ref('dedup_exchange_rate_stg') @@ -15,7 +61,7 @@ new_data as ( from {{ ref('dedup_exchange_rate_stg') }} -- dedup_exchange_rate from {{ source('test_normalization', '_airbyte_raw_dedup_exchange_rate') }} where 1 = 1 - {{ incremental_clause('_airbyte_emitted_at') }} + {{ incremental_clause('_airbyte_emitted_at', this) }} ), new_data_ids as ( -- build a subset of _airbyte_unique_key from rows that are new diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/modified_models/generated/airbyte_incremental/scd/test_normalization/renamed_dedup_cdc_excluded_scd.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/modified_models/generated/airbyte_incremental/scd/test_normalization/renamed_dedup_cdc_excluded_scd.sql index c0bcd34d3202..96f720b3d265 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/modified_models/generated/airbyte_incremental/scd/test_normalization/renamed_dedup_cdc_excluded_scd.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/modified_models/generated/airbyte_incremental/scd/test_normalization/renamed_dedup_cdc_excluded_scd.sql @@ -2,7 +2,53 @@ indexes = 
[{'columns':['_airbyte_active_row','_airbyte_unique_key_scd','_airbyte_emitted_at'],'type': 'btree'}], unique_key = "_airbyte_unique_key_scd", schema = "test_normalization", - post_hook = ["delete from _airbyte_test_normalization.renamed_dedup_cdc_excluded_stg where _airbyte_emitted_at != (select max(_airbyte_emitted_at) from _airbyte_test_normalization.renamed_dedup_cdc_excluded_stg)"], + post_hook = [" + {% + set final_table_relation = adapter.get_relation( + database=this.database, + schema=this.schema, + identifier='renamed_dedup_cdc_excluded' + ) + %} + {# + If the final table doesn't exist, then obviously we can't delete anything from it. + Also, after a reset, the final table is created without the _airbyte_unique_key column (this column is created during the first sync) + So skip this deletion if the column doesn't exist. (in this case, the table is guaranteed to be empty anyway) + #} + {% + if final_table_relation is not none and '_airbyte_unique_key' in adapter.get_columns_in_relation(final_table_relation)|map(attribute='name') + %} + -- Delete records which are no longer active: + -- This query is equivalent, but the left join version is more performant: + -- delete from final_table where unique_key in ( + -- select unique_key from scd_table where 1 = 1 + -- ) and unique_key not in ( + -- select unique_key from scd_table where active_row = 1 + -- ) + -- We're incremental against normalized_at rather than emitted_at because we need to fetch the SCD + -- entries that were _updated_ recently. This is because a deleted record will have an SCD record + -- which was emitted a long time ago, but recently re-normalized to have active_row = 0. + delete from {{ final_table_relation }} where {{ final_table_relation }}._airbyte_unique_key in ( + select recent_records.unique_key + from ( + select distinct _airbyte_unique_key as unique_key + from {{ this }} + where 1=1 {{ incremental_clause('_airbyte_normalized_at', this.schema + '.' 
+ adapter.quote('renamed_dedup_cdc_excluded')) }} + ) recent_records + left join ( + select _airbyte_unique_key as unique_key, count(_airbyte_unique_key) as active_count + from {{ this }} + where _airbyte_active_row = 1 {{ incremental_clause('_airbyte_normalized_at', this.schema + '.' + adapter.quote('renamed_dedup_cdc_excluded')) }} + group by _airbyte_unique_key + ) active_counts + on recent_records.unique_key = active_counts.unique_key + where active_count is null or active_count = 0 + ) + {% else %} + -- We have to have a non-empty query, so just do a noop delete + delete from {{ this }} where 1=0 + {% endif %} + ","delete from _airbyte_test_normalization.renamed_dedup_cdc_excluded_stg where _airbyte_emitted_at != (select max(_airbyte_emitted_at) from _airbyte_test_normalization.renamed_dedup_cdc_excluded_stg)"], tags = [ "top-level" ] ) }} -- depends_on: ref('renamed_dedup_cdc_excluded_stg') @@ -15,7 +61,7 @@ new_data as ( from {{ ref('renamed_dedup_cdc_excluded_stg') }} -- renamed_dedup_cdc_excluded from {{ source('test_normalization', '_airbyte_raw_renamed_dedup_cdc_excluded') }} where 1 = 1 - {{ incremental_clause('_airbyte_emitted_at') }} + {{ incremental_clause('_airbyte_emitted_at', this) }} ), new_data_ids as ( -- build a subset of _airbyte_unique_key from rows that are new diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/modified_models/generated/airbyte_incremental/test_normalization/dedup_cdc_excluded.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/modified_models/generated/airbyte_incremental/test_normalization/dedup_cdc_excluded.sql new file mode 100644 index 000000000000..32d70c680aa9 --- /dev/null +++ 
b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/modified_models/generated/airbyte_incremental/test_normalization/dedup_cdc_excluded.sql @@ -0,0 +1,25 @@ +{{ config( + indexes = [{'columns':['_airbyte_unique_key'],'unique':True}], + unique_key = "_airbyte_unique_key", + schema = "test_normalization", + tags = [ "top-level" ] +) }} +-- Final base SQL model +-- depends_on: {{ ref('dedup_cdc_excluded_scd') }} +select + _airbyte_unique_key, + {{ adapter.quote('id') }}, + {{ adapter.quote('name') }}, + _ab_cdc_lsn, + _ab_cdc_updated_at, + _ab_cdc_deleted_at, + _airbyte_ab_id, + _airbyte_emitted_at, + {{ current_timestamp() }} as _airbyte_normalized_at, + _airbyte_dedup_cdc_excluded_hashid +from {{ ref('dedup_cdc_excluded_scd') }} +-- dedup_cdc_excluded from {{ source('test_normalization', '_airbyte_raw_dedup_cdc_excluded') }} +where 1 = 1 +and _airbyte_active_row = 1 +{{ incremental_clause('_airbyte_emitted_at', this) }} + diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/modified_models/generated/airbyte_incremental/test_normalization/dedup_cdc_excluded_stg.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/modified_models/generated/airbyte_incremental/test_normalization/dedup_cdc_excluded_stg.sql new file mode 100644 index 000000000000..b0cd4bf7cb13 --- /dev/null +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/modified_models/generated/airbyte_incremental/test_normalization/dedup_cdc_excluded_stg.sql @@ -0,0 +1,22 @@ +{{ config( + indexes = [{'columns':['_airbyte_emitted_at'],'type':'btree'}], + unique_key = '_airbyte_ab_id', + schema = "_airbyte_test_normalization", + tags = [ "top-level-intermediate" ] +) }} +-- SQL model to build a hash column based on the values of 
this record +-- depends_on: {{ ref('dedup_cdc_excluded_ab2') }} +select + {{ dbt_utils.surrogate_key([ + adapter.quote('id'), + adapter.quote('name'), + '_ab_cdc_lsn', + '_ab_cdc_updated_at', + '_ab_cdc_deleted_at', + ]) }} as _airbyte_dedup_cdc_excluded_hashid, + tmp.* +from {{ ref('dedup_cdc_excluded_ab2') }} tmp +-- dedup_cdc_excluded +where 1 = 1 +{{ incremental_clause('_airbyte_emitted_at', this) }} + diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/modified_models/generated/airbyte_incremental/test_normalization/dedup_exchange_rate.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/modified_models/generated/airbyte_incremental/test_normalization/dedup_exchange_rate.sql index 8529ede3dcfa..3e51ad4d7256 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/modified_models/generated/airbyte_incremental/test_normalization/dedup_exchange_rate.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/modified_models/generated/airbyte_incremental/test_normalization/dedup_exchange_rate.sql @@ -24,5 +24,5 @@ from {{ ref('dedup_exchange_rate_scd') }} -- dedup_exchange_rate from {{ source('test_normalization', '_airbyte_raw_dedup_exchange_rate') }} where 1 = 1 and _airbyte_active_row = 1 -{{ incremental_clause('_airbyte_emitted_at') }} +{{ incremental_clause('_airbyte_emitted_at', this) }} diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/modified_models/generated/airbyte_incremental/test_normalization/dedup_exchange_rate_stg.sql 
b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/modified_models/generated/airbyte_incremental/test_normalization/dedup_exchange_rate_stg.sql index a4c1e8816f8a..35c866ac4d36 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/modified_models/generated/airbyte_incremental/test_normalization/dedup_exchange_rate_stg.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/modified_models/generated/airbyte_incremental/test_normalization/dedup_exchange_rate_stg.sql @@ -21,5 +21,5 @@ select from {{ ref('dedup_exchange_rate_ab2') }} tmp -- dedup_exchange_rate where 1 = 1 -{{ incremental_clause('_airbyte_emitted_at') }} +{{ incremental_clause('_airbyte_emitted_at', this) }} diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/modified_models/generated/airbyte_incremental/test_normalization/renamed_dedup_cdc_excluded.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/modified_models/generated/airbyte_incremental/test_normalization/renamed_dedup_cdc_excluded.sql index 80ff3fc2138c..672118dcf045 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/modified_models/generated/airbyte_incremental/test_normalization/renamed_dedup_cdc_excluded.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/modified_models/generated/airbyte_incremental/test_normalization/renamed_dedup_cdc_excluded.sql @@ -21,5 +21,5 @@ from {{ ref('renamed_dedup_cdc_excluded_scd') }} -- renamed_dedup_cdc_excluded from {{ source('test_normalization', 
'_airbyte_raw_renamed_dedup_cdc_excluded') }} where 1 = 1 and _airbyte_active_row = 1 -{{ incremental_clause('_airbyte_emitted_at') }} +{{ incremental_clause('_airbyte_emitted_at', this) }} diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/modified_models/generated/airbyte_incremental/test_normalization/renamed_dedup_cdc_excluded_stg.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/modified_models/generated/airbyte_incremental/test_normalization/renamed_dedup_cdc_excluded_stg.sql index 86d0e6f4451d..b2d5002b934a 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/modified_models/generated/airbyte_incremental/test_normalization/renamed_dedup_cdc_excluded_stg.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/modified_models/generated/airbyte_incremental/test_normalization/renamed_dedup_cdc_excluded_stg.sql @@ -18,5 +18,5 @@ select from {{ ref('renamed_dedup_cdc_excluded_ab2') }} tmp -- renamed_dedup_cdc_excluded where 1 = 1 -{{ incremental_clause('_airbyte_emitted_at') }} +{{ incremental_clause('_airbyte_emitted_at', this) }} diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/modified_models/generated/sources.yml b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/modified_models/generated/sources.yml index dd538a80131a..79ad1a1bb5c5 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/modified_models/generated/sources.yml +++ 
b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/modified_models/generated/sources.yml @@ -1,11 +1,12 @@ version: 2 sources: -- name: test_normalization - quoting: - database: true - schema: false - identifier: false - tables: - - name: _airbyte_raw_dedup_exchange_rate - - name: _airbyte_raw_exchange_rate - - name: _airbyte_raw_renamed_dedup_cdc_excluded + - name: test_normalization + quoting: + database: true + schema: false + identifier: false + tables: + - name: _airbyte_raw_dedup_cdc_excluded + - name: _airbyte_raw_dedup_exchange_rate + - name: _airbyte_raw_exchange_rate + - name: _airbyte_raw_renamed_dedup_cdc_excluded diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/second_output/airbyte_tables/test_normalization/exchange_rate.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/second_output/airbyte_tables/test_normalization/exchange_rate.sql index 7d795f97e67e..2a24e704fda2 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/second_output/airbyte_tables/test_normalization/exchange_rate.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/second_output/airbyte_tables/test_normalization/exchange_rate.sql @@ -31,9 +31,7 @@ select cast("id" as bigint ) as "id", - cast(currency as - varchar -) as currency, + cast(currency as text) as currency, cast(nullif("date", '') as date ) as "date", @@ -43,18 +41,14 @@ select cast("HKD@spƩƧiƤl & characters" as float ) as "HKD@spƩƧiƤl & characters", - cast(hkd_special___characters as - varchar -) as hkd_special___characters, + cast(hkd_special___characters as text) as hkd_special___characters, cast(nzd as float ) as nzd, cast(usd as float ) as usd, 
- cast("column`_'with""_quotes" as - varchar -) as "column`_'with""_quotes", + cast("column`_'with""_quotes" as text) as "column`_'with""_quotes", _airbyte_ab_id, _airbyte_emitted_at, now() as _airbyte_normalized_at @@ -66,27 +60,7 @@ where 1 = 1 -- SQL model to build a hash column based on the values of this record -- depends_on: __dbt__cte__exchange_rate_ab2 select - md5(cast(coalesce(cast("id" as - varchar -), '') || '-' || coalesce(cast(currency as - varchar -), '') || '-' || coalesce(cast("date" as - varchar -), '') || '-' || coalesce(cast(timestamp_col as - varchar -), '') || '-' || coalesce(cast("HKD@spƩƧiƤl & characters" as - varchar -), '') || '-' || coalesce(cast(hkd_special___characters as - varchar -), '') || '-' || coalesce(cast(nzd as - varchar -), '') || '-' || coalesce(cast(usd as - varchar -), '') || '-' || coalesce(cast("column`_'with""_quotes" as - varchar -), '') as - varchar -)) as _airbyte_exchange_rate_hashid, + md5(cast(coalesce(cast("id" as text), '') || '-' || coalesce(cast(currency as text), '') || '-' || coalesce(cast("date" as text), '') || '-' || coalesce(cast(timestamp_col as text), '') || '-' || coalesce(cast("HKD@spƩƧiƤl & characters" as text), '') || '-' || coalesce(cast(hkd_special___characters as text), '') || '-' || coalesce(cast(nzd as text), '') || '-' || coalesce(cast(usd as text), '') || '-' || coalesce(cast("column`_'with""_quotes" as text), '') as text)) as _airbyte_exchange_rate_hashid, tmp.* from __dbt__cte__exchange_rate_ab2 tmp -- exchange_rate diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/third_output/airbyte_incremental/scd/test_normalization/dedup_cdc_excluded_scd.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/third_output/airbyte_incremental/scd/test_normalization/dedup_cdc_excluded_scd.sql new file mode 100644 index 000000000000..a1fba0a6d7ff --- 
/dev/null +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/third_output/airbyte_incremental/scd/test_normalization/dedup_cdc_excluded_scd.sql @@ -0,0 +1,15 @@ + + + delete from "postgres".test_normalization."dedup_cdc_excluded_scd" + where (_airbyte_unique_key_scd) in ( + select (_airbyte_unique_key_scd) + from "dedup_cdc_excluded_scd__dbt_tmp" + ); + + + insert into "postgres".test_normalization."dedup_cdc_excluded_scd" ("_airbyte_unique_key", "_airbyte_unique_key_scd", "id", "name", "_ab_cdc_lsn", "_ab_cdc_updated_at", "_ab_cdc_deleted_at", "_airbyte_start_at", "_airbyte_end_at", "_airbyte_active_row", "_airbyte_ab_id", "_airbyte_emitted_at", "_airbyte_normalized_at", "_airbyte_dedup_cdc_excluded_hashid") + ( + select "_airbyte_unique_key", "_airbyte_unique_key_scd", "id", "name", "_ab_cdc_lsn", "_ab_cdc_updated_at", "_ab_cdc_deleted_at", "_airbyte_start_at", "_airbyte_end_at", "_airbyte_active_row", "_airbyte_ab_id", "_airbyte_emitted_at", "_airbyte_normalized_at", "_airbyte_dedup_cdc_excluded_hashid" + from "dedup_cdc_excluded_scd__dbt_tmp" + ) + \ No newline at end of file diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/third_output/airbyte_incremental/test_normalization/dedup_cdc_excluded.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/third_output/airbyte_incremental/test_normalization/dedup_cdc_excluded.sql new file mode 100644 index 000000000000..b3012059b462 --- /dev/null +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/third_output/airbyte_incremental/test_normalization/dedup_cdc_excluded.sql @@ -0,0 +1,15 @@ + + + delete from "postgres".test_normalization."dedup_cdc_excluded" + where (_airbyte_unique_key) in ( + select (_airbyte_unique_key) + 
from "dedup_cdc_excluded__dbt_tmp" + ); + + + insert into "postgres".test_normalization."dedup_cdc_excluded" ("_airbyte_unique_key", "id", "name", "_ab_cdc_lsn", "_ab_cdc_updated_at", "_ab_cdc_deleted_at", "_airbyte_ab_id", "_airbyte_emitted_at", "_airbyte_normalized_at", "_airbyte_dedup_cdc_excluded_hashid") + ( + select "_airbyte_unique_key", "id", "name", "_ab_cdc_lsn", "_ab_cdc_updated_at", "_ab_cdc_deleted_at", "_airbyte_ab_id", "_airbyte_emitted_at", "_airbyte_normalized_at", "_airbyte_dedup_cdc_excluded_hashid" + from "dedup_cdc_excluded__dbt_tmp" + ) + \ No newline at end of file diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/third_output/airbyte_incremental/test_normalization/dedup_cdc_excluded_stg.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/third_output/airbyte_incremental/test_normalization/dedup_cdc_excluded_stg.sql new file mode 100644 index 000000000000..d9f833d441bf --- /dev/null +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/third_output/airbyte_incremental/test_normalization/dedup_cdc_excluded_stg.sql @@ -0,0 +1,15 @@ + + + delete from "postgres"._airbyte_test_normalization."dedup_cdc_excluded_stg" + where (_airbyte_ab_id) in ( + select (_airbyte_ab_id) + from "dedup_cdc_excluded_stg__dbt_tmp" + ); + + + insert into "postgres"._airbyte_test_normalization."dedup_cdc_excluded_stg" ("_airbyte_dedup_cdc_excluded_hashid", "id", "name", "_ab_cdc_lsn", "_ab_cdc_updated_at", "_ab_cdc_deleted_at", "_airbyte_ab_id", "_airbyte_emitted_at", "_airbyte_normalized_at") + ( + select "_airbyte_dedup_cdc_excluded_hashid", "id", "name", "_ab_cdc_lsn", "_ab_cdc_updated_at", "_ab_cdc_deleted_at", "_airbyte_ab_id", "_airbyte_emitted_at", "_airbyte_normalized_at" + from "dedup_cdc_excluded_stg__dbt_tmp" + ) + \ No newline at end of 
file diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/third_output/airbyte_tables/test_normalization/exchange_rate.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/third_output/airbyte_tables/test_normalization/exchange_rate.sql index d7f0d50be215..155df4698f2d 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/third_output/airbyte_tables/test_normalization/exchange_rate.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/third_output/airbyte_tables/test_normalization/exchange_rate.sql @@ -31,9 +31,7 @@ select cast("id" as float ) as "id", - cast(currency as - varchar -) as currency, + cast(currency as text) as currency, cast(new_column as float ) as new_column, @@ -52,9 +50,7 @@ select cast(usd as float ) as usd, - cast("column`_'with""_quotes" as - varchar -) as "column`_'with""_quotes", + cast("column`_'with""_quotes" as text) as "column`_'with""_quotes", _airbyte_ab_id, _airbyte_emitted_at, now() as _airbyte_normalized_at @@ -66,27 +62,7 @@ where 1 = 1 -- SQL model to build a hash column based on the values of this record -- depends_on: __dbt__cte__exchange_rate_ab2 select - md5(cast(coalesce(cast("id" as - varchar -), '') || '-' || coalesce(cast(currency as - varchar -), '') || '-' || coalesce(cast(new_column as - varchar -), '') || '-' || coalesce(cast("date" as - varchar -), '') || '-' || coalesce(cast(timestamp_col as - varchar -), '') || '-' || coalesce(cast("HKD@spƩƧiƤl & characters" as - varchar -), '') || '-' || coalesce(cast(nzd as - varchar -), '') || '-' || coalesce(cast(usd as - varchar -), '') || '-' || coalesce(cast("column`_'with""_quotes" as - varchar -), '') as - varchar -)) as _airbyte_exchange_rate_hashid, + 
md5(cast(coalesce(cast("id" as text), '') || '-' || coalesce(cast(currency as text), '') || '-' || coalesce(cast(new_column as text), '') || '-' || coalesce(cast("date" as text), '') || '-' || coalesce(cast(timestamp_col as text), '') || '-' || coalesce(cast("HKD@spƩƧiƤl & characters" as text), '') || '-' || coalesce(cast(nzd as text), '') || '-' || coalesce(cast(usd as text), '') || '-' || coalesce(cast("column`_'with""_quotes" as text), '') as text)) as _airbyte_exchange_rate_hashid, tmp.* from __dbt__cte__exchange_rate_ab2 tmp -- exchange_rate diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_nested_streams/dbt_project.yml b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_nested_streams/dbt_project.yml index 7631ef356dc9..12f7ddeae970 100755 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_nested_streams/dbt_project.yml +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_nested_streams/dbt_project.yml @@ -1,46 +1,32 @@ -# This file is necessary to install dbt-utils with dbt deps -# the content will be overwritten by the transform function - -# Name your package! Package names should contain only lowercase characters -# and underscores. A good package name should reflect your organization's -# name or the intended use of these models -name: "airbyte_utils" +name: airbyte_utils version: "1.0" config-version: 2 - -# This setting configures which "profile" dbt uses for this project. Profiles contain -# database connection information, and should be configured in the ~/.dbt/profiles.yml file -profile: "normalize" - -# These configurations specify where dbt should look for different types of files. -# The `model-paths` config, for example, states that source models can be found -# in the "models/" directory. 
You probably won't need to change these! -model-paths: ["models"] -docs-paths: ["docs"] -analysis-paths: ["analysis"] -test-paths: ["tests"] -seed-paths: ["data"] -macro-paths: ["macros"] - -target-path: "../build" # directory which will store compiled SQL files -log-path: "../logs" # directory which will store DBT logs -packages-install-path: "/dbt" # directory which will store external DBT dependencies - -clean-targets: # directories to be removed by `dbt clean` - - "build" - - "dbt_modules" - +profile: normalize +model-paths: + - models +docs-paths: + - docs +analysis-paths: + - analysis +test-paths: + - tests +seed-paths: + - data +macro-paths: + - macros +target-path: ../build +log-path: ../logs +packages-install-path: /dbt +clean-targets: + - build + - dbt_modules quoting: database: true - # Temporarily disabling the behavior of the ExtendedNameTransformer on table/schema names, see (issue #1785) - # all schemas should be unquoted schema: false identifier: true - -# You can define configurations for models in the `model-paths` directory here. -# Using these configurations, you can enable or disable models, change how they -# are materialized, and more! 
models: + +transient: false + +pre-hook: SET enable_case_sensitive_identifier to TRUE airbyte_utils: +materialized: table generated: @@ -57,7 +43,77 @@ models: airbyte_views: +tags: airbyte_internal_views +materialized: view - dispatch: - macro_namespace: dbt_utils - search_order: ["airbyte_utils", "dbt_utils"] + search_order: + - airbyte_utils + - dbt_utils +vars: + json_column: _airbyte_data + models_to_source: + nested_stream_with_complex_columns_resulting_into_long_names_ab1: test_normalization._airbyte_raw_nested_stream_with_complex_columns_resulting_into_long_names + nested_stream_with_complex_columns_resulting_into_long_names_ab2: test_normalization._airbyte_raw_nested_stream_with_complex_columns_resulting_into_long_names + nested_stream_with_complex_columns_resulting_into_long_names_stg: test_normalization._airbyte_raw_nested_stream_with_complex_columns_resulting_into_long_names + nested_stream_with_complex_columns_resulting_into_long_names_scd: test_normalization._airbyte_raw_nested_stream_with_complex_columns_resulting_into_long_names + nested_stream_with_complex_columns_resulting_into_long_names: test_normalization._airbyte_raw_nested_stream_with_complex_columns_resulting_into_long_names + non_nested_stream_without_namespace_resulting_into_long_names_ab1: test_normalization._airbyte_raw_non_nested_stream_without_namespace_resulting_into_long_names + non_nested_stream_without_namespace_resulting_into_long_names_ab2: test_normalization._airbyte_raw_non_nested_stream_without_namespace_resulting_into_long_names + non_nested_stream_without_namespace_resulting_into_long_names_ab3: test_normalization._airbyte_raw_non_nested_stream_without_namespace_resulting_into_long_names + non_nested_stream_without_namespace_resulting_into_long_names: test_normalization._airbyte_raw_non_nested_stream_without_namespace_resulting_into_long_names + some_stream_that_was_empty_ab1: test_normalization._airbyte_raw_some_stream_that_was_empty + some_stream_that_was_empty_ab2: 
test_normalization._airbyte_raw_some_stream_that_was_empty + some_stream_that_was_empty_stg: test_normalization._airbyte_raw_some_stream_that_was_empty + some_stream_that_was_empty_scd: test_normalization._airbyte_raw_some_stream_that_was_empty + some_stream_that_was_empty: test_normalization._airbyte_raw_some_stream_that_was_empty + simple_stream_with_namespace_resulting_into_long_names_ab1: test_normalization_namespace._airbyte_raw_simple_stream_with_namespace_resulting_into_long_names + simple_stream_with_namespace_resulting_into_long_names_ab2: test_normalization_namespace._airbyte_raw_simple_stream_with_namespace_resulting_into_long_names + simple_stream_with_namespace_resulting_into_long_names_ab3: test_normalization_namespace._airbyte_raw_simple_stream_with_namespace_resulting_into_long_names + simple_stream_with_namespace_resulting_into_long_names: test_normalization_namespace._airbyte_raw_simple_stream_with_namespace_resulting_into_long_names + conflict_stream_name_ab1: test_normalization._airbyte_raw_conflict_stream_name + conflict_stream_name_ab2: test_normalization._airbyte_raw_conflict_stream_name + conflict_stream_name_ab3: test_normalization._airbyte_raw_conflict_stream_name + conflict_stream_name: test_normalization._airbyte_raw_conflict_stream_name + conflict_stream_scalar_ab1: test_normalization._airbyte_raw_conflict_stream_scalar + conflict_stream_scalar_ab2: test_normalization._airbyte_raw_conflict_stream_scalar + conflict_stream_scalar_ab3: test_normalization._airbyte_raw_conflict_stream_scalar + conflict_stream_scalar: test_normalization._airbyte_raw_conflict_stream_scalar + conflict_stream_array_ab1: test_normalization._airbyte_raw_conflict_stream_array + conflict_stream_array_ab2: test_normalization._airbyte_raw_conflict_stream_array + conflict_stream_array_ab3: test_normalization._airbyte_raw_conflict_stream_array + conflict_stream_array: test_normalization._airbyte_raw_conflict_stream_array + unnest_alias_ab1: 
test_normalization._airbyte_raw_unnest_alias + unnest_alias_ab2: test_normalization._airbyte_raw_unnest_alias + unnest_alias_ab3: test_normalization._airbyte_raw_unnest_alias + unnest_alias: test_normalization._airbyte_raw_unnest_alias + nested_stream_with_complex_columns_resulting_into_long_names_partition_ab1: test_normalization._airbyte_raw_nested_stream_with_complex_columns_resulting_into_long_names + nested_stream_with_complex_columns_resulting_into_long_names_partition_ab2: test_normalization._airbyte_raw_nested_stream_with_complex_columns_resulting_into_long_names + nested_stream_with_complex_columns_resulting_into_long_names_partition_ab3: test_normalization._airbyte_raw_nested_stream_with_complex_columns_resulting_into_long_names + nested_stream_with_complex_columns_resulting_into_long_names_partition: test_normalization._airbyte_raw_nested_stream_with_complex_columns_resulting_into_long_names + conflict_stream_name_conflict_stream_name_ab1: test_normalization._airbyte_raw_conflict_stream_name + conflict_stream_name_conflict_stream_name_ab2: test_normalization._airbyte_raw_conflict_stream_name + conflict_stream_name_conflict_stream_name_ab3: test_normalization._airbyte_raw_conflict_stream_name + conflict_stream_name_conflict_stream_name: test_normalization._airbyte_raw_conflict_stream_name + unnest_alias_children_ab1: test_normalization._airbyte_raw_unnest_alias + unnest_alias_children_ab2: test_normalization._airbyte_raw_unnest_alias + unnest_alias_children_ab3: test_normalization._airbyte_raw_unnest_alias + unnest_alias_children: test_normalization._airbyte_raw_unnest_alias + nested_stream_with_complex_columns_resulting_into_long_names_partition_double_array_data_ab1: test_normalization._airbyte_raw_nested_stream_with_complex_columns_resulting_into_long_names + nested_stream_with_complex_columns_resulting_into_long_names_partition_double_array_data_ab2: test_normalization._airbyte_raw_nested_stream_with_complex_columns_resulting_into_long_names + 
nested_stream_with_complex_columns_resulting_into_long_names_partition_double_array_data_ab3: test_normalization._airbyte_raw_nested_stream_with_complex_columns_resulting_into_long_names + nested_stream_with_complex_columns_resulting_into_long_names_partition_double_array_data: test_normalization._airbyte_raw_nested_stream_with_complex_columns_resulting_into_long_names + nested_stream_with_complex_columns_resulting_into_long_names_partition_data_ab1: test_normalization._airbyte_raw_nested_stream_with_complex_columns_resulting_into_long_names + nested_stream_with_complex_columns_resulting_into_long_names_partition_data_ab2: test_normalization._airbyte_raw_nested_stream_with_complex_columns_resulting_into_long_names + nested_stream_with_complex_columns_resulting_into_long_names_partition_data_ab3: test_normalization._airbyte_raw_nested_stream_with_complex_columns_resulting_into_long_names + nested_stream_with_complex_columns_resulting_into_long_names_partition_data: test_normalization._airbyte_raw_nested_stream_with_complex_columns_resulting_into_long_names + conflict_stream_name_conflict_stream_name_conflict_stream_name_ab1: test_normalization._airbyte_raw_conflict_stream_name + conflict_stream_name_conflict_stream_name_conflict_stream_name_ab2: test_normalization._airbyte_raw_conflict_stream_name + conflict_stream_name_conflict_stream_name_conflict_stream_name_ab3: test_normalization._airbyte_raw_conflict_stream_name + conflict_stream_name_conflict_stream_name_conflict_stream_name: test_normalization._airbyte_raw_conflict_stream_name + unnest_alias_children_owner_ab1: test_normalization._airbyte_raw_unnest_alias + unnest_alias_children_owner_ab2: test_normalization._airbyte_raw_unnest_alias + unnest_alias_children_owner_ab3: test_normalization._airbyte_raw_unnest_alias + unnest_alias_children_owner: test_normalization._airbyte_raw_unnest_alias + unnest_alias_children_owner_column___with__quotes_ab1: test_normalization._airbyte_raw_unnest_alias + 
unnest_alias_children_owner_column___with__quotes_ab2: test_normalization._airbyte_raw_unnest_alias + unnest_alias_children_owner_column___with__quotes_ab3: test_normalization._airbyte_raw_unnest_alias + unnest_alias_children_owner_column___with__quotes: test_normalization._airbyte_raw_unnest_alias diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_nested_streams/first_output/airbyte_incremental/scd/test_normalization/nested_stream_with_complex_columns_resulting_into_long_names_scd.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_nested_streams/first_output/airbyte_incremental/scd/test_normalization/nested_stream_with_complex_columns_resulting_into_long_names_scd.sql index caeba18c2477..753b62319771 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_nested_streams/first_output/airbyte_incremental/scd/test_normalization/nested_stream_with_complex_columns_resulting_into_long_names_scd.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_nested_streams/first_output/airbyte_incremental/scd/test_normalization/nested_stream_with_complex_columns_resulting_into_long_names_scd.sql @@ -21,7 +21,7 @@ input_data as ( scd_data as ( -- SQL model to build a Type 2 Slowly Changing Dimension (SCD) table for each record identified by their primary key select - md5(cast(coalesce(cast(id as varchar), '') as varchar)) as _airbyte_unique_key, + md5(cast(coalesce(cast(id as text), '') as text)) as _airbyte_unique_key, id, date, "partition", @@ -56,7 +56,7 @@ dedup_data as ( _airbyte_emitted_at order by _airbyte_active_row desc, _airbyte_ab_id ) as _airbyte_row_num, - md5(cast(coalesce(cast(_airbyte_unique_key as varchar), '') || '-' || coalesce(cast(_airbyte_start_at as varchar), '') || '-' || coalesce(cast(_airbyte_emitted_at as 
varchar), '') as varchar)) as _airbyte_unique_key_scd, + md5(cast(coalesce(cast(_airbyte_unique_key as text), '') || '-' || coalesce(cast(_airbyte_start_at as text), '') || '-' || coalesce(cast(_airbyte_emitted_at as text), '') as text)) as _airbyte_unique_key_scd, scd_data.* from scd_data ) diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_nested_streams/first_output/airbyte_incremental/test_normalization/nested_stream_with_complex_columns_resulting_into_long_names_partition.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_nested_streams/first_output/airbyte_incremental/test_normalization/nested_stream_with_complex_columns_resulting_into_long_names_partition.sql index 25a5f72a235c..8348fdeb8132 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_nested_streams/first_output/airbyte_incremental/test_normalization/nested_stream_with_complex_columns_resulting_into_long_names_partition.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_nested_streams/first_output/airbyte_incremental/test_normalization/nested_stream_with_complex_columns_resulting_into_long_names_partition.sql @@ -15,8 +15,8 @@ with __dbt__cte__nested_stream_with_complex_columns_resulting_into_long_names_pa -- depends_on: "integrationtests".test_normalization."nested_stream_with_complex_columns_resulting_into_long_names_scd" select _airbyte_nested_stream_with_complex_columns_resulting_into_long_names_hashid, - json_extract_path_text("partition", 'double_array_data', true) as double_array_data, - json_extract_path_text("partition", 'DATA', true) as data, + "partition"."double_array_data" as double_array_data, + "partition"."DATA" as data, _airbyte_ab_id, _airbyte_emitted_at, getdate() as _airbyte_normalized_at @@ -45,7 +45,7 @@ where 1 = 1 -- SQL model 
to build a hash column based on the values of this record -- depends_on: __dbt__cte__nested_stream_with_complex_columns_resulting_into_long_names_partition_ab2 select - md5(cast(coalesce(cast(_airbyte_nested_stream_with_complex_columns_resulting_into_long_names_hashid as varchar), '') || '-' || coalesce(cast(double_array_data as varchar), '') || '-' || coalesce(cast(data as varchar), '') as varchar)) as _airbyte_partition_hashid, + md5(cast(coalesce(cast(_airbyte_nested_stream_with_complex_columns_resulting_into_long_names_hashid as text), '') || '-' || coalesce(cast(json_serialize(double_array_data) as text), '') || '-' || coalesce(cast(json_serialize(data) as text), '') as text)) as _airbyte_partition_hashid, tmp.* from __dbt__cte__nested_stream_with_complex_columns_resulting_into_long_names_partition_ab2 tmp -- partition at nested_stream_with_complex_columns_resulting_into_long_names/partition diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_nested_streams/first_output/airbyte_incremental/test_normalization/nested_stream_with_complex_columns_resulting_into_long_names_partition_data.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_nested_streams/first_output/airbyte_incremental/test_normalization/nested_stream_with_complex_columns_resulting_into_long_names_partition_data.sql index 0cd481382f10..3f46c9e431c0 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_nested_streams/first_output/airbyte_incremental/test_normalization/nested_stream_with_complex_columns_resulting_into_long_names_partition_data.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_nested_streams/first_output/airbyte_incremental/test_normalization/nested_stream_with_complex_columns_resulting_into_long_names_partition_data.sql @@ -13,53 +13,16 
@@ with __dbt__cte__nested_stream_with_complex_columns_resulting_into_long_names_pa -- SQL model to parse JSON blob stored in a single column and extract into separated field columns as described by the JSON Schema -- depends_on: "integrationtests".test_normalization."nested_stream_with_complex_columns_resulting_into_long_names_partition" -with numbers as ( - - - - - with p as ( - select 0 as generated_number union all select 1 - ), unioned as ( - - select - - - p0.generated_number * power(2, 0) - - - + 1 - as generated_number - - from - - - p as p0 - - - - ) - - select * - from unioned - where generated_number <= 1 - order by generated_number - -), -joined as ( - select - _airbyte_partition_hashid as _airbyte_hashid, - json_extract_array_element_text(data, numbers.generated_number::int - 1, true) as _airbyte_nested_data - from "integrationtests".test_normalization."nested_stream_with_complex_columns_resulting_into_long_names_partition" - cross join numbers - -- only generate the number of records in the cross join that corresponds - -- to the number of items in "integrationtests".test_normalization."nested_stream_with_complex_columns_resulting_into_long_names_partition".data - where numbers.generated_number <= json_array_length(data, true) -) + with joined as ( + select + table_alias._airbyte_partition_hashid as _airbyte_hashid, + _airbyte_nested_data + from "integrationtests".test_normalization."nested_stream_with_complex_columns_resulting_into_long_names_partition" as table_alias, table_alias.data as _airbyte_nested_data + ) select _airbyte_partition_hashid, - case when json_extract_path_text(_airbyte_nested_data, 'currency', true) != '' then json_extract_path_text(_airbyte_nested_data, 'currency', true) end as currency, + case when _airbyte_nested_data."currency" != '' then _airbyte_nested_data."currency" end as currency, _airbyte_ab_id, _airbyte_emitted_at, getdate() as _airbyte_normalized_at @@ -75,7 +38,7 @@ and data is not null -- depends_on: 
__dbt__cte__nested_stream_with_complex_columns_resulting_into_long_names_partition_data_ab1 select _airbyte_partition_hashid, - cast(currency as varchar) as currency, + cast(currency as text) as currency, _airbyte_ab_id, _airbyte_emitted_at, getdate() as _airbyte_normalized_at @@ -88,7 +51,7 @@ where 1 = 1 -- SQL model to build a hash column based on the values of this record -- depends_on: __dbt__cte__nested_stream_with_complex_columns_resulting_into_long_names_partition_data_ab2 select - md5(cast(coalesce(cast(_airbyte_partition_hashid as varchar), '') || '-' || coalesce(cast(currency as varchar), '') as varchar)) as _airbyte_data_hashid, + md5(cast(coalesce(cast(_airbyte_partition_hashid as text), '') || '-' || coalesce(cast(currency as text), '') as text)) as _airbyte_data_hashid, tmp.* from __dbt__cte__nested_stream_with_complex_columns_resulting_into_long_names_partition_data_ab2 tmp -- data at nested_stream_with_complex_columns_resulting_into_long_names/partition/DATA diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_nested_streams/first_output/airbyte_incremental/test_normalization/nested_stream_with_complex_columns_resulting_into_long_names_partition_double_array_data.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_nested_streams/first_output/airbyte_incremental/test_normalization/nested_stream_with_complex_columns_resulting_into_long_names_partition_double_array_data.sql index 0ef9e77bb055..84fbebb03b50 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_nested_streams/first_output/airbyte_incremental/test_normalization/nested_stream_with_complex_columns_resulting_into_long_names_partition_double_array_data.sql +++ 
b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_nested_streams/first_output/airbyte_incremental/test_normalization/nested_stream_with_complex_columns_resulting_into_long_names_partition_double_array_data.sql @@ -13,53 +13,16 @@ with __dbt__cte__nested_stream_with_complex_columns_resulting_into_long_names_pa -- SQL model to parse JSON blob stored in a single column and extract into separated field columns as described by the JSON Schema -- depends_on: "integrationtests".test_normalization."nested_stream_with_complex_columns_resulting_into_long_names_partition" -with numbers as ( - - - - - with p as ( - select 0 as generated_number union all select 1 - ), unioned as ( - - select - - - p0.generated_number * power(2, 0) - - - + 1 - as generated_number - - from - - - p as p0 - - - - ) - - select * - from unioned - where generated_number <= 2 - order by generated_number - -), -joined as ( - select - _airbyte_partition_hashid as _airbyte_hashid, - json_extract_array_element_text(double_array_data, numbers.generated_number::int - 1, true) as _airbyte_nested_data - from "integrationtests".test_normalization."nested_stream_with_complex_columns_resulting_into_long_names_partition" - cross join numbers - -- only generate the number of records in the cross join that corresponds - -- to the number of items in "integrationtests".test_normalization."nested_stream_with_complex_columns_resulting_into_long_names_partition".double_array_data - where numbers.generated_number <= json_array_length(double_array_data, true) -) + with joined as ( + select + table_alias._airbyte_partition_hashid as _airbyte_hashid, + _airbyte_nested_data + from "integrationtests".test_normalization."nested_stream_with_complex_columns_resulting_into_long_names_partition" as table_alias, table_alias.double_array_data as _airbyte_nested_data + ) select _airbyte_partition_hashid, - case when json_extract_path_text(_airbyte_nested_data, 'id', true) != '' 
then json_extract_path_text(_airbyte_nested_data, 'id', true) end as id, + case when _airbyte_nested_data."id" != '' then _airbyte_nested_data."id" end as id, _airbyte_ab_id, _airbyte_emitted_at, getdate() as _airbyte_normalized_at @@ -75,7 +38,7 @@ and double_array_data is not null -- depends_on: __dbt__cte__nested_stream_with_complex_columns_resulting_into_long_names_partition_double_array_data_ab1 select _airbyte_partition_hashid, - cast(id as varchar) as id, + cast(id as text) as id, _airbyte_ab_id, _airbyte_emitted_at, getdate() as _airbyte_normalized_at @@ -88,7 +51,7 @@ where 1 = 1 -- SQL model to build a hash column based on the values of this record -- depends_on: __dbt__cte__nested_stream_with_complex_columns_resulting_into_long_names_partition_double_array_data_ab2 select - md5(cast(coalesce(cast(_airbyte_partition_hashid as varchar), '') || '-' || coalesce(cast(id as varchar), '') as varchar)) as _airbyte_double_array_data_hashid, + md5(cast(coalesce(cast(_airbyte_partition_hashid as text), '') || '-' || coalesce(cast(id as text), '') as text)) as _airbyte_double_array_data_hashid, tmp.* from __dbt__cte__nested_stream_with_complex_columns_resulting_into_long_names_partition_double_array_data_ab2 tmp -- double_array_data at nested_stream_with_complex_columns_resulting_into_long_names/partition/double_array_data diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_nested_streams/models/generated/airbyte_ctes/test_normalization/nested_stream_with_complex_columns_resulting_into_long_names_ab1.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_nested_streams/models/generated/airbyte_ctes/test_normalization/nested_stream_with_complex_columns_resulting_into_long_names_ab1.sql index 4e3c132b7818..c6c4c7bb3973 100644 --- 
a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_nested_streams/models/generated/airbyte_ctes/test_normalization/nested_stream_with_complex_columns_resulting_into_long_names_ab1.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_nested_streams/models/generated/airbyte_ctes/test_normalization/nested_stream_with_complex_columns_resulting_into_long_names_ab1.sql @@ -16,5 +16,5 @@ select from {{ source('test_normalization', '_airbyte_raw_nested_stream_with_complex_columns_resulting_into_long_names') }} as table_alias -- nested_stream_with_complex_columns_resulting_into_long_names where 1 = 1 -{{ incremental_clause('_airbyte_emitted_at') }} +{{ incremental_clause('_airbyte_emitted_at', this) }} diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_nested_streams/models/generated/airbyte_ctes/test_normalization/nested_stream_with_complex_columns_resulting_into_long_names_ab2.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_nested_streams/models/generated/airbyte_ctes/test_normalization/nested_stream_with_complex_columns_resulting_into_long_names_ab2.sql index 35b02dc8a020..41c1f86de0c7 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_nested_streams/models/generated/airbyte_ctes/test_normalization/nested_stream_with_complex_columns_resulting_into_long_names_ab2.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_nested_streams/models/generated/airbyte_ctes/test_normalization/nested_stream_with_complex_columns_resulting_into_long_names_ab2.sql @@ -16,5 +16,5 @@ select from {{ ref('nested_stream_with_complex_columns_resulting_into_long_names_ab1') }} -- 
nested_stream_with_complex_columns_resulting_into_long_names where 1 = 1 -{{ incremental_clause('_airbyte_emitted_at') }} +{{ incremental_clause('_airbyte_emitted_at', this) }} diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_nested_streams/models/generated/airbyte_ctes/test_normalization/nested_stream_with_complex_columns_resulting_into_long_names_partition_ab1.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_nested_streams/models/generated/airbyte_ctes/test_normalization/nested_stream_with_complex_columns_resulting_into_long_names_partition_ab1.sql index 96a05e3dd72c..7d9968fe7170 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_nested_streams/models/generated/airbyte_ctes/test_normalization/nested_stream_with_complex_columns_resulting_into_long_names_partition_ab1.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_nested_streams/models/generated/airbyte_ctes/test_normalization/nested_stream_with_complex_columns_resulting_into_long_names_partition_ab1.sql @@ -16,5 +16,5 @@ from {{ ref('nested_stream_with_complex_columns_resulting_into_long_names_scd') -- partition at nested_stream_with_complex_columns_resulting_into_long_names/partition where 1 = 1 and {{ adapter.quote('partition') }} is not null -{{ incremental_clause('_airbyte_emitted_at') }} +{{ incremental_clause('_airbyte_emitted_at', this) }} diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_nested_streams/models/generated/airbyte_ctes/test_normalization/nested_stream_with_complex_columns_resulting_into_long_names_partition_data_ab1.sql 
b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_nested_streams/models/generated/airbyte_ctes/test_normalization/nested_stream_with_complex_columns_resulting_into_long_names_partition_data_ab1.sql index a50b54ec3b7b..b3f16a06f544 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_nested_streams/models/generated/airbyte_ctes/test_normalization/nested_stream_with_complex_columns_resulting_into_long_names_partition_data_ab1.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_nested_streams/models/generated/airbyte_ctes/test_normalization/nested_stream_with_complex_columns_resulting_into_long_names_partition_data_ab1.sql @@ -17,5 +17,5 @@ from {{ ref('nested_stream_with_complex_columns_resulting_into_long_names_partit {{ cross_join_unnest('partition', 'data') }} where 1 = 1 and data is not null -{{ incremental_clause('_airbyte_emitted_at') }} +{{ incremental_clause('_airbyte_emitted_at', this) }} diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_nested_streams/models/generated/airbyte_ctes/test_normalization/nested_stream_with_complex_columns_resulting_into_long_names_partition_double_array_data_ab1.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_nested_streams/models/generated/airbyte_ctes/test_normalization/nested_stream_with_complex_columns_resulting_into_long_names_partition_double_array_data_ab1.sql index ff6a32a5cf48..7fe25a4c8eca 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_nested_streams/models/generated/airbyte_ctes/test_normalization/nested_stream_with_complex_columns_resulting_into_long_names_partition_double_array_data_ab1.sql +++ 
b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_nested_streams/models/generated/airbyte_ctes/test_normalization/nested_stream_with_complex_columns_resulting_into_long_names_partition_double_array_data_ab1.sql @@ -17,5 +17,5 @@ from {{ ref('nested_stream_with_complex_columns_resulting_into_long_names_partit {{ cross_join_unnest('partition', 'double_array_data') }} where 1 = 1 and double_array_data is not null -{{ incremental_clause('_airbyte_emitted_at') }} +{{ incremental_clause('_airbyte_emitted_at', this) }} diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_nested_streams/models/generated/airbyte_incremental/scd/test_normalization/nested_stream_with_complex_columns_resulting_into_long_names_scd.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_nested_streams/models/generated/airbyte_incremental/scd/test_normalization/nested_stream_with_complex_columns_resulting_into_long_names_scd.sql index e89e97f58fea..627f56e3ad2a 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_nested_streams/models/generated/airbyte_incremental/scd/test_normalization/nested_stream_with_complex_columns_resulting_into_long_names_scd.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_nested_streams/models/generated/airbyte_incremental/scd/test_normalization/nested_stream_with_complex_columns_resulting_into_long_names_scd.sql @@ -2,7 +2,53 @@ sort = ["_airbyte_active_row", "_airbyte_unique_key_scd", "_airbyte_emitted_at"], unique_key = "_airbyte_unique_key_scd", schema = "test_normalization", - post_hook = ["drop view _airbyte_test_normalization.nested_stream_with_complex_columns_resulting_into_long_names_stg"], + post_hook = [" + {% + set final_table_relation = adapter.get_relation( + 
database=this.database, + schema=this.schema, + identifier='nested_stream_with_complex_columns_resulting_into_long_names' + ) + %} + {# + If the final table doesn't exist, then obviously we can't delete anything from it. + Also, after a reset, the final table is created without the _airbyte_unique_key column (this column is created during the first sync) + So skip this deletion if the column doesn't exist. (in this case, the table is guaranteed to be empty anyway) + #} + {% + if final_table_relation is not none and '_airbyte_unique_key' in adapter.get_columns_in_relation(final_table_relation)|map(attribute='name') + %} + -- Delete records which are no longer active: + -- This query is equivalent, but the left join version is more performant: + -- delete from final_table where unique_key in ( + -- select unique_key from scd_table where 1 = 1 + -- ) and unique_key not in ( + -- select unique_key from scd_table where active_row = 1 + -- ) + -- We're incremental against normalized_at rather than emitted_at because we need to fetch the SCD + -- entries that were _updated_ recently. This is because a deleted record will have an SCD record + -- which was emitted a long time ago, but recently re-normalized to have active_row = 0. + delete from {{ final_table_relation }} where {{ final_table_relation }}._airbyte_unique_key in ( + select recent_records.unique_key + from ( + select distinct _airbyte_unique_key as unique_key + from {{ this }} + where 1=1 {{ incremental_clause('_airbyte_normalized_at', this.schema + '.' + adapter.quote('nested_stream_with_complex_columns_resulting_into_long_names')) }} + ) recent_records + left join ( + select _airbyte_unique_key as unique_key, count(_airbyte_unique_key) as active_count + from {{ this }} + where _airbyte_active_row = 1 {{ incremental_clause('_airbyte_normalized_at', this.schema + '.' 
+ adapter.quote('nested_stream_with_complex_columns_resulting_into_long_names')) }} + group by _airbyte_unique_key + ) active_counts + on recent_records.unique_key = active_counts.unique_key + where active_count is null or active_count = 0 + ) + {% else %} + -- We have to have a non-empty query, so just do a noop delete + delete from {{ this }} where 1=0 + {% endif %} + ","drop view _airbyte_test_normalization.nested_stream_with_complex_columns_resulting_into_long_names_stg"], tags = [ "top-level" ] ) }} -- depends_on: ref('nested_stream_with_complex_columns_resulting_into_long_names_stg') @@ -15,7 +61,7 @@ new_data as ( from {{ ref('nested_stream_with_complex_columns_resulting_into_long_names_stg') }} -- nested_stream_with_complex_columns_resulting_into_long_names from {{ source('test_normalization', '_airbyte_raw_nested_stream_with_complex_columns_resulting_into_long_names') }} where 1 = 1 - {{ incremental_clause('_airbyte_emitted_at') }} + {{ incremental_clause('_airbyte_emitted_at', this) }} ), new_data_ids as ( -- build a subset of _airbyte_unique_key from rows that are new diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_nested_streams/models/generated/airbyte_incremental/test_normalization/nested_stream_with_complex_columns_resulting_into_long_names.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_nested_streams/models/generated/airbyte_incremental/test_normalization/nested_stream_with_complex_columns_resulting_into_long_names.sql index d6f1dc869ff5..7b608604bbee 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_nested_streams/models/generated/airbyte_incremental/test_normalization/nested_stream_with_complex_columns_resulting_into_long_names.sql +++ 
b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_nested_streams/models/generated/airbyte_incremental/test_normalization/nested_stream_with_complex_columns_resulting_into_long_names.sql @@ -19,5 +19,5 @@ from {{ ref('nested_stream_with_complex_columns_resulting_into_long_names_scd') -- nested_stream_with_complex_columns_resulting_into_long_names from {{ source('test_normalization', '_airbyte_raw_nested_stream_with_complex_columns_resulting_into_long_names') }} where 1 = 1 and _airbyte_active_row = 1 -{{ incremental_clause('_airbyte_emitted_at') }} +{{ incremental_clause('_airbyte_emitted_at', this) }} diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_nested_streams/models/generated/airbyte_incremental/test_normalization/nested_stream_with_complex_columns_resulting_into_long_names_partition.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_nested_streams/models/generated/airbyte_incremental/test_normalization/nested_stream_with_complex_columns_resulting_into_long_names_partition.sql index 030b87c4b768..10a422df08a2 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_nested_streams/models/generated/airbyte_incremental/test_normalization/nested_stream_with_complex_columns_resulting_into_long_names_partition.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_nested_streams/models/generated/airbyte_incremental/test_normalization/nested_stream_with_complex_columns_resulting_into_long_names_partition.sql @@ -16,5 +16,5 @@ select from {{ ref('nested_stream_with_complex_columns_resulting_into_long_names_partition_ab3') }} -- partition at nested_stream_with_complex_columns_resulting_into_long_names/partition from {{ 
ref('nested_stream_with_complex_columns_resulting_into_long_names_scd') }} where 1 = 1 -{{ incremental_clause('_airbyte_emitted_at') }} +{{ incremental_clause('_airbyte_emitted_at', this) }} diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_nested_streams/models/generated/airbyte_incremental/test_normalization/nested_stream_with_complex_columns_resulting_into_long_names_partition_data.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_nested_streams/models/generated/airbyte_incremental/test_normalization/nested_stream_with_complex_columns_resulting_into_long_names_partition_data.sql index b13b4ba13628..4551ff86747c 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_nested_streams/models/generated/airbyte_incremental/test_normalization/nested_stream_with_complex_columns_resulting_into_long_names_partition_data.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_nested_streams/models/generated/airbyte_incremental/test_normalization/nested_stream_with_complex_columns_resulting_into_long_names_partition_data.sql @@ -15,5 +15,5 @@ select from {{ ref('nested_stream_with_complex_columns_resulting_into_long_names_partition_data_ab3') }} -- data at nested_stream_with_complex_columns_resulting_into_long_names/partition/DATA from {{ ref('nested_stream_with_complex_columns_resulting_into_long_names_partition') }} where 1 = 1 -{{ incremental_clause('_airbyte_emitted_at') }} +{{ incremental_clause('_airbyte_emitted_at', this) }} diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_nested_streams/models/generated/airbyte_incremental/test_normalization/nested_stream_with_complex_columns_resulting_into_long_names_partition_double_array_data.sql 
b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_nested_streams/models/generated/airbyte_incremental/test_normalization/nested_stream_with_complex_columns_resulting_into_long_names_partition_double_array_data.sql index bf4fd96d2192..3f7bbf34e3e8 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_nested_streams/models/generated/airbyte_incremental/test_normalization/nested_stream_with_complex_columns_resulting_into_long_names_partition_double_array_data.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_nested_streams/models/generated/airbyte_incremental/test_normalization/nested_stream_with_complex_columns_resulting_into_long_names_partition_double_array_data.sql @@ -15,5 +15,5 @@ select from {{ ref('nested_stream_with_complex_columns_resulting_into_long_names_partition_double_array_data_ab3') }} -- double_array_data at nested_stream_with_complex_columns_resulting_into_long_names/partition/double_array_data from {{ ref('nested_stream_with_complex_columns_resulting_into_long_names_partition') }} where 1 = 1 -{{ incremental_clause('_airbyte_emitted_at') }} +{{ incremental_clause('_airbyte_emitted_at', this) }} diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_simple_streams/dbt_project.yml b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_simple_streams/dbt_project.yml index 88dde818dd4d..06d2109d3356 100755 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_simple_streams/dbt_project.yml +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_simple_streams/dbt_project.yml @@ -1,46 +1,32 @@ -# This file is necessary to install dbt-utils with dbt deps 
-# the content will be overwritten by the transform function - -# Name your package! Package names should contain only lowercase characters -# and underscores. A good package name should reflect your organization's -# name or the intended use of these models -name: "airbyte_utils" +name: airbyte_utils version: "1.0" config-version: 2 - -# This setting configures which "profile" dbt uses for this project. Profiles contain -# database connection information, and should be configured in the ~/.dbt/profiles.yml file -profile: "normalize" - -# These configurations specify where dbt should look for different types of files. -# The `model-paths` config, for example, states that source models can be found -# in the "models/" directory. You probably won't need to change these! -model-paths: ["modified_models"] -docs-paths: ["docs"] -analysis-paths: ["analysis"] -test-paths: ["tests"] -seed-paths: ["data"] -macro-paths: ["macros"] - -target-path: "../build" # directory which will store compiled SQL files -log-path: "../logs" # directory which will store DBT logs -packages-install-path: "/dbt" # directory which will store external DBT dependencies - -clean-targets: # directories to be removed by `dbt clean` - - "build" - - "dbt_modules" - +profile: normalize +model-paths: + - modified_models +docs-paths: + - docs +analysis-paths: + - analysis +test-paths: + - tests +seed-paths: + - data +macro-paths: + - macros +target-path: ../build +log-path: ../logs +packages-install-path: /dbt +clean-targets: + - build + - dbt_modules quoting: database: true - # Temporarily disabling the behavior of the ExtendedNameTransformer on table/schema names, see (issue #1785) - # all schemas should be unquoted schema: false identifier: true - -# You can define configurations for models in the `model-paths` directory here. -# Using these configurations, you can enable or disable models, change how they -# are materialized, and more! 
models: + +transient: false + +pre-hook: SET enable_case_sensitive_identifier to TRUE airbyte_utils: +materialized: table generated: @@ -57,7 +43,30 @@ models: airbyte_views: +tags: airbyte_internal_views +materialized: view - dispatch: - macro_namespace: dbt_utils - search_order: ["airbyte_utils", "dbt_utils"] + search_order: + - airbyte_utils + - dbt_utils +vars: + json_column: _airbyte_data + models_to_source: + exchange_rate_ab1: test_normalization._airbyte_raw_exchange_rate + exchange_rate_ab2: test_normalization._airbyte_raw_exchange_rate + exchange_rate_ab3: test_normalization._airbyte_raw_exchange_rate + exchange_rate: test_normalization._airbyte_raw_exchange_rate + dedup_exchange_rate_ab1: test_normalization._airbyte_raw_dedup_exchange_rate + dedup_exchange_rate_ab2: test_normalization._airbyte_raw_dedup_exchange_rate + dedup_exchange_rate_stg: test_normalization._airbyte_raw_dedup_exchange_rate + dedup_exchange_rate_scd: test_normalization._airbyte_raw_dedup_exchange_rate + dedup_exchange_rate: test_normalization._airbyte_raw_dedup_exchange_rate + renamed_dedup_cdc_excluded_ab1: test_normalization._airbyte_raw_renamed_dedup_cdc_excluded + renamed_dedup_cdc_excluded_ab2: test_normalization._airbyte_raw_renamed_dedup_cdc_excluded + renamed_dedup_cdc_excluded_stg: test_normalization._airbyte_raw_renamed_dedup_cdc_excluded + renamed_dedup_cdc_excluded_scd: test_normalization._airbyte_raw_renamed_dedup_cdc_excluded + renamed_dedup_cdc_excluded: test_normalization._airbyte_raw_renamed_dedup_cdc_excluded + dedup_cdc_excluded_ab1: test_normalization._airbyte_raw_dedup_cdc_excluded + dedup_cdc_excluded_ab2: test_normalization._airbyte_raw_dedup_cdc_excluded + dedup_cdc_excluded_stg: test_normalization._airbyte_raw_dedup_cdc_excluded + dedup_cdc_excluded_scd: test_normalization._airbyte_raw_dedup_cdc_excluded + dedup_cdc_excluded: test_normalization._airbyte_raw_dedup_cdc_excluded diff --git 
a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_simple_streams/first_dbt_project.yml b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_simple_streams/first_dbt_project.yml index 7631ef356dc9..5b2760dc9d0f 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_simple_streams/first_dbt_project.yml +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_simple_streams/first_dbt_project.yml @@ -1,46 +1,32 @@ -# This file is necessary to install dbt-utils with dbt deps -# the content will be overwritten by the transform function - -# Name your package! Package names should contain only lowercase characters -# and underscores. A good package name should reflect your organization's -# name or the intended use of these models -name: "airbyte_utils" +name: airbyte_utils version: "1.0" config-version: 2 - -# This setting configures which "profile" dbt uses for this project. Profiles contain -# database connection information, and should be configured in the ~/.dbt/profiles.yml file -profile: "normalize" - -# These configurations specify where dbt should look for different types of files. -# The `model-paths` config, for example, states that source models can be found -# in the "models/" directory. You probably won't need to change these! 
-model-paths: ["models"] -docs-paths: ["docs"] -analysis-paths: ["analysis"] -test-paths: ["tests"] -seed-paths: ["data"] -macro-paths: ["macros"] - -target-path: "../build" # directory which will store compiled SQL files -log-path: "../logs" # directory which will store DBT logs -packages-install-path: "/dbt" # directory which will store external DBT dependencies - -clean-targets: # directories to be removed by `dbt clean` - - "build" - - "dbt_modules" - +profile: normalize +model-paths: + - models +docs-paths: + - docs +analysis-paths: + - analysis +test-paths: + - tests +seed-paths: + - data +macro-paths: + - macros +target-path: ../build +log-path: ../logs +packages-install-path: /dbt +clean-targets: + - build + - dbt_modules quoting: database: true - # Temporarily disabling the behavior of the ExtendedNameTransformer on table/schema names, see (issue #1785) - # all schemas should be unquoted schema: false identifier: true - -# You can define configurations for models in the `model-paths` directory here. -# Using these configurations, you can enable or disable models, change how they -# are materialized, and more! 
models: + +transient: false + +pre-hook: SET enable_case_sensitive_identifier to TRUE airbyte_utils: +materialized: table generated: @@ -57,7 +43,45 @@ models: airbyte_views: +tags: airbyte_internal_views +materialized: view - dispatch: - macro_namespace: dbt_utils - search_order: ["airbyte_utils", "dbt_utils"] + search_order: + - airbyte_utils + - dbt_utils +vars: + json_column: _airbyte_data + models_to_source: + exchange_rate_ab1: test_normalization._airbyte_raw_exchange_rate + exchange_rate_ab2: test_normalization._airbyte_raw_exchange_rate + exchange_rate_ab3: test_normalization._airbyte_raw_exchange_rate + exchange_rate: test_normalization._airbyte_raw_exchange_rate + dedup_exchange_rate_ab1: test_normalization._airbyte_raw_dedup_exchange_rate + dedup_exchange_rate_ab2: test_normalization._airbyte_raw_dedup_exchange_rate + dedup_exchange_rate_stg: test_normalization._airbyte_raw_dedup_exchange_rate + dedup_exchange_rate_scd: test_normalization._airbyte_raw_dedup_exchange_rate + dedup_exchange_rate: test_normalization._airbyte_raw_dedup_exchange_rate + renamed_dedup_cdc_excluded_ab1: test_normalization._airbyte_raw_renamed_dedup_cdc_excluded + renamed_dedup_cdc_excluded_ab2: test_normalization._airbyte_raw_renamed_dedup_cdc_excluded + renamed_dedup_cdc_excluded_stg: test_normalization._airbyte_raw_renamed_dedup_cdc_excluded + renamed_dedup_cdc_excluded_scd: test_normalization._airbyte_raw_renamed_dedup_cdc_excluded + renamed_dedup_cdc_excluded: test_normalization._airbyte_raw_renamed_dedup_cdc_excluded + dedup_cdc_excluded_ab1: test_normalization._airbyte_raw_dedup_cdc_excluded + dedup_cdc_excluded_ab2: test_normalization._airbyte_raw_dedup_cdc_excluded + dedup_cdc_excluded_stg: test_normalization._airbyte_raw_dedup_cdc_excluded + dedup_cdc_excluded_scd: test_normalization._airbyte_raw_dedup_cdc_excluded + dedup_cdc_excluded: test_normalization._airbyte_raw_dedup_cdc_excluded + pos_dedup_cdcx_ab1: test_normalization._airbyte_raw_pos_dedup_cdcx + 
pos_dedup_cdcx_ab2: test_normalization._airbyte_raw_pos_dedup_cdcx + pos_dedup_cdcx_stg: test_normalization._airbyte_raw_pos_dedup_cdcx + pos_dedup_cdcx_scd: test_normalization._airbyte_raw_pos_dedup_cdcx + pos_dedup_cdcx: test_normalization._airbyte_raw_pos_dedup_cdcx + 1_prefix_startwith_number_ab1: test_normalization._airbyte_raw_1_prefix_startwith_number + 1_prefix_startwith_number_ab2: test_normalization._airbyte_raw_1_prefix_startwith_number + 1_prefix_startwith_number_stg: test_normalization._airbyte_raw_1_prefix_startwith_number + 1_prefix_startwith_number_scd: test_normalization._airbyte_raw_1_prefix_startwith_number + 1_prefix_startwith_number: test_normalization._airbyte_raw_1_prefix_startwith_number + multiple_column_names_conflicts_ab1: test_normalization._airbyte_raw_multiple_column_names_conflicts + multiple_column_names_conflicts_ab2: test_normalization._airbyte_raw_multiple_column_names_conflicts + multiple_column_names_conflicts_stg: test_normalization._airbyte_raw_multiple_column_names_conflicts + multiple_column_names_conflicts_scd: test_normalization._airbyte_raw_multiple_column_names_conflicts + multiple_column_names_conflicts: test_normalization._airbyte_raw_multiple_column_names_conflicts diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_simple_streams/first_output/airbyte_incremental/scd/test_normalization/dedup_exchange_rate_scd.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_simple_streams/first_output/airbyte_incremental/scd/test_normalization/dedup_exchange_rate_scd.sql index 14ff0512e8af..2d58288b13ea 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_simple_streams/first_output/airbyte_incremental/scd/test_normalization/dedup_exchange_rate_scd.sql +++ 
b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_simple_streams/first_output/airbyte_incremental/scd/test_normalization/dedup_exchange_rate_scd.sql @@ -21,7 +21,7 @@ input_data as ( scd_data as ( -- SQL model to build a Type 2 Slowly Changing Dimension (SCD) table for each record identified by their primary key select - md5(cast(coalesce(cast(id as varchar), '') || '-' || coalesce(cast(currency as varchar), '') || '-' || coalesce(cast(nzd as varchar), '') as varchar)) as _airbyte_unique_key, + md5(cast(coalesce(cast(id as text), '') || '-' || coalesce(cast(currency as text), '') || '-' || coalesce(cast(nzd as text), '') as text)) as _airbyte_unique_key, id, currency, date, @@ -32,14 +32,14 @@ scd_data as ( usd, date as _airbyte_start_at, lag(date) over ( - partition by id, currency, cast(nzd as varchar) + partition by id, currency, cast(nzd as text) order by date is null asc, date desc, _airbyte_emitted_at desc ) as _airbyte_end_at, case when row_number() over ( - partition by id, currency, cast(nzd as varchar) + partition by id, currency, cast(nzd as text) order by date is null asc, date desc, @@ -61,7 +61,7 @@ dedup_data as ( _airbyte_emitted_at order by _airbyte_active_row desc, _airbyte_ab_id ) as _airbyte_row_num, - md5(cast(coalesce(cast(_airbyte_unique_key as varchar), '') || '-' || coalesce(cast(_airbyte_start_at as varchar), '') || '-' || coalesce(cast(_airbyte_emitted_at as varchar), '') as varchar)) as _airbyte_unique_key_scd, + md5(cast(coalesce(cast(_airbyte_unique_key as text), '') || '-' || coalesce(cast(_airbyte_start_at as text), '') || '-' || coalesce(cast(_airbyte_emitted_at as text), '') as text)) as _airbyte_unique_key_scd, scd_data.* from scd_data ) diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_simple_streams/first_output/airbyte_tables/test_normalization/exchange_rate.sql 
b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_simple_streams/first_output/airbyte_tables/test_normalization/exchange_rate.sql index 0d13846cdfd4..f2537f70055b 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_simple_streams/first_output/airbyte_tables/test_normalization/exchange_rate.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_simple_streams/first_output/airbyte_tables/test_normalization/exchange_rate.sql @@ -13,15 +13,15 @@ with __dbt__cte__exchange_rate_ab1 as ( -- SQL model to parse JSON blob stored in a single column and extract into separated field columns as described by the JSON Schema -- depends_on: "integrationtests".test_normalization._airbyte_raw_exchange_rate select - case when json_extract_path_text(_airbyte_data, 'id', true) != '' then json_extract_path_text(_airbyte_data, 'id', true) end as id, - case when json_extract_path_text(_airbyte_data, 'currency', true) != '' then json_extract_path_text(_airbyte_data, 'currency', true) end as currency, - case when json_extract_path_text(_airbyte_data, 'date', true) != '' then json_extract_path_text(_airbyte_data, 'date', true) end as date, - case when json_extract_path_text(_airbyte_data, 'timestamp_col', true) != '' then json_extract_path_text(_airbyte_data, 'timestamp_col', true) end as timestamp_col, - case when json_extract_path_text(_airbyte_data, 'HKD@spƩƧiƤl & characters', true) != '' then json_extract_path_text(_airbyte_data, 'HKD@spƩƧiƤl & characters', true) end as "hkd@spƩƧiƤl & characters", - case when json_extract_path_text(_airbyte_data, 'HKD_special___characters', true) != '' then json_extract_path_text(_airbyte_data, 'HKD_special___characters', true) end as hkd_special___characters, - case when json_extract_path_text(_airbyte_data, 'NZD', true) != '' then json_extract_path_text(_airbyte_data, 'NZD', 
true) end as nzd, - case when json_extract_path_text(_airbyte_data, 'USD', true) != '' then json_extract_path_text(_airbyte_data, 'USD', true) end as usd, - case when json_extract_path_text(_airbyte_data, 'column`_''with"_quotes', true) != '' then json_extract_path_text(_airbyte_data, 'column`_''with"_quotes', true) end as "column`_'with""_quotes", + case when _airbyte_data."id" != '' then _airbyte_data."id" end as id, + case when _airbyte_data."currency" != '' then _airbyte_data."currency" end as currency, + case when _airbyte_data."date" != '' then _airbyte_data."date" end as date, + case when _airbyte_data."timestamp_col" != '' then _airbyte_data."timestamp_col" end as timestamp_col, + case when _airbyte_data."HKD@spƩƧiƤl & characters" != '' then _airbyte_data."HKD@spƩƧiƤl & characters" end as "hkd@spƩƧiƤl & characters", + case when _airbyte_data."HKD_special___characters" != '' then _airbyte_data."HKD_special___characters" end as hkd_special___characters, + case when _airbyte_data."NZD" != '' then _airbyte_data."NZD" end as nzd, + case when _airbyte_data."USD" != '' then _airbyte_data."USD" end as usd, + case when _airbyte_data."column`_'with""_quotes" != '' then _airbyte_data."column`_'with""_quotes" end as "column`_'with""_quotes", _airbyte_ab_id, _airbyte_emitted_at, getdate() as _airbyte_normalized_at @@ -36,24 +36,24 @@ select cast(id as bigint ) as id, - cast(currency as varchar) as currency, - cast(nullif(date, '') as + cast(currency as text) as currency, + cast(nullif(date::varchar, '') as date ) as date, - cast(nullif(timestamp_col, '') as + cast(nullif(timestamp_col::varchar, '') as timestamp with time zone ) as timestamp_col, cast("hkd@spƩƧiƤl & characters" as float ) as "hkd@spƩƧiƤl & characters", - cast(hkd_special___characters as varchar) as hkd_special___characters, + cast(hkd_special___characters as text) as hkd_special___characters, cast(nzd as float ) as nzd, cast(usd as float ) as usd, - cast("column`_'with""_quotes" as varchar) as 
"column`_'with""_quotes", + cast("column`_'with""_quotes" as text) as "column`_'with""_quotes", _airbyte_ab_id, _airbyte_emitted_at, getdate() as _airbyte_normalized_at @@ -65,7 +65,7 @@ where 1 = 1 -- SQL model to build a hash column based on the values of this record -- depends_on: __dbt__cte__exchange_rate_ab2 select - md5(cast(coalesce(cast(id as varchar), '') || '-' || coalesce(cast(currency as varchar), '') || '-' || coalesce(cast(date as varchar), '') || '-' || coalesce(cast(timestamp_col as varchar), '') || '-' || coalesce(cast("hkd@spƩƧiƤl & characters" as varchar), '') || '-' || coalesce(cast(hkd_special___characters as varchar), '') || '-' || coalesce(cast(nzd as varchar), '') || '-' || coalesce(cast(usd as varchar), '') || '-' || coalesce(cast("column`_'with""_quotes" as varchar), '') as varchar)) as _airbyte_exchange_rate_hashid, + md5(cast(coalesce(cast(id as text), '') || '-' || coalesce(cast(currency as text), '') || '-' || coalesce(cast(date as text), '') || '-' || coalesce(cast(timestamp_col as text), '') || '-' || coalesce(cast("hkd@spƩƧiƤl & characters" as text), '') || '-' || coalesce(cast(hkd_special___characters as text), '') || '-' || coalesce(cast(nzd as text), '') || '-' || coalesce(cast(usd as text), '') || '-' || coalesce(cast("column`_'with""_quotes" as text), '') as text)) as _airbyte_exchange_rate_hashid, tmp.* from __dbt__cte__exchange_rate_ab2 tmp -- exchange_rate diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_simple_streams/first_output/airbyte_views/test_normalization/dedup_exchange_rate_stg.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_simple_streams/first_output/airbyte_views/test_normalization/dedup_exchange_rate_stg.sql index 2c02508e5b87..f20d8dbdc37b 100644 --- 
a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_simple_streams/first_output/airbyte_views/test_normalization/dedup_exchange_rate_stg.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_simple_streams/first_output/airbyte_views/test_normalization/dedup_exchange_rate_stg.sql @@ -7,14 +7,14 @@ with __dbt__cte__dedup_exchange_rate_ab1 as ( -- SQL model to parse JSON blob stored in a single column and extract into separated field columns as described by the JSON Schema -- depends_on: "integrationtests".test_normalization._airbyte_raw_dedup_exchange_rate select - case when json_extract_path_text(_airbyte_data, 'id', true) != '' then json_extract_path_text(_airbyte_data, 'id', true) end as id, - case when json_extract_path_text(_airbyte_data, 'currency', true) != '' then json_extract_path_text(_airbyte_data, 'currency', true) end as currency, - case when json_extract_path_text(_airbyte_data, 'date', true) != '' then json_extract_path_text(_airbyte_data, 'date', true) end as date, - case when json_extract_path_text(_airbyte_data, 'timestamp_col', true) != '' then json_extract_path_text(_airbyte_data, 'timestamp_col', true) end as timestamp_col, - case when json_extract_path_text(_airbyte_data, 'HKD@spƩƧiƤl & characters', true) != '' then json_extract_path_text(_airbyte_data, 'HKD@spƩƧiƤl & characters', true) end as "hkd@spƩƧiƤl & characters", - case when json_extract_path_text(_airbyte_data, 'HKD_special___characters', true) != '' then json_extract_path_text(_airbyte_data, 'HKD_special___characters', true) end as hkd_special___characters, - case when json_extract_path_text(_airbyte_data, 'NZD', true) != '' then json_extract_path_text(_airbyte_data, 'NZD', true) end as nzd, - case when json_extract_path_text(_airbyte_data, 'USD', true) != '' then json_extract_path_text(_airbyte_data, 'USD', true) end as usd, + case when _airbyte_data."id" != '' then 
_airbyte_data."id" end as id, + case when _airbyte_data."currency" != '' then _airbyte_data."currency" end as currency, + case when _airbyte_data."date" != '' then _airbyte_data."date" end as date, + case when _airbyte_data."timestamp_col" != '' then _airbyte_data."timestamp_col" end as timestamp_col, + case when _airbyte_data."HKD@spƩƧiƤl & characters" != '' then _airbyte_data."HKD@spƩƧiƤl & characters" end as "hkd@spƩƧiƤl & characters", + case when _airbyte_data."HKD_special___characters" != '' then _airbyte_data."HKD_special___characters" end as hkd_special___characters, + case when _airbyte_data."NZD" != '' then _airbyte_data."NZD" end as nzd, + case when _airbyte_data."USD" != '' then _airbyte_data."USD" end as usd, _airbyte_ab_id, _airbyte_emitted_at, getdate() as _airbyte_normalized_at @@ -30,17 +30,17 @@ select cast(id as bigint ) as id, - cast(currency as varchar) as currency, - cast(nullif(date, '') as + cast(currency as text) as currency, + cast(nullif(date::varchar, '') as date ) as date, - cast(nullif(timestamp_col, '') as + cast(nullif(timestamp_col::varchar, '') as timestamp with time zone ) as timestamp_col, cast("hkd@spƩƧiƤl & characters" as float ) as "hkd@spƩƧiƤl & characters", - cast(hkd_special___characters as varchar) as hkd_special___characters, + cast(hkd_special___characters as text) as hkd_special___characters, cast(nzd as float ) as nzd, @@ -57,7 +57,7 @@ where 1 = 1 )-- SQL model to build a hash column based on the values of this record -- depends_on: __dbt__cte__dedup_exchange_rate_ab2 select - md5(cast(coalesce(cast(id as varchar), '') || '-' || coalesce(cast(currency as varchar), '') || '-' || coalesce(cast(date as varchar), '') || '-' || coalesce(cast(timestamp_col as varchar), '') || '-' || coalesce(cast("hkd@spƩƧiƤl & characters" as varchar), '') || '-' || coalesce(cast(hkd_special___characters as varchar), '') || '-' || coalesce(cast(nzd as varchar), '') || '-' || coalesce(cast(usd as varchar), '') as varchar)) as 
_airbyte_dedup_exchange_rate_hashid, + md5(cast(coalesce(cast(id as text), '') || '-' || coalesce(cast(currency as text), '') || '-' || coalesce(cast(date as text), '') || '-' || coalesce(cast(timestamp_col as text), '') || '-' || coalesce(cast("hkd@spƩƧiƤl & characters" as text), '') || '-' || coalesce(cast(hkd_special___characters as text), '') || '-' || coalesce(cast(nzd as text), '') || '-' || coalesce(cast(usd as text), '') as text)) as _airbyte_dedup_exchange_rate_hashid, tmp.* from __dbt__cte__dedup_exchange_rate_ab2 tmp -- dedup_exchange_rate diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_simple_streams/first_output/airbyte_views/test_normalization/multiple_column_names_conflicts_stg.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_simple_streams/first_output/airbyte_views/test_normalization/multiple_column_names_conflicts_stg.sql index 0777ba0c5393..37786e8c5256 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_simple_streams/first_output/airbyte_views/test_normalization/multiple_column_names_conflicts_stg.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_simple_streams/first_output/airbyte_views/test_normalization/multiple_column_names_conflicts_stg.sql @@ -7,13 +7,13 @@ with __dbt__cte__multiple_column_names_conflicts_ab1 as ( -- SQL model to parse JSON blob stored in a single column and extract into separated field columns as described by the JSON Schema -- depends_on: "integrationtests".test_normalization._airbyte_raw_multiple_column_names_conflicts select - case when json_extract_path_text(_airbyte_data, 'id', true) != '' then json_extract_path_text(_airbyte_data, 'id', true) end as id, - case when json_extract_path_text(_airbyte_data, 'User Id', true) != '' then 
json_extract_path_text(_airbyte_data, 'User Id', true) end as "user id", - case when json_extract_path_text(_airbyte_data, 'user_id', true) != '' then json_extract_path_text(_airbyte_data, 'user_id', true) end as user_id, - case when json_extract_path_text(_airbyte_data, 'User id', true) != '' then json_extract_path_text(_airbyte_data, 'User id', true) end as "user id_1", - case when json_extract_path_text(_airbyte_data, 'user id', true) != '' then json_extract_path_text(_airbyte_data, 'user id', true) end as "user id_2", - case when json_extract_path_text(_airbyte_data, 'User@Id', true) != '' then json_extract_path_text(_airbyte_data, 'User@Id', true) end as "user@id", - case when json_extract_path_text(_airbyte_data, 'UserId', true) != '' then json_extract_path_text(_airbyte_data, 'UserId', true) end as userid, + case when _airbyte_data."id" != '' then _airbyte_data."id" end as id, + case when _airbyte_data."User Id" != '' then _airbyte_data."User Id" end as "user id", + case when _airbyte_data."user_id" != '' then _airbyte_data."user_id" end as user_id, + case when _airbyte_data."User id" != '' then _airbyte_data."User id" end as "user id_1", + case when _airbyte_data."user id" != '' then _airbyte_data."user id" end as "user id_2", + case when _airbyte_data."User@Id" != '' then _airbyte_data."User@Id" end as "user@id", + case when _airbyte_data."UserId" != '' then _airbyte_data."UserId" end as userid, _airbyte_ab_id, _airbyte_emitted_at, getdate() as _airbyte_normalized_at @@ -29,7 +29,7 @@ select cast(id as bigint ) as id, - cast("user id" as varchar) as "user id", + cast("user id" as text) as "user id", cast(user_id as float ) as user_id, @@ -39,7 +39,7 @@ select cast("user id_2" as float ) as "user id_2", - cast("user@id" as varchar) as "user@id", + cast("user@id" as text) as "user@id", cast(userid as float ) as userid, @@ -53,7 +53,7 @@ where 1 = 1 )-- SQL model to build a hash column based on the values of this record -- depends_on: 
__dbt__cte__multiple_column_names_conflicts_ab2 select - md5(cast(coalesce(cast(id as varchar), '') || '-' || coalesce(cast("user id" as varchar), '') || '-' || coalesce(cast(user_id as varchar), '') || '-' || coalesce(cast("user id_1" as varchar), '') || '-' || coalesce(cast("user id_2" as varchar), '') || '-' || coalesce(cast("user@id" as varchar), '') || '-' || coalesce(cast(userid as varchar), '') as varchar)) as _airbyte_multiple_column_names_conflicts_hashid, + md5(cast(coalesce(cast(id as text), '') || '-' || coalesce(cast("user id" as text), '') || '-' || coalesce(cast(user_id as text), '') || '-' || coalesce(cast("user id_1" as text), '') || '-' || coalesce(cast("user id_2" as text), '') || '-' || coalesce(cast("user@id" as text), '') || '-' || coalesce(cast(userid as text), '') as text)) as _airbyte_multiple_column_names_conflicts_hashid, tmp.* from __dbt__cte__multiple_column_names_conflicts_ab2 tmp -- multiple_column_names_conflicts diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_simple_streams/models/generated/airbyte_ctes/test_normalization/dedup_exchange_rate_ab1.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_simple_streams/models/generated/airbyte_ctes/test_normalization/dedup_exchange_rate_ab1.sql index b737fc7a2998..17c4a88a2059 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_simple_streams/models/generated/airbyte_ctes/test_normalization/dedup_exchange_rate_ab1.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_simple_streams/models/generated/airbyte_ctes/test_normalization/dedup_exchange_rate_ab1.sql @@ -21,5 +21,5 @@ select from {{ source('test_normalization', '_airbyte_raw_dedup_exchange_rate') }} as table_alias -- dedup_exchange_rate where 1 = 1 -{{ 
incremental_clause('_airbyte_emitted_at') }} +{{ incremental_clause('_airbyte_emitted_at', this) }} diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_simple_streams/models/generated/airbyte_ctes/test_normalization/dedup_exchange_rate_ab2.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_simple_streams/models/generated/airbyte_ctes/test_normalization/dedup_exchange_rate_ab2.sql index 97341fcfad77..796d4205f5ae 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_simple_streams/models/generated/airbyte_ctes/test_normalization/dedup_exchange_rate_ab2.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_simple_streams/models/generated/airbyte_ctes/test_normalization/dedup_exchange_rate_ab2.sql @@ -21,5 +21,5 @@ select from {{ ref('dedup_exchange_rate_ab1') }} -- dedup_exchange_rate where 1 = 1 -{{ incremental_clause('_airbyte_emitted_at') }} +{{ incremental_clause('_airbyte_emitted_at', this) }} diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_simple_streams/models/generated/airbyte_incremental/scd/test_normalization/dedup_exchange_rate_scd.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_simple_streams/models/generated/airbyte_incremental/scd/test_normalization/dedup_exchange_rate_scd.sql index 3cb089de2de1..683191e161c5 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_simple_streams/models/generated/airbyte_incremental/scd/test_normalization/dedup_exchange_rate_scd.sql +++ 
b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_simple_streams/models/generated/airbyte_incremental/scd/test_normalization/dedup_exchange_rate_scd.sql @@ -2,7 +2,53 @@ sort = ["_airbyte_active_row", "_airbyte_unique_key_scd", "_airbyte_emitted_at"], unique_key = "_airbyte_unique_key_scd", schema = "test_normalization", - post_hook = ["drop view _airbyte_test_normalization.dedup_exchange_rate_stg"], + post_hook = [" + {% + set final_table_relation = adapter.get_relation( + database=this.database, + schema=this.schema, + identifier='dedup_exchange_rate' + ) + %} + {# + If the final table doesn't exist, then obviously we can't delete anything from it. + Also, after a reset, the final table is created without the _airbyte_unique_key column (this column is created during the first sync) + So skip this deletion if the column doesn't exist. (in this case, the table is guaranteed to be empty anyway) + #} + {% + if final_table_relation is not none and '_airbyte_unique_key' in adapter.get_columns_in_relation(final_table_relation)|map(attribute='name') + %} + -- Delete records which are no longer active: + -- This query is equivalent, but the left join version is more performant: + -- delete from final_table where unique_key in ( + -- select unique_key from scd_table where 1 = 1 + -- ) and unique_key not in ( + -- select unique_key from scd_table where active_row = 1 + -- ) + -- We're incremental against normalized_at rather than emitted_at because we need to fetch the SCD + -- entries that were _updated_ recently. This is because a deleted record will have an SCD record + -- which was emitted a long time ago, but recently re-normalized to have active_row = 0. 
+ delete from {{ final_table_relation }} where {{ final_table_relation }}._airbyte_unique_key in ( + select recent_records.unique_key + from ( + select distinct _airbyte_unique_key as unique_key + from {{ this }} + where 1=1 {{ incremental_clause('_airbyte_normalized_at', this.schema + '.' + adapter.quote('dedup_exchange_rate')) }} + ) recent_records + left join ( + select _airbyte_unique_key as unique_key, count(_airbyte_unique_key) as active_count + from {{ this }} + where _airbyte_active_row = 1 {{ incremental_clause('_airbyte_normalized_at', this.schema + '.' + adapter.quote('dedup_exchange_rate')) }} + group by _airbyte_unique_key + ) active_counts + on recent_records.unique_key = active_counts.unique_key + where active_count is null or active_count = 0 + ) + {% else %} + -- We have to have a non-empty query, so just do a noop delete + delete from {{ this }} where 1=0 + {% endif %} + ","drop view _airbyte_test_normalization.dedup_exchange_rate_stg"], tags = [ "top-level" ] ) }} -- depends_on: ref('dedup_exchange_rate_stg') @@ -15,7 +61,7 @@ new_data as ( from {{ ref('dedup_exchange_rate_stg') }} -- dedup_exchange_rate from {{ source('test_normalization', '_airbyte_raw_dedup_exchange_rate') }} where 1 = 1 - {{ incremental_clause('_airbyte_emitted_at') }} + {{ incremental_clause('_airbyte_emitted_at', this) }} ), new_data_ids as ( -- build a subset of _airbyte_unique_key from rows that are new diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_simple_streams/models/generated/airbyte_incremental/test_normalization/dedup_exchange_rate.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_simple_streams/models/generated/airbyte_incremental/test_normalization/dedup_exchange_rate.sql index 4159603bae9e..d8b57a81b7cf 100644 --- 
a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_simple_streams/models/generated/airbyte_incremental/test_normalization/dedup_exchange_rate.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_simple_streams/models/generated/airbyte_incremental/test_normalization/dedup_exchange_rate.sql @@ -24,5 +24,5 @@ from {{ ref('dedup_exchange_rate_scd') }} -- dedup_exchange_rate from {{ source('test_normalization', '_airbyte_raw_dedup_exchange_rate') }} where 1 = 1 and _airbyte_active_row = 1 -{{ incremental_clause('_airbyte_emitted_at') }} +{{ incremental_clause('_airbyte_emitted_at', this) }} diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_simple_streams/models/generated/airbyte_views/test_normalization/dedup_exchange_rate_stg.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_simple_streams/models/generated/airbyte_views/test_normalization/dedup_exchange_rate_stg.sql index e0f6b9699b7d..8de81a6690f8 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_simple_streams/models/generated/airbyte_views/test_normalization/dedup_exchange_rate_stg.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_simple_streams/models/generated/airbyte_views/test_normalization/dedup_exchange_rate_stg.sql @@ -21,5 +21,5 @@ select from {{ ref('dedup_exchange_rate_ab2') }} tmp -- dedup_exchange_rate where 1 = 1 -{{ incremental_clause('_airbyte_emitted_at') }} +{{ incremental_clause('_airbyte_emitted_at', this) }} diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_simple_streams/modified_models/generated/airbyte_ctes/test_normalization/dedup_exchange_rate_ab1.sql 
b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_simple_streams/modified_models/generated/airbyte_ctes/test_normalization/dedup_exchange_rate_ab1.sql index ab09ca51f11c..eca4c17d59fb 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_simple_streams/modified_models/generated/airbyte_ctes/test_normalization/dedup_exchange_rate_ab1.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_simple_streams/modified_models/generated/airbyte_ctes/test_normalization/dedup_exchange_rate_ab1.sql @@ -21,5 +21,5 @@ select from {{ source('test_normalization', '_airbyte_raw_dedup_exchange_rate') }} as table_alias -- dedup_exchange_rate where 1 = 1 -{{ incremental_clause('_airbyte_emitted_at') }} +{{ incremental_clause('_airbyte_emitted_at', this) }} diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_simple_streams/modified_models/generated/airbyte_ctes/test_normalization/dedup_exchange_rate_ab2.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_simple_streams/modified_models/generated/airbyte_ctes/test_normalization/dedup_exchange_rate_ab2.sql index 9b24d65d796e..bf26dc2829f0 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_simple_streams/modified_models/generated/airbyte_ctes/test_normalization/dedup_exchange_rate_ab2.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_simple_streams/modified_models/generated/airbyte_ctes/test_normalization/dedup_exchange_rate_ab2.sql @@ -21,5 +21,5 @@ select from {{ ref('dedup_exchange_rate_ab1') }} -- dedup_exchange_rate where 1 = 1 -{{ incremental_clause('_airbyte_emitted_at') }} +{{ 
incremental_clause('_airbyte_emitted_at', this) }} diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_simple_streams/modified_models/generated/airbyte_incremental/scd/test_normalization/dedup_exchange_rate_scd.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_simple_streams/modified_models/generated/airbyte_incremental/scd/test_normalization/dedup_exchange_rate_scd.sql index 0145a94818b0..2582b1213c70 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_simple_streams/modified_models/generated/airbyte_incremental/scd/test_normalization/dedup_exchange_rate_scd.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_simple_streams/modified_models/generated/airbyte_incremental/scd/test_normalization/dedup_exchange_rate_scd.sql @@ -2,7 +2,53 @@ sort = ["_airbyte_active_row", "_airbyte_unique_key_scd", "_airbyte_emitted_at"], unique_key = "_airbyte_unique_key_scd", schema = "test_normalization", - post_hook = ["drop view _airbyte_test_normalization.dedup_exchange_rate_stg"], + post_hook = [" + {% + set final_table_relation = adapter.get_relation( + database=this.database, + schema=this.schema, + identifier='dedup_exchange_rate' + ) + %} + {# + If the final table doesn't exist, then obviously we can't delete anything from it. + Also, after a reset, the final table is created without the _airbyte_unique_key column (this column is created during the first sync) + So skip this deletion if the column doesn't exist. 
(in this case, the table is guaranteed to be empty anyway) + #} + {% + if final_table_relation is not none and '_airbyte_unique_key' in adapter.get_columns_in_relation(final_table_relation)|map(attribute='name') + %} + -- Delete records which are no longer active: + -- This query is equivalent, but the left join version is more performant: + -- delete from final_table where unique_key in ( + -- select unique_key from scd_table where 1 = 1 + -- ) and unique_key not in ( + -- select unique_key from scd_table where active_row = 1 + -- ) + -- We're incremental against normalized_at rather than emitted_at because we need to fetch the SCD + -- entries that were _updated_ recently. This is because a deleted record will have an SCD record + -- which was emitted a long time ago, but recently re-normalized to have active_row = 0. + delete from {{ final_table_relation }} where {{ final_table_relation }}._airbyte_unique_key in ( + select recent_records.unique_key + from ( + select distinct _airbyte_unique_key as unique_key + from {{ this }} + where 1=1 {{ incremental_clause('_airbyte_normalized_at', this.schema + '.' + adapter.quote('dedup_exchange_rate')) }} + ) recent_records + left join ( + select _airbyte_unique_key as unique_key, count(_airbyte_unique_key) as active_count + from {{ this }} + where _airbyte_active_row = 1 {{ incremental_clause('_airbyte_normalized_at', this.schema + '.' 
+ adapter.quote('dedup_exchange_rate')) }} + group by _airbyte_unique_key + ) active_counts + on recent_records.unique_key = active_counts.unique_key + where active_count is null or active_count = 0 + ) + {% else %} + -- We have to have a non-empty query, so just do a noop delete + delete from {{ this }} where 1=0 + {% endif %} + ","drop view _airbyte_test_normalization.dedup_exchange_rate_stg"], tags = [ "top-level" ] ) }} -- depends_on: ref('dedup_exchange_rate_stg') @@ -15,7 +61,7 @@ new_data as ( from {{ ref('dedup_exchange_rate_stg') }} -- dedup_exchange_rate from {{ source('test_normalization', '_airbyte_raw_dedup_exchange_rate') }} where 1 = 1 - {{ incremental_clause('_airbyte_emitted_at') }} + {{ incremental_clause('_airbyte_emitted_at', this) }} ), new_data_ids as ( -- build a subset of _airbyte_unique_key from rows that are new diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_simple_streams/modified_models/generated/airbyte_incremental/test_normalization/dedup_exchange_rate.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_simple_streams/modified_models/generated/airbyte_incremental/test_normalization/dedup_exchange_rate.sql index c3b47dab239b..421177e81179 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_simple_streams/modified_models/generated/airbyte_incremental/test_normalization/dedup_exchange_rate.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_simple_streams/modified_models/generated/airbyte_incremental/test_normalization/dedup_exchange_rate.sql @@ -24,5 +24,5 @@ from {{ ref('dedup_exchange_rate_scd') }} -- dedup_exchange_rate from {{ source('test_normalization', '_airbyte_raw_dedup_exchange_rate') }} where 1 = 1 and _airbyte_active_row = 1 -{{ incremental_clause('_airbyte_emitted_at') }} 
+{{ incremental_clause('_airbyte_emitted_at', this) }} diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_simple_streams/modified_models/generated/airbyte_views/test_normalization/dedup_exchange_rate_stg.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_simple_streams/modified_models/generated/airbyte_views/test_normalization/dedup_exchange_rate_stg.sql index 1df86fb5598c..59153246fdb5 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_simple_streams/modified_models/generated/airbyte_views/test_normalization/dedup_exchange_rate_stg.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_simple_streams/modified_models/generated/airbyte_views/test_normalization/dedup_exchange_rate_stg.sql @@ -21,5 +21,5 @@ select from {{ ref('dedup_exchange_rate_ab2') }} tmp -- dedup_exchange_rate where 1 = 1 -{{ incremental_clause('_airbyte_emitted_at') }} +{{ incremental_clause('_airbyte_emitted_at', this) }} diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_simple_streams/modified_models/generated/sources.yml b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_simple_streams/modified_models/generated/sources.yml index dd538a80131a..79ad1a1bb5c5 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_simple_streams/modified_models/generated/sources.yml +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_simple_streams/modified_models/generated/sources.yml @@ -1,11 +1,12 @@ version: 2 sources: -- name: test_normalization - quoting: - database: true - schema: false - identifier: false - tables: - - 
name: _airbyte_raw_dedup_exchange_rate - - name: _airbyte_raw_exchange_rate - - name: _airbyte_raw_renamed_dedup_cdc_excluded + - name: test_normalization + quoting: + database: true + schema: false + identifier: false + tables: + - name: _airbyte_raw_dedup_cdc_excluded + - name: _airbyte_raw_dedup_exchange_rate + - name: _airbyte_raw_exchange_rate + - name: _airbyte_raw_renamed_dedup_cdc_excluded diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_simple_streams/second_output/airbyte_tables/test_normalization/exchange_rate.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_simple_streams/second_output/airbyte_tables/test_normalization/exchange_rate.sql index 0d13846cdfd4..f2537f70055b 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_simple_streams/second_output/airbyte_tables/test_normalization/exchange_rate.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_simple_streams/second_output/airbyte_tables/test_normalization/exchange_rate.sql @@ -13,15 +13,15 @@ with __dbt__cte__exchange_rate_ab1 as ( -- SQL model to parse JSON blob stored in a single column and extract into separated field columns as described by the JSON Schema -- depends_on: "integrationtests".test_normalization._airbyte_raw_exchange_rate select - case when json_extract_path_text(_airbyte_data, 'id', true) != '' then json_extract_path_text(_airbyte_data, 'id', true) end as id, - case when json_extract_path_text(_airbyte_data, 'currency', true) != '' then json_extract_path_text(_airbyte_data, 'currency', true) end as currency, - case when json_extract_path_text(_airbyte_data, 'date', true) != '' then json_extract_path_text(_airbyte_data, 'date', true) end as date, - case when json_extract_path_text(_airbyte_data, 'timestamp_col', true) != '' then 
json_extract_path_text(_airbyte_data, 'timestamp_col', true) end as timestamp_col, - case when json_extract_path_text(_airbyte_data, 'HKD@spƩƧiƤl & characters', true) != '' then json_extract_path_text(_airbyte_data, 'HKD@spƩƧiƤl & characters', true) end as "hkd@spƩƧiƤl & characters", - case when json_extract_path_text(_airbyte_data, 'HKD_special___characters', true) != '' then json_extract_path_text(_airbyte_data, 'HKD_special___characters', true) end as hkd_special___characters, - case when json_extract_path_text(_airbyte_data, 'NZD', true) != '' then json_extract_path_text(_airbyte_data, 'NZD', true) end as nzd, - case when json_extract_path_text(_airbyte_data, 'USD', true) != '' then json_extract_path_text(_airbyte_data, 'USD', true) end as usd, - case when json_extract_path_text(_airbyte_data, 'column`_''with"_quotes', true) != '' then json_extract_path_text(_airbyte_data, 'column`_''with"_quotes', true) end as "column`_'with""_quotes", + case when _airbyte_data."id" != '' then _airbyte_data."id" end as id, + case when _airbyte_data."currency" != '' then _airbyte_data."currency" end as currency, + case when _airbyte_data."date" != '' then _airbyte_data."date" end as date, + case when _airbyte_data."timestamp_col" != '' then _airbyte_data."timestamp_col" end as timestamp_col, + case when _airbyte_data."HKD@spƩƧiƤl & characters" != '' then _airbyte_data."HKD@spƩƧiƤl & characters" end as "hkd@spƩƧiƤl & characters", + case when _airbyte_data."HKD_special___characters" != '' then _airbyte_data."HKD_special___characters" end as hkd_special___characters, + case when _airbyte_data."NZD" != '' then _airbyte_data."NZD" end as nzd, + case when _airbyte_data."USD" != '' then _airbyte_data."USD" end as usd, + case when _airbyte_data."column`_'with""_quotes" != '' then _airbyte_data."column`_'with""_quotes" end as "column`_'with""_quotes", _airbyte_ab_id, _airbyte_emitted_at, getdate() as _airbyte_normalized_at @@ -36,24 +36,24 @@ select cast(id as bigint ) as id, - 
cast(currency as varchar) as currency, - cast(nullif(date, '') as + cast(currency as text) as currency, + cast(nullif(date::varchar, '') as date ) as date, - cast(nullif(timestamp_col, '') as + cast(nullif(timestamp_col::varchar, '') as timestamp with time zone ) as timestamp_col, cast("hkd@spƩƧiƤl & characters" as float ) as "hkd@spƩƧiƤl & characters", - cast(hkd_special___characters as varchar) as hkd_special___characters, + cast(hkd_special___characters as text) as hkd_special___characters, cast(nzd as float ) as nzd, cast(usd as float ) as usd, - cast("column`_'with""_quotes" as varchar) as "column`_'with""_quotes", + cast("column`_'with""_quotes" as text) as "column`_'with""_quotes", _airbyte_ab_id, _airbyte_emitted_at, getdate() as _airbyte_normalized_at @@ -65,7 +65,7 @@ where 1 = 1 -- SQL model to build a hash column based on the values of this record -- depends_on: __dbt__cte__exchange_rate_ab2 select - md5(cast(coalesce(cast(id as varchar), '') || '-' || coalesce(cast(currency as varchar), '') || '-' || coalesce(cast(date as varchar), '') || '-' || coalesce(cast(timestamp_col as varchar), '') || '-' || coalesce(cast("hkd@spƩƧiƤl & characters" as varchar), '') || '-' || coalesce(cast(hkd_special___characters as varchar), '') || '-' || coalesce(cast(nzd as varchar), '') || '-' || coalesce(cast(usd as varchar), '') || '-' || coalesce(cast("column`_'with""_quotes" as varchar), '') as varchar)) as _airbyte_exchange_rate_hashid, + md5(cast(coalesce(cast(id as text), '') || '-' || coalesce(cast(currency as text), '') || '-' || coalesce(cast(date as text), '') || '-' || coalesce(cast(timestamp_col as text), '') || '-' || coalesce(cast("hkd@spƩƧiƤl & characters" as text), '') || '-' || coalesce(cast(hkd_special___characters as text), '') || '-' || coalesce(cast(nzd as text), '') || '-' || coalesce(cast(usd as text), '') || '-' || coalesce(cast("column`_'with""_quotes" as text), '') as text)) as _airbyte_exchange_rate_hashid, tmp.* from 
__dbt__cte__exchange_rate_ab2 tmp -- exchange_rate diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_simple_streams/second_output/airbyte_views/test_normalization/dedup_exchange_rate_stg.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_simple_streams/second_output/airbyte_views/test_normalization/dedup_exchange_rate_stg.sql index 2c02508e5b87..f20d8dbdc37b 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_simple_streams/second_output/airbyte_views/test_normalization/dedup_exchange_rate_stg.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_simple_streams/second_output/airbyte_views/test_normalization/dedup_exchange_rate_stg.sql @@ -7,14 +7,14 @@ with __dbt__cte__dedup_exchange_rate_ab1 as ( -- SQL model to parse JSON blob stored in a single column and extract into separated field columns as described by the JSON Schema -- depends_on: "integrationtests".test_normalization._airbyte_raw_dedup_exchange_rate select - case when json_extract_path_text(_airbyte_data, 'id', true) != '' then json_extract_path_text(_airbyte_data, 'id', true) end as id, - case when json_extract_path_text(_airbyte_data, 'currency', true) != '' then json_extract_path_text(_airbyte_data, 'currency', true) end as currency, - case when json_extract_path_text(_airbyte_data, 'date', true) != '' then json_extract_path_text(_airbyte_data, 'date', true) end as date, - case when json_extract_path_text(_airbyte_data, 'timestamp_col', true) != '' then json_extract_path_text(_airbyte_data, 'timestamp_col', true) end as timestamp_col, - case when json_extract_path_text(_airbyte_data, 'HKD@spƩƧiƤl & characters', true) != '' then json_extract_path_text(_airbyte_data, 'HKD@spƩƧiƤl & characters', true) end as "hkd@spƩƧiƤl & characters", - case when 
json_extract_path_text(_airbyte_data, 'HKD_special___characters', true) != '' then json_extract_path_text(_airbyte_data, 'HKD_special___characters', true) end as hkd_special___characters, - case when json_extract_path_text(_airbyte_data, 'NZD', true) != '' then json_extract_path_text(_airbyte_data, 'NZD', true) end as nzd, - case when json_extract_path_text(_airbyte_data, 'USD', true) != '' then json_extract_path_text(_airbyte_data, 'USD', true) end as usd, + case when _airbyte_data."id" != '' then _airbyte_data."id" end as id, + case when _airbyte_data."currency" != '' then _airbyte_data."currency" end as currency, + case when _airbyte_data."date" != '' then _airbyte_data."date" end as date, + case when _airbyte_data."timestamp_col" != '' then _airbyte_data."timestamp_col" end as timestamp_col, + case when _airbyte_data."HKD@spƩƧiƤl & characters" != '' then _airbyte_data."HKD@spƩƧiƤl & characters" end as "hkd@spƩƧiƤl & characters", + case when _airbyte_data."HKD_special___characters" != '' then _airbyte_data."HKD_special___characters" end as hkd_special___characters, + case when _airbyte_data."NZD" != '' then _airbyte_data."NZD" end as nzd, + case when _airbyte_data."USD" != '' then _airbyte_data."USD" end as usd, _airbyte_ab_id, _airbyte_emitted_at, getdate() as _airbyte_normalized_at @@ -30,17 +30,17 @@ select cast(id as bigint ) as id, - cast(currency as varchar) as currency, - cast(nullif(date, '') as + cast(currency as text) as currency, + cast(nullif(date::varchar, '') as date ) as date, - cast(nullif(timestamp_col, '') as + cast(nullif(timestamp_col::varchar, '') as timestamp with time zone ) as timestamp_col, cast("hkd@spƩƧiƤl & characters" as float ) as "hkd@spƩƧiƤl & characters", - cast(hkd_special___characters as varchar) as hkd_special___characters, + cast(hkd_special___characters as text) as hkd_special___characters, cast(nzd as float ) as nzd, @@ -57,7 +57,7 @@ where 1 = 1 )-- SQL model to build a hash column based on the values of this record -- 
depends_on: __dbt__cte__dedup_exchange_rate_ab2 select - md5(cast(coalesce(cast(id as varchar), '') || '-' || coalesce(cast(currency as varchar), '') || '-' || coalesce(cast(date as varchar), '') || '-' || coalesce(cast(timestamp_col as varchar), '') || '-' || coalesce(cast("hkd@spƩƧiƤl & characters" as varchar), '') || '-' || coalesce(cast(hkd_special___characters as varchar), '') || '-' || coalesce(cast(nzd as varchar), '') || '-' || coalesce(cast(usd as varchar), '') as varchar)) as _airbyte_dedup_exchange_rate_hashid, + md5(cast(coalesce(cast(id as text), '') || '-' || coalesce(cast(currency as text), '') || '-' || coalesce(cast(date as text), '') || '-' || coalesce(cast(timestamp_col as text), '') || '-' || coalesce(cast("hkd@spƩƧiƤl & characters" as text), '') || '-' || coalesce(cast(hkd_special___characters as text), '') || '-' || coalesce(cast(nzd as text), '') || '-' || coalesce(cast(usd as text), '') as text)) as _airbyte_dedup_exchange_rate_hashid, tmp.* from __dbt__cte__dedup_exchange_rate_ab2 tmp -- dedup_exchange_rate diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_simple_streams/third_output/airbyte_tables/test_normalization/exchange_rate.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_simple_streams/third_output/airbyte_tables/test_normalization/exchange_rate.sql index 5fb76893d793..1fa0ba1e56c4 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_simple_streams/third_output/airbyte_tables/test_normalization/exchange_rate.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_simple_streams/third_output/airbyte_tables/test_normalization/exchange_rate.sql @@ -13,15 +13,15 @@ with __dbt__cte__exchange_rate_ab1 as ( -- SQL model to parse JSON blob stored in a single column and extract into separated 
field columns as described by the JSON Schema -- depends_on: "integrationtests".test_normalization._airbyte_raw_exchange_rate select - case when json_extract_path_text(_airbyte_data, 'id', true) != '' then json_extract_path_text(_airbyte_data, 'id', true) end as id, - case when json_extract_path_text(_airbyte_data, 'currency', true) != '' then json_extract_path_text(_airbyte_data, 'currency', true) end as currency, - case when json_extract_path_text(_airbyte_data, 'new_column', true) != '' then json_extract_path_text(_airbyte_data, 'new_column', true) end as new_column, - case when json_extract_path_text(_airbyte_data, 'date', true) != '' then json_extract_path_text(_airbyte_data, 'date', true) end as date, - case when json_extract_path_text(_airbyte_data, 'timestamp_col', true) != '' then json_extract_path_text(_airbyte_data, 'timestamp_col', true) end as timestamp_col, - case when json_extract_path_text(_airbyte_data, 'HKD@spƩƧiƤl & characters', true) != '' then json_extract_path_text(_airbyte_data, 'HKD@spƩƧiƤl & characters', true) end as "hkd@spƩƧiƤl & characters", - case when json_extract_path_text(_airbyte_data, 'NZD', true) != '' then json_extract_path_text(_airbyte_data, 'NZD', true) end as nzd, - case when json_extract_path_text(_airbyte_data, 'USD', true) != '' then json_extract_path_text(_airbyte_data, 'USD', true) end as usd, - case when json_extract_path_text(_airbyte_data, 'column`_''with"_quotes', true) != '' then json_extract_path_text(_airbyte_data, 'column`_''with"_quotes', true) end as "column`_'with""_quotes", + case when _airbyte_data."id" != '' then _airbyte_data."id" end as id, + case when _airbyte_data."currency" != '' then _airbyte_data."currency" end as currency, + case when _airbyte_data."new_column" != '' then _airbyte_data."new_column" end as new_column, + case when _airbyte_data."date" != '' then _airbyte_data."date" end as date, + case when _airbyte_data."timestamp_col" != '' then _airbyte_data."timestamp_col" end as timestamp_col, + 
case when _airbyte_data."HKD@spƩƧiƤl & characters" != '' then _airbyte_data."HKD@spƩƧiƤl & characters" end as "hkd@spƩƧiƤl & characters", + case when _airbyte_data."NZD" != '' then _airbyte_data."NZD" end as nzd, + case when _airbyte_data."USD" != '' then _airbyte_data."USD" end as usd, + case when _airbyte_data."column`_'with""_quotes" != '' then _airbyte_data."column`_'with""_quotes" end as "column`_'with""_quotes", _airbyte_ab_id, _airbyte_emitted_at, getdate() as _airbyte_normalized_at @@ -36,14 +36,14 @@ select cast(id as float ) as id, - cast(currency as varchar) as currency, + cast(currency as text) as currency, cast(new_column as float ) as new_column, - cast(nullif(date, '') as + cast(nullif(date::varchar, '') as date ) as date, - cast(nullif(timestamp_col, '') as + cast(nullif(timestamp_col::varchar, '') as timestamp with time zone ) as timestamp_col, cast("hkd@spƩƧiƤl & characters" as @@ -55,7 +55,7 @@ select cast(usd as float ) as usd, - cast("column`_'with""_quotes" as varchar) as "column`_'with""_quotes", + cast("column`_'with""_quotes" as text) as "column`_'with""_quotes", _airbyte_ab_id, _airbyte_emitted_at, getdate() as _airbyte_normalized_at @@ -67,7 +67,7 @@ where 1 = 1 -- SQL model to build a hash column based on the values of this record -- depends_on: __dbt__cte__exchange_rate_ab2 select - md5(cast(coalesce(cast(id as varchar), '') || '-' || coalesce(cast(currency as varchar), '') || '-' || coalesce(cast(new_column as varchar), '') || '-' || coalesce(cast(date as varchar), '') || '-' || coalesce(cast(timestamp_col as varchar), '') || '-' || coalesce(cast("hkd@spƩƧiƤl & characters" as varchar), '') || '-' || coalesce(cast(nzd as varchar), '') || '-' || coalesce(cast(usd as varchar), '') || '-' || coalesce(cast("column`_'with""_quotes" as varchar), '') as varchar)) as _airbyte_exchange_rate_hashid, + md5(cast(coalesce(cast(id as text), '') || '-' || coalesce(cast(currency as text), '') || '-' || coalesce(cast(new_column as text), '') || '-' || 
coalesce(cast(date as text), '') || '-' || coalesce(cast(timestamp_col as text), '') || '-' || coalesce(cast("hkd@spƩƧiƤl & characters" as text), '') || '-' || coalesce(cast(nzd as text), '') || '-' || coalesce(cast(usd as text), '') || '-' || coalesce(cast("column`_'with""_quotes" as text), '') as text)) as _airbyte_exchange_rate_hashid, tmp.* from __dbt__cte__exchange_rate_ab2 tmp -- exchange_rate diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_simple_streams/third_output/airbyte_views/test_normalization/dedup_exchange_rate_stg.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_simple_streams/third_output/airbyte_views/test_normalization/dedup_exchange_rate_stg.sql index 797b5a85940c..c7ee5d552f22 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_simple_streams/third_output/airbyte_views/test_normalization/dedup_exchange_rate_stg.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_simple_streams/third_output/airbyte_views/test_normalization/dedup_exchange_rate_stg.sql @@ -7,14 +7,14 @@ with __dbt__cte__dedup_exchange_rate_ab1 as ( -- SQL model to parse JSON blob stored in a single column and extract into separated field columns as described by the JSON Schema -- depends_on: "integrationtests".test_normalization._airbyte_raw_dedup_exchange_rate select - case when json_extract_path_text(_airbyte_data, 'id', true) != '' then json_extract_path_text(_airbyte_data, 'id', true) end as id, - case when json_extract_path_text(_airbyte_data, 'currency', true) != '' then json_extract_path_text(_airbyte_data, 'currency', true) end as currency, - case when json_extract_path_text(_airbyte_data, 'new_column', true) != '' then json_extract_path_text(_airbyte_data, 'new_column', true) end as new_column, - case when 
json_extract_path_text(_airbyte_data, 'date', true) != '' then json_extract_path_text(_airbyte_data, 'date', true) end as date, - case when json_extract_path_text(_airbyte_data, 'timestamp_col', true) != '' then json_extract_path_text(_airbyte_data, 'timestamp_col', true) end as timestamp_col, - case when json_extract_path_text(_airbyte_data, 'HKD@spƩƧiƤl & characters', true) != '' then json_extract_path_text(_airbyte_data, 'HKD@spƩƧiƤl & characters', true) end as "hkd@spƩƧiƤl & characters", - case when json_extract_path_text(_airbyte_data, 'NZD', true) != '' then json_extract_path_text(_airbyte_data, 'NZD', true) end as nzd, - case when json_extract_path_text(_airbyte_data, 'USD', true) != '' then json_extract_path_text(_airbyte_data, 'USD', true) end as usd, + case when _airbyte_data."id" != '' then _airbyte_data."id" end as id, + case when _airbyte_data."currency" != '' then _airbyte_data."currency" end as currency, + case when _airbyte_data."new_column" != '' then _airbyte_data."new_column" end as new_column, + case when _airbyte_data."date" != '' then _airbyte_data."date" end as date, + case when _airbyte_data."timestamp_col" != '' then _airbyte_data."timestamp_col" end as timestamp_col, + case when _airbyte_data."HKD@spƩƧiƤl & characters" != '' then _airbyte_data."HKD@spƩƧiƤl & characters" end as "hkd@spƩƧiƤl & characters", + case when _airbyte_data."NZD" != '' then _airbyte_data."NZD" end as nzd, + case when _airbyte_data."USD" != '' then _airbyte_data."USD" end as usd, _airbyte_ab_id, _airbyte_emitted_at, getdate() as _airbyte_normalized_at @@ -30,14 +30,14 @@ select cast(id as float ) as id, - cast(currency as varchar) as currency, + cast(currency as text) as currency, cast(new_column as float ) as new_column, - cast(nullif(date, '') as + cast(nullif(date::varchar, '') as date ) as date, - cast(nullif(timestamp_col, '') as + cast(nullif(timestamp_col::varchar, '') as timestamp with time zone ) as timestamp_col, cast("hkd@spƩƧiƤl & characters" as @@ -59,7 
+59,7 @@ where 1 = 1 )-- SQL model to build a hash column based on the values of this record -- depends_on: __dbt__cte__dedup_exchange_rate_ab2 select - md5(cast(coalesce(cast(id as varchar), '') || '-' || coalesce(cast(currency as varchar), '') || '-' || coalesce(cast(new_column as varchar), '') || '-' || coalesce(cast(date as varchar), '') || '-' || coalesce(cast(timestamp_col as varchar), '') || '-' || coalesce(cast("hkd@spƩƧiƤl & characters" as varchar), '') || '-' || coalesce(cast(nzd as varchar), '') || '-' || coalesce(cast(usd as varchar), '') as varchar)) as _airbyte_dedup_exchange_rate_hashid, + md5(cast(coalesce(cast(id as text), '') || '-' || coalesce(cast(currency as text), '') || '-' || coalesce(cast(new_column as text), '') || '-' || coalesce(cast(date as text), '') || '-' || coalesce(cast(timestamp_col as text), '') || '-' || coalesce(cast("hkd@spƩƧiƤl & characters" as text), '') || '-' || coalesce(cast(nzd as text), '') || '-' || coalesce(cast(usd as text), '') as text)) as _airbyte_dedup_exchange_rate_hashid, tmp.* from __dbt__cte__dedup_exchange_rate_ab2 tmp -- dedup_exchange_rate diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/snowflake/test_nested_streams/models/generated/airbyte_ctes/TEST_NORMALIZATION/NESTED_STREAM_WITH_COMPLEX_COLUMNS_RESULTING_INTO_LONG_NAMES_AB1.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/snowflake/test_nested_streams/models/generated/airbyte_ctes/TEST_NORMALIZATION/NESTED_STREAM_WITH_COMPLEX_COLUMNS_RESULTING_INTO_LONG_NAMES_AB1.sql index 7a583b5f16f7..772f1976f2c6 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/snowflake/test_nested_streams/models/generated/airbyte_ctes/TEST_NORMALIZATION/NESTED_STREAM_WITH_COMPLEX_COLUMNS_RESULTING_INTO_LONG_NAMES_AB1.sql +++ 
b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/snowflake/test_nested_streams/models/generated/airbyte_ctes/TEST_NORMALIZATION/NESTED_STREAM_WITH_COMPLEX_COLUMNS_RESULTING_INTO_LONG_NAMES_AB1.sql @@ -16,5 +16,5 @@ select from {{ source('TEST_NORMALIZATION', '_AIRBYTE_RAW_NESTED_STREAM_WITH_COMPLEX_COLUMNS_RESULTING_INTO_LONG_NAMES') }} as table_alias -- NESTED_STREAM_WITH_COMPLEX_COLUMNS_RESULTING_INTO_LONG_NAMES where 1 = 1 -{{ incremental_clause('_AIRBYTE_EMITTED_AT') }} +{{ incremental_clause('_AIRBYTE_EMITTED_AT', this) }} diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/snowflake/test_nested_streams/models/generated/airbyte_ctes/TEST_NORMALIZATION/NESTED_STREAM_WITH_COMPLEX_COLUMNS_RESULTING_INTO_LONG_NAMES_AB2.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/snowflake/test_nested_streams/models/generated/airbyte_ctes/TEST_NORMALIZATION/NESTED_STREAM_WITH_COMPLEX_COLUMNS_RESULTING_INTO_LONG_NAMES_AB2.sql index ff84f05bcf23..fd49a8524a64 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/snowflake/test_nested_streams/models/generated/airbyte_ctes/TEST_NORMALIZATION/NESTED_STREAM_WITH_COMPLEX_COLUMNS_RESULTING_INTO_LONG_NAMES_AB2.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/snowflake/test_nested_streams/models/generated/airbyte_ctes/TEST_NORMALIZATION/NESTED_STREAM_WITH_COMPLEX_COLUMNS_RESULTING_INTO_LONG_NAMES_AB2.sql @@ -16,5 +16,5 @@ select from {{ ref('NESTED_STREAM_WITH_COMPLEX_COLUMNS_RESULTING_INTO_LONG_NAMES_AB1') }} -- NESTED_STREAM_WITH_COMPLEX_COLUMNS_RESULTING_INTO_LONG_NAMES where 1 = 1 -{{ incremental_clause('_AIRBYTE_EMITTED_AT') }} +{{ incremental_clause('_AIRBYTE_EMITTED_AT', this) }} diff --git 
a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/snowflake/test_nested_streams/models/generated/airbyte_ctes/TEST_NORMALIZATION/NESTED_STREAM_WITH_COMPLEX_COLUMNS_RESULTING_INTO_LONG_NAMES_PARTITION_AB1.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/snowflake/test_nested_streams/models/generated/airbyte_ctes/TEST_NORMALIZATION/NESTED_STREAM_WITH_COMPLEX_COLUMNS_RESULTING_INTO_LONG_NAMES_PARTITION_AB1.sql index bd6a0678a36a..e6c344e6308d 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/snowflake/test_nested_streams/models/generated/airbyte_ctes/TEST_NORMALIZATION/NESTED_STREAM_WITH_COMPLEX_COLUMNS_RESULTING_INTO_LONG_NAMES_PARTITION_AB1.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/snowflake/test_nested_streams/models/generated/airbyte_ctes/TEST_NORMALIZATION/NESTED_STREAM_WITH_COMPLEX_COLUMNS_RESULTING_INTO_LONG_NAMES_PARTITION_AB1.sql @@ -16,5 +16,5 @@ from {{ ref('NESTED_STREAM_WITH_COMPLEX_COLUMNS_RESULTING_INTO_LONG_NAMES_SCD') -- PARTITION at nested_stream_with_complex_columns_resulting_into_long_names/partition where 1 = 1 and PARTITION is not null -{{ incremental_clause('_AIRBYTE_EMITTED_AT') }} +{{ incremental_clause('_AIRBYTE_EMITTED_AT', this) }} diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/snowflake/test_nested_streams/models/generated/airbyte_ctes/TEST_NORMALIZATION/NESTED_STREAM_WITH_COMPLEX_COLUMNS_RESULTING_INTO_LONG_NAMES_PARTITION_DATA_AB1.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/snowflake/test_nested_streams/models/generated/airbyte_ctes/TEST_NORMALIZATION/NESTED_STREAM_WITH_COMPLEX_COLUMNS_RESULTING_INTO_LONG_NAMES_PARTITION_DATA_AB1.sql index d7c93aa351da..050da953efdd 100644 --- 
a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/snowflake/test_nested_streams/models/generated/airbyte_ctes/TEST_NORMALIZATION/NESTED_STREAM_WITH_COMPLEX_COLUMNS_RESULTING_INTO_LONG_NAMES_PARTITION_DATA_AB1.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/snowflake/test_nested_streams/models/generated/airbyte_ctes/TEST_NORMALIZATION/NESTED_STREAM_WITH_COMPLEX_COLUMNS_RESULTING_INTO_LONG_NAMES_PARTITION_DATA_AB1.sql @@ -17,5 +17,5 @@ from {{ ref('NESTED_STREAM_WITH_COMPLEX_COLUMNS_RESULTING_INTO_LONG_NAMES_PARTIT {{ cross_join_unnest('PARTITION', 'DATA') }} where 1 = 1 and DATA is not null -{{ incremental_clause('_AIRBYTE_EMITTED_AT') }} +{{ incremental_clause('_AIRBYTE_EMITTED_AT', this) }} diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/snowflake/test_nested_streams/models/generated/airbyte_ctes/TEST_NORMALIZATION/NESTED_STREAM_WITH_COMPLEX_COLUMNS_RESULTING_INTO_LONG_NAMES_PARTITION_DOUBLE_ARRAY_DATA_AB1.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/snowflake/test_nested_streams/models/generated/airbyte_ctes/TEST_NORMALIZATION/NESTED_STREAM_WITH_COMPLEX_COLUMNS_RESULTING_INTO_LONG_NAMES_PARTITION_DOUBLE_ARRAY_DATA_AB1.sql index d887fcbbac97..13b208068c10 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/snowflake/test_nested_streams/models/generated/airbyte_ctes/TEST_NORMALIZATION/NESTED_STREAM_WITH_COMPLEX_COLUMNS_RESULTING_INTO_LONG_NAMES_PARTITION_DOUBLE_ARRAY_DATA_AB1.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/snowflake/test_nested_streams/models/generated/airbyte_ctes/TEST_NORMALIZATION/NESTED_STREAM_WITH_COMPLEX_COLUMNS_RESULTING_INTO_LONG_NAMES_PARTITION_DOUBLE_ARRAY_DATA_AB1.sql @@ -17,5 +17,5 @@ from {{ 
ref('NESTED_STREAM_WITH_COMPLEX_COLUMNS_RESULTING_INTO_LONG_NAMES_PARTIT {{ cross_join_unnest('PARTITION', 'DOUBLE_ARRAY_DATA') }} where 1 = 1 and DOUBLE_ARRAY_DATA is not null -{{ incremental_clause('_AIRBYTE_EMITTED_AT') }} +{{ incremental_clause('_AIRBYTE_EMITTED_AT', this) }} diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/snowflake/test_nested_streams/models/generated/airbyte_incremental/TEST_NORMALIZATION/NESTED_STREAM_WITH_COMPLEX_COLUMNS_RESULTING_INTO_LONG_NAMES.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/snowflake/test_nested_streams/models/generated/airbyte_incremental/TEST_NORMALIZATION/NESTED_STREAM_WITH_COMPLEX_COLUMNS_RESULTING_INTO_LONG_NAMES.sql index e78648f62415..110c17ef216d 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/snowflake/test_nested_streams/models/generated/airbyte_incremental/TEST_NORMALIZATION/NESTED_STREAM_WITH_COMPLEX_COLUMNS_RESULTING_INTO_LONG_NAMES.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/snowflake/test_nested_streams/models/generated/airbyte_incremental/TEST_NORMALIZATION/NESTED_STREAM_WITH_COMPLEX_COLUMNS_RESULTING_INTO_LONG_NAMES.sql @@ -19,5 +19,5 @@ from {{ ref('NESTED_STREAM_WITH_COMPLEX_COLUMNS_RESULTING_INTO_LONG_NAMES_SCD') -- NESTED_STREAM_WITH_COMPLEX_COLUMNS_RESULTING_INTO_LONG_NAMES from {{ source('TEST_NORMALIZATION', '_AIRBYTE_RAW_NESTED_STREAM_WITH_COMPLEX_COLUMNS_RESULTING_INTO_LONG_NAMES') }} where 1 = 1 and _AIRBYTE_ACTIVE_ROW = 1 -{{ incremental_clause('_AIRBYTE_EMITTED_AT') }} +{{ incremental_clause('_AIRBYTE_EMITTED_AT', this) }} diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/snowflake/test_nested_streams/models/generated/airbyte_incremental/TEST_NORMALIZATION/NESTED_STREAM_WITH_COMPLEX_COLUMNS_RESULTING_INTO_LONG_NAMES_PARTITION.sql 
b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/snowflake/test_nested_streams/models/generated/airbyte_incremental/TEST_NORMALIZATION/NESTED_STREAM_WITH_COMPLEX_COLUMNS_RESULTING_INTO_LONG_NAMES_PARTITION.sql index 29b0545db886..3dda7efc9c61 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/snowflake/test_nested_streams/models/generated/airbyte_incremental/TEST_NORMALIZATION/NESTED_STREAM_WITH_COMPLEX_COLUMNS_RESULTING_INTO_LONG_NAMES_PARTITION.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/snowflake/test_nested_streams/models/generated/airbyte_incremental/TEST_NORMALIZATION/NESTED_STREAM_WITH_COMPLEX_COLUMNS_RESULTING_INTO_LONG_NAMES_PARTITION.sql @@ -16,5 +16,5 @@ select from {{ ref('NESTED_STREAM_WITH_COMPLEX_COLUMNS_RESULTING_INTO_LONG_NAMES_PARTITION_AB3') }} -- PARTITION at nested_stream_with_complex_columns_resulting_into_long_names/partition from {{ ref('NESTED_STREAM_WITH_COMPLEX_COLUMNS_RESULTING_INTO_LONG_NAMES_SCD') }} where 1 = 1 -{{ incremental_clause('_AIRBYTE_EMITTED_AT') }} +{{ incremental_clause('_AIRBYTE_EMITTED_AT', this) }} diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/snowflake/test_nested_streams/models/generated/airbyte_incremental/TEST_NORMALIZATION/NESTED_STREAM_WITH_COMPLEX_COLUMNS_RESULTING_INTO_LONG_NAMES_PARTITION_DATA.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/snowflake/test_nested_streams/models/generated/airbyte_incremental/TEST_NORMALIZATION/NESTED_STREAM_WITH_COMPLEX_COLUMNS_RESULTING_INTO_LONG_NAMES_PARTITION_DATA.sql index 721c594cbead..526c8b658f19 100644 --- 
a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/snowflake/test_nested_streams/models/generated/airbyte_incremental/TEST_NORMALIZATION/NESTED_STREAM_WITH_COMPLEX_COLUMNS_RESULTING_INTO_LONG_NAMES_PARTITION_DATA.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/snowflake/test_nested_streams/models/generated/airbyte_incremental/TEST_NORMALIZATION/NESTED_STREAM_WITH_COMPLEX_COLUMNS_RESULTING_INTO_LONG_NAMES_PARTITION_DATA.sql @@ -15,5 +15,5 @@ select from {{ ref('NESTED_STREAM_WITH_COMPLEX_COLUMNS_RESULTING_INTO_LONG_NAMES_PARTITION_DATA_AB3') }} -- DATA at nested_stream_with_complex_columns_resulting_into_long_names/partition/DATA from {{ ref('NESTED_STREAM_WITH_COMPLEX_COLUMNS_RESULTING_INTO_LONG_NAMES_PARTITION') }} where 1 = 1 -{{ incremental_clause('_AIRBYTE_EMITTED_AT') }} +{{ incremental_clause('_AIRBYTE_EMITTED_AT', this) }} diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/snowflake/test_nested_streams/models/generated/airbyte_incremental/TEST_NORMALIZATION/NESTED_STREAM_WITH_COMPLEX_COLUMNS_RESULTING_INTO_LONG_NAMES_PARTITION_DOUBLE_ARRAY_DATA.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/snowflake/test_nested_streams/models/generated/airbyte_incremental/TEST_NORMALIZATION/NESTED_STREAM_WITH_COMPLEX_COLUMNS_RESULTING_INTO_LONG_NAMES_PARTITION_DOUBLE_ARRAY_DATA.sql index 11cbbf596cf9..c46547e9a624 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/snowflake/test_nested_streams/models/generated/airbyte_incremental/TEST_NORMALIZATION/NESTED_STREAM_WITH_COMPLEX_COLUMNS_RESULTING_INTO_LONG_NAMES_PARTITION_DOUBLE_ARRAY_DATA.sql +++ 
b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/snowflake/test_nested_streams/models/generated/airbyte_incremental/TEST_NORMALIZATION/NESTED_STREAM_WITH_COMPLEX_COLUMNS_RESULTING_INTO_LONG_NAMES_PARTITION_DOUBLE_ARRAY_DATA.sql @@ -15,5 +15,5 @@ select from {{ ref('NESTED_STREAM_WITH_COMPLEX_COLUMNS_RESULTING_INTO_LONG_NAMES_PARTITION_DOUBLE_ARRAY_DATA_AB3') }} -- DOUBLE_ARRAY_DATA at nested_stream_with_complex_columns_resulting_into_long_names/partition/double_array_data from {{ ref('NESTED_STREAM_WITH_COMPLEX_COLUMNS_RESULTING_INTO_LONG_NAMES_PARTITION') }} where 1 = 1 -{{ incremental_clause('_AIRBYTE_EMITTED_AT') }} +{{ incremental_clause('_AIRBYTE_EMITTED_AT', this) }} diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/snowflake/test_nested_streams/models/generated/airbyte_incremental/scd/TEST_NORMALIZATION/NESTED_STREAM_WITH_COMPLEX_COLUMNS_RESULTING_INTO_LONG_NAMES_SCD.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/snowflake/test_nested_streams/models/generated/airbyte_incremental/scd/TEST_NORMALIZATION/NESTED_STREAM_WITH_COMPLEX_COLUMNS_RESULTING_INTO_LONG_NAMES_SCD.sql index 9435ebaf2bc1..7b46e390d057 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/snowflake/test_nested_streams/models/generated/airbyte_incremental/scd/TEST_NORMALIZATION/NESTED_STREAM_WITH_COMPLEX_COLUMNS_RESULTING_INTO_LONG_NAMES_SCD.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/snowflake/test_nested_streams/models/generated/airbyte_incremental/scd/TEST_NORMALIZATION/NESTED_STREAM_WITH_COMPLEX_COLUMNS_RESULTING_INTO_LONG_NAMES_SCD.sql @@ -2,7 +2,53 @@ cluster_by = ["_AIRBYTE_ACTIVE_ROW", "_AIRBYTE_UNIQUE_KEY_SCD", "_AIRBYTE_EMITTED_AT"], unique_key = "_AIRBYTE_UNIQUE_KEY_SCD", schema = "TEST_NORMALIZATION", - post_hook = ["drop view 
_AIRBYTE_TEST_NORMALIZATION.NESTED_STREAM_WITH_COMPLEX_COLUMNS_RESULTING_INTO_LONG_NAMES_STG"], + post_hook = [" + {% + set final_table_relation = adapter.get_relation( + database=this.database, + schema=this.schema, + identifier='NESTED_STREAM_WITH_COMPLEX_COLUMNS_RESULTING_INTO_LONG_NAMES' + ) + %} + {# + If the final table doesn't exist, then obviously we can't delete anything from it. + Also, after a reset, the final table is created without the _airbyte_unique_key column (this column is created during the first sync) + So skip this deletion if the column doesn't exist. (in this case, the table is guaranteed to be empty anyway) + #} + {% + if final_table_relation is not none and '_AIRBYTE_UNIQUE_KEY' in adapter.get_columns_in_relation(final_table_relation)|map(attribute='name') + %} + -- Delete records which are no longer active: + -- This query is equivalent, but the left join version is more performant: + -- delete from final_table where unique_key in ( + -- select unique_key from scd_table where 1 = 1 + -- ) and unique_key not in ( + -- select unique_key from scd_table where active_row = 1 + -- ) + -- We're incremental against normalized_at rather than emitted_at because we need to fetch the SCD + -- entries that were _updated_ recently. This is because a deleted record will have an SCD record + -- which was emitted a long time ago, but recently re-normalized to have active_row = 0. + delete from {{ final_table_relation }} where {{ final_table_relation }}._AIRBYTE_UNIQUE_KEY in ( + select recent_records.unique_key + from ( + select distinct _AIRBYTE_UNIQUE_KEY as unique_key + from {{ this }} + where 1=1 {{ incremental_clause('_AIRBYTE_NORMALIZED_AT', this.schema + '.' 
+ adapter.quote('NESTED_STREAM_WITH_COMPLEX_COLUMNS_RESULTING_INTO_LONG_NAMES')) }} + ) recent_records + left join ( + select _AIRBYTE_UNIQUE_KEY as unique_key, count(_AIRBYTE_UNIQUE_KEY) as active_count + from {{ this }} + where _AIRBYTE_ACTIVE_ROW = 1 {{ incremental_clause('_AIRBYTE_NORMALIZED_AT', this.schema + '.' + adapter.quote('NESTED_STREAM_WITH_COMPLEX_COLUMNS_RESULTING_INTO_LONG_NAMES')) }} + group by _AIRBYTE_UNIQUE_KEY + ) active_counts + on recent_records.unique_key = active_counts.unique_key + where active_count is null or active_count = 0 + ) + {% else %} + -- We have to have a non-empty query, so just do a noop delete + delete from {{ this }} where 1=0 + {% endif %} + ","drop view _AIRBYTE_TEST_NORMALIZATION.NESTED_STREAM_WITH_COMPLEX_COLUMNS_RESULTING_INTO_LONG_NAMES_STG"], tags = [ "top-level" ] ) }} -- depends_on: ref('NESTED_STREAM_WITH_COMPLEX_COLUMNS_RESULTING_INTO_LONG_NAMES_STG') @@ -15,7 +61,7 @@ new_data as ( from {{ ref('NESTED_STREAM_WITH_COMPLEX_COLUMNS_RESULTING_INTO_LONG_NAMES_STG') }} -- NESTED_STREAM_WITH_COMPLEX_COLUMNS_RESULTING_INTO_LONG_NAMES from {{ source('TEST_NORMALIZATION', '_AIRBYTE_RAW_NESTED_STREAM_WITH_COMPLEX_COLUMNS_RESULTING_INTO_LONG_NAMES') }} where 1 = 1 - {{ incremental_clause('_AIRBYTE_EMITTED_AT') }} + {{ incremental_clause('_AIRBYTE_EMITTED_AT', this) }} ), new_data_ids as ( -- build a subset of _AIRBYTE_UNIQUE_KEY from rows that are new diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/snowflake/test_simple_streams/models/generated/airbyte_ctes/TEST_NORMALIZATION/DEDUP_EXCHANGE_RATE_AB1.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/snowflake/test_simple_streams/models/generated/airbyte_ctes/TEST_NORMALIZATION/DEDUP_EXCHANGE_RATE_AB1.sql index 64750e4ebfa8..06be4a0eaa2f 100644 --- 
a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/snowflake/test_simple_streams/models/generated/airbyte_ctes/TEST_NORMALIZATION/DEDUP_EXCHANGE_RATE_AB1.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/snowflake/test_simple_streams/models/generated/airbyte_ctes/TEST_NORMALIZATION/DEDUP_EXCHANGE_RATE_AB1.sql @@ -21,5 +21,5 @@ select from {{ source('TEST_NORMALIZATION', '_AIRBYTE_RAW_DEDUP_EXCHANGE_RATE') }} as table_alias -- DEDUP_EXCHANGE_RATE where 1 = 1 -{{ incremental_clause('_AIRBYTE_EMITTED_AT') }} +{{ incremental_clause('_AIRBYTE_EMITTED_AT', this) }} diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/snowflake/test_simple_streams/models/generated/airbyte_ctes/TEST_NORMALIZATION/DEDUP_EXCHANGE_RATE_AB2.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/snowflake/test_simple_streams/models/generated/airbyte_ctes/TEST_NORMALIZATION/DEDUP_EXCHANGE_RATE_AB2.sql index 4a89013a88ee..f3a40af778cc 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/snowflake/test_simple_streams/models/generated/airbyte_ctes/TEST_NORMALIZATION/DEDUP_EXCHANGE_RATE_AB2.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/snowflake/test_simple_streams/models/generated/airbyte_ctes/TEST_NORMALIZATION/DEDUP_EXCHANGE_RATE_AB2.sql @@ -29,5 +29,5 @@ select from {{ ref('DEDUP_EXCHANGE_RATE_AB1') }} -- DEDUP_EXCHANGE_RATE where 1 = 1 -{{ incremental_clause('_AIRBYTE_EMITTED_AT') }} +{{ incremental_clause('_AIRBYTE_EMITTED_AT', this) }} diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/snowflake/test_simple_streams/models/generated/airbyte_incremental/TEST_NORMALIZATION/DEDUP_EXCHANGE_RATE.sql 
b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/snowflake/test_simple_streams/models/generated/airbyte_incremental/TEST_NORMALIZATION/DEDUP_EXCHANGE_RATE.sql index 0cf5e6b3819a..0663a8d251e4 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/snowflake/test_simple_streams/models/generated/airbyte_incremental/TEST_NORMALIZATION/DEDUP_EXCHANGE_RATE.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/snowflake/test_simple_streams/models/generated/airbyte_incremental/TEST_NORMALIZATION/DEDUP_EXCHANGE_RATE.sql @@ -24,5 +24,5 @@ from {{ ref('DEDUP_EXCHANGE_RATE_SCD') }} -- DEDUP_EXCHANGE_RATE from {{ source('TEST_NORMALIZATION', '_AIRBYTE_RAW_DEDUP_EXCHANGE_RATE') }} where 1 = 1 and _AIRBYTE_ACTIVE_ROW = 1 -{{ incremental_clause('_AIRBYTE_EMITTED_AT') }} +{{ incremental_clause('_AIRBYTE_EMITTED_AT', this) }} diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/snowflake/test_simple_streams/models/generated/airbyte_incremental/scd/TEST_NORMALIZATION/DEDUP_EXCHANGE_RATE_SCD.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/snowflake/test_simple_streams/models/generated/airbyte_incremental/scd/TEST_NORMALIZATION/DEDUP_EXCHANGE_RATE_SCD.sql index 688926bdcab0..13f493601511 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/snowflake/test_simple_streams/models/generated/airbyte_incremental/scd/TEST_NORMALIZATION/DEDUP_EXCHANGE_RATE_SCD.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/snowflake/test_simple_streams/models/generated/airbyte_incremental/scd/TEST_NORMALIZATION/DEDUP_EXCHANGE_RATE_SCD.sql @@ -2,7 +2,53 @@ cluster_by = ["_AIRBYTE_ACTIVE_ROW", "_AIRBYTE_UNIQUE_KEY_SCD", "_AIRBYTE_EMITTED_AT"], unique_key = 
"_AIRBYTE_UNIQUE_KEY_SCD", schema = "TEST_NORMALIZATION", - post_hook = ["drop view _AIRBYTE_TEST_NORMALIZATION.DEDUP_EXCHANGE_RATE_STG"], + post_hook = [" + {% + set final_table_relation = adapter.get_relation( + database=this.database, + schema=this.schema, + identifier='DEDUP_EXCHANGE_RATE' + ) + %} + {# + If the final table doesn't exist, then obviously we can't delete anything from it. + Also, after a reset, the final table is created without the _airbyte_unique_key column (this column is created during the first sync) + So skip this deletion if the column doesn't exist. (in this case, the table is guaranteed to be empty anyway) + #} + {% + if final_table_relation is not none and '_AIRBYTE_UNIQUE_KEY' in adapter.get_columns_in_relation(final_table_relation)|map(attribute='name') + %} + -- Delete records which are no longer active: + -- This query is equivalent, but the left join version is more performant: + -- delete from final_table where unique_key in ( + -- select unique_key from scd_table where 1 = 1 + -- ) and unique_key not in ( + -- select unique_key from scd_table where active_row = 1 + -- ) + -- We're incremental against normalized_at rather than emitted_at because we need to fetch the SCD + -- entries that were _updated_ recently. This is because a deleted record will have an SCD record + -- which was emitted a long time ago, but recently re-normalized to have active_row = 0. + delete from {{ final_table_relation }} where {{ final_table_relation }}._AIRBYTE_UNIQUE_KEY in ( + select recent_records.unique_key + from ( + select distinct _AIRBYTE_UNIQUE_KEY as unique_key + from {{ this }} + where 1=1 {{ incremental_clause('_AIRBYTE_NORMALIZED_AT', this.schema + '.' + adapter.quote('DEDUP_EXCHANGE_RATE')) }} + ) recent_records + left join ( + select _AIRBYTE_UNIQUE_KEY as unique_key, count(_AIRBYTE_UNIQUE_KEY) as active_count + from {{ this }} + where _AIRBYTE_ACTIVE_ROW = 1 {{ incremental_clause('_AIRBYTE_NORMALIZED_AT', this.schema + '.' 
+ adapter.quote('DEDUP_EXCHANGE_RATE')) }} + group by _AIRBYTE_UNIQUE_KEY + ) active_counts + on recent_records.unique_key = active_counts.unique_key + where active_count is null or active_count = 0 + ) + {% else %} + -- We have to have a non-empty query, so just do a noop delete + delete from {{ this }} where 1=0 + {% endif %} + ","drop view _AIRBYTE_TEST_NORMALIZATION.DEDUP_EXCHANGE_RATE_STG"], tags = [ "top-level" ] ) }} -- depends_on: ref('DEDUP_EXCHANGE_RATE_STG') @@ -15,7 +61,7 @@ new_data as ( from {{ ref('DEDUP_EXCHANGE_RATE_STG') }} -- DEDUP_EXCHANGE_RATE from {{ source('TEST_NORMALIZATION', '_AIRBYTE_RAW_DEDUP_EXCHANGE_RATE') }} where 1 = 1 - {{ incremental_clause('_AIRBYTE_EMITTED_AT') }} + {{ incremental_clause('_AIRBYTE_EMITTED_AT', this) }} ), new_data_ids as ( -- build a subset of _AIRBYTE_UNIQUE_KEY from rows that are new diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/snowflake/test_simple_streams/models/generated/airbyte_views/TEST_NORMALIZATION/DEDUP_EXCHANGE_RATE_STG.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/snowflake/test_simple_streams/models/generated/airbyte_views/TEST_NORMALIZATION/DEDUP_EXCHANGE_RATE_STG.sql index e40d4e943eb7..d810a79652be 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/snowflake/test_simple_streams/models/generated/airbyte_views/TEST_NORMALIZATION/DEDUP_EXCHANGE_RATE_STG.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/snowflake/test_simple_streams/models/generated/airbyte_views/TEST_NORMALIZATION/DEDUP_EXCHANGE_RATE_STG.sql @@ -21,5 +21,5 @@ select from {{ ref('DEDUP_EXCHANGE_RATE_AB2') }} tmp -- DEDUP_EXCHANGE_RATE where 1 = 1 -{{ incremental_clause('_AIRBYTE_EMITTED_AT') }} +{{ incremental_clause('_AIRBYTE_EMITTED_AT', this) }} diff --git 
a/airbyte-integrations/bases/base-normalization/integration_tests/resources/test_nested_streams/data_input/catalog.json b/airbyte-integrations/bases/base-normalization/integration_tests/resources/test_nested_streams/data_input/catalog.json index 0dcf0280543c..cbab9cf3aa20 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/resources/test_nested_streams/data_input/catalog.json +++ b/airbyte-integrations/bases/base-normalization/integration_tests/resources/test_nested_streams/data_input/catalog.json @@ -272,6 +272,29 @@ "sync_mode": "incremental", "cursor_field": [], "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "arrays", + "json_schema": { + "type": ["null", "object"], + "properties": { + "array_of_strings": { + "type": ["null", "array"], + "items": { + "type": ["null", "string"] + } + } + } + }, + "supported_sync_modes": ["full_refresh"], + "source_defined_cursor": true, + "default_cursor_field": [] + }, + "sync_mode": "full_refresh", + "cursor_field": [], + "destination_sync_mode": "overwrite", + "primary_key": [] } ] } diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/resources/test_nested_streams/data_input/messages.txt b/airbyte-integrations/bases/base-normalization/integration_tests/resources/test_nested_streams/data_input/messages.txt index 78c9ba9121f9..ef61a0fa12f1 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/resources/test_nested_streams/data_input/messages.txt +++ b/airbyte-integrations/bases/base-normalization/integration_tests/resources/test_nested_streams/data_input/messages.txt @@ -15,3 +15,4 @@ {"type":"RECORD","record":{"stream":"unnest_alias","data":{"id":1, "children": [{"ab_id": 1, "owner": {"owner_id": 1, "column`_'with\"_quotes": [ {"currency": "EUR" } ]}},{"ab_id": 2, "owner": {"owner_id": 2, "column`_'with\"_quotes": [ {"currency": "EUR" } ]}}]},"emitted_at":1623861660}} {"type":"RECORD","record":{"stream":"unnest_alias","data":{"id":2, 
"children": [{"ab_id": 3, "owner": {"owner_id": 3, "column`_'with\"_quotes": [ {"currency": "EUR" } ]}},{"ab_id": 4, "owner": {"owner_id": 4, "column`_'with\"_quotes": [ {"currency": "EUR" } ]}}]},"emitted_at":1623861660}} +{"type":"RECORD","record":{"stream":"arrays","emitted_at":1602638599000,"data":{"array_of_strings":["string1","string2","string3"]}}} diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/resources/test_simple_streams/data_input/catalog_schema_change.json b/airbyte-integrations/bases/base-normalization/integration_tests/resources/test_simple_streams/data_input/catalog_schema_change.json index ac8cea023214..a54e89c4ff2e 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/resources/test_simple_streams/data_input/catalog_schema_change.json +++ b/airbyte-integrations/bases/base-normalization/integration_tests/resources/test_simple_streams/data_input/catalog_schema_change.json @@ -119,6 +119,38 @@ "cursor_field": [], "destination_sync_mode": "append_dedup", "primary_key": [["id"]] + }, + { + "stream": { + "name": "dedup_cdc_excluded", + "json_schema": { + "type": ["null", "object"], + "properties": { + "id": { + "type": "integer" + }, + "name": { + "type": ["string", "null"] + }, + "_ab_cdc_lsn": { + "type": ["null", "number"] + }, + "_ab_cdc_updated_at": { + "type": ["null", "number"] + }, + "_ab_cdc_deleted_at": { + "type": ["null", "number"] + } + } + }, + "supported_sync_modes": ["full_refresh", "incremental"], + "source_defined_cursor": true, + "default_cursor_field": [] + }, + "sync_mode": "incremental", + "cursor_field": ["_ab_cdc_lsn"], + "destination_sync_mode": "append_dedup", + "primary_key": [["id"]] } ] } diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/resources/test_simple_streams/data_input/messages.txt b/airbyte-integrations/bases/base-normalization/integration_tests/resources/test_simple_streams/data_input/messages.txt index c4b5b4d8543a..abec18487360 100644 
--- a/airbyte-integrations/bases/base-normalization/integration_tests/resources/test_simple_streams/data_input/messages.txt +++ b/airbyte-integrations/bases/base-normalization/integration_tests/resources/test_simple_streams/data_input/messages.txt @@ -24,6 +24,7 @@ {"type": "RECORD", "record": {"stream": "dedup_exchange_rate", "emitted_at": 1602637991100, "data": { "id": 5, "currency": "USD", "NZD": 0.01, "HKD@spƩƧiƤl & characters": 8.12, "HKD_special___characters": "column name collision?", "column`_'with\"_quotes":"ma\"z`d'a"}}} {"type": "RECORD", "record": {"stream": "dedup_exchange_rate", "emitted_at": 1602637991200, "data": { "id": 5, "currency": "USD", "NZD": 0.01, "HKD@spƩƧiƤl & characters": 9.23, "HKD_special___characters": "column name collision?", "column`_'with\"_quotes":"ma\"z`d'a"}}} +# Note that some of the IDs are inserted and then deleted; this should be reflected as a single row in the SCD model with _airbyte_active_row set to 0. {"type":"RECORD","record":{"stream":"dedup_cdc_excluded","data":{"id":1,"name":"mazda","_ab_cdc_updated_at":1623849130530,"_ab_cdc_lsn":26971624,"_ab_cdc_deleted_at":null},"emitted_at":1623859926}} {"type":"RECORD","record":{"stream":"dedup_cdc_excluded","data":{"id":2,"name":"toyata","_ab_cdc_updated_at":1623849130549,"_ab_cdc_lsn":26971624,"_ab_cdc_deleted_at":null},"emitted_at":1623859926}} {"type":"RECORD","record":{"stream":"dedup_cdc_excluded","data":{"id":4,"name":"bmw","_ab_cdc_updated_at":1623849314535,"_ab_cdc_lsn":26974776,"_ab_cdc_deleted_at":null},"emitted_at":1623860160}} @@ -31,6 +32,7 @@ {"type":"RECORD","record":{"stream":"dedup_cdc_excluded","data":{"id":4,"name":null,"_ab_cdc_updated_at":1623849314791,"_ab_cdc_lsn":26975440,"_ab_cdc_deleted_at":1623849314791},"emitted_at":1623860160}} {"type":"RECORD","record":{"stream":"dedup_cdc_excluded","data":{"id":6,"name":"opel","_ab_cdc_updated_at":1623850868109,"_ab_cdc_lsn":27009440,"_ab_cdc_deleted_at":null},"emitted_at":1623861660}} 
{"type":"RECORD","record":{"stream":"dedup_cdc_excluded","data":{"id":7,"name":"lotus","_ab_cdc_updated_at":1623850868237,"_ab_cdc_lsn":27010048,"_ab_cdc_deleted_at":null},"emitted_at":1623861660}} +# messages_incremental.txt has a dedup_cdc_excluded record with emitted_at=1623860160, i.e. older than this record. If you delete/modify this record, make sure to maintain that relationship. {"type":"RECORD","record":{"stream":"dedup_cdc_excluded","data":{"id":6,"name":null,"_ab_cdc_updated_at":1623850868371,"_ab_cdc_lsn":27010232,"_ab_cdc_deleted_at":1623850868371},"emitted_at":1623861660}} {"type":"RECORD","record":{"stream":"pos_dedup_cdcx","data":{"id":1,"name":"mazda","_ab_cdc_updated_at":1623849130530,"_ab_cdc_lsn":26971624,"_ab_cdc_log_pos": 33274,"_ab_cdc_deleted_at":null},"emitted_at":1623859926}} diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/resources/test_simple_streams/data_input/messages_incremental.txt b/airbyte-integrations/bases/base-normalization/integration_tests/resources/test_simple_streams/data_input/messages_incremental.txt index 0f4a6ee16d5e..98c8ae988e78 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/resources/test_simple_streams/data_input/messages_incremental.txt +++ b/airbyte-integrations/bases/base-normalization/integration_tests/resources/test_simple_streams/data_input/messages_incremental.txt @@ -1,20 +1,35 @@ +# Some records are duplicated from messages.txt - this mimics our "at-least-once" delivery policy. + +# Other records "go back in time", i.e. are new data but have an older emitted_at timestamp than some of the those duplicated records. +# (I think?) This mimics an interruption to normalization, such that some records were normalized but others were not. + +# These first records are old data. 
{"type": "RECORD", "record": {"stream": "exchange_rate", "emitted_at": 1602637990800, "data": { "id": 2, "currency": "EUR", "date": "", "timestamp_col": "", "NZD": 2.43, "HKD@spƩƧiƤl & characters": 5.4, "HKD_special___characters": "column name collision?", "column`_'with\"_quotes":"ma\"z`d'a"}}} {"type": "RECORD", "record": {"stream": "exchange_rate", "emitted_at": 1602637990900, "data": { "id": 3, "currency": "GBP", "NZD": 3.14, "HKD@spƩƧiƤl & characters": 9.2, "HKD_special___characters": "column name collision?", "column`_'with\"_quotes":"ma\"z`d'a"}}} +# These records are new data. {"type": "RECORD", "record": {"stream": "exchange_rate", "emitted_at": 1602650000000, "data": { "id": 2, "currency": "EUR", "NZD": 3.89, "HKD@spƩƧiƤl & characters": 14.05, "HKD_special___characters": "column name collision?", "column`_'with\"_quotes":"ma\"z`d'a"}}} {"type": "RECORD", "record": {"stream": "exchange_rate", "emitted_at": 1602650010000, "data": { "id": 4, "currency": "HKD", "NZD": 1.19, "HKD@spƩƧiƤl & characters": 0.01, "HKD_special___characters": "column name collision?", "column`_'with\"_quotes":"ma\"z`d'a"}}} {"type": "RECORD", "record": {"stream": "exchange_rate", "emitted_at": 1602650011000, "data": { "id": 1, "currency": "USD", "date": "2020-10-14", "timestamp_col": "2020-10-14T00:00:00.000-00", "NZD": 1.14, "HKD@spƩƧiƤl & characters": 9.5, "HKD_special___characters": "column name collision?", "column`_'with\"_quotes":"ma\"z`d'a"}}} {"type": "RECORD", "record": {"stream": "exchange_rate", "emitted_at": 1602650012000, "data": { "id": 5, "currency": "USD", "NZD": 0.01, "HKD@spƩƧiƤl & characters": 6.39, "HKD_special___characters": "column name collision?", "column`_'with\"_quotes":"ma\"z`d'a"}}} +# These first records are old data. 
{"type": "RECORD", "record": {"stream": "dedup_exchange_rate", "emitted_at": 1602637990800, "data": { "id": 2, "currency": "EUR", "date": "", "timestamp_col": "", "NZD": 2.43, "HKD@spƩƧiƤl & characters": 5.4, "HKD_special___characters": "column name collision?", "column`_'with\"_quotes":"ma\"z`d'a"}}} {"type": "RECORD", "record": {"stream": "dedup_exchange_rate", "emitted_at": 1602637990900, "data": { "id": 3, "currency": "GBP", "NZD": 3.14, "HKD@spƩƧiƤl & characters": 9.2, "HKD_special___characters": "column name collision?", "column`_'with\"_quotes":"ma\"z`d'a"}}} +# These records are new data. {"type": "RECORD", "record": {"stream": "dedup_exchange_rate", "emitted_at": 1602650000000, "data": { "id": 2, "currency": "EUR", "NZD": 3.89, "HKD@spƩƧiƤl & characters": 14.05, "HKD_special___characters": "column name collision?", "column`_'with\"_quotes":"ma\"z`d'a"}}} {"type": "RECORD", "record": {"stream": "dedup_exchange_rate", "emitted_at": 1602650010000, "data": { "id": 4, "currency": "HKD", "NZD": 1.19, "HKD@spƩƧiƤl & characters": 0.01, "HKD_special___characters": "column name collision?", "column`_'with\"_quotes":"ma\"z`d'a"}}} {"type": "RECORD", "record": {"stream": "dedup_exchange_rate", "emitted_at": 1602650011000, "data": { "id": 1, "currency": "USD", "date": "2020-10-14", "timestamp_col": "2020-10-14T00:00:00.000-00", "NZD": 1.14, "HKD@spƩƧiƤl & characters": 9.5, "HKD_special___characters": "column name collision?", "column`_'with\"_quotes":"ma\"z`d'a"}}} {"type": "RECORD", "record": {"stream": "dedup_exchange_rate", "emitted_at": 1602650012000, "data": { "id": 5, "currency": "USD", "NZD": 0.01, "HKD@spƩƧiƤl & characters": 6.39, "HKD_special___characters": "column name collision?", "column`_'with\"_quotes":"ma\"z`d'a"}}} +# All of these records are new data. 
+# This record has an _older_ emitted_at than the latest dedup_cdc_excluded record in messages.txt {"type":"RECORD","record":{"stream":"dedup_cdc_excluded","data":{"id":5,"name":"vw","column`_'with\"_quotes":"ma\"z`d'a","_ab_cdc_updated_at":1623849314663,"_ab_cdc_lsn":26975264,"_ab_cdc_deleted_at":null},"emitted_at":1623860160}} {"type":"RECORD","record":{"stream":"dedup_cdc_excluded","data":{"id":5,"name":null,"column`_'with\"_quotes":"ma\"z`d'a","_ab_cdc_updated_at":1623900000000,"_ab_cdc_lsn":28010252,"_ab_cdc_deleted_at":1623900000000},"emitted_at":1623900000000}} +# Previously we had a bug where we only respected deletions from the most recent _airbyte_emitted_at. This message tests that ID 5 is still correctly deleted (i.e. marked with _airbyte_active_row = 0). +# This record is also deleted in messages_schema_change.txt. +{"type":"RECORD","record":{"stream":"dedup_cdc_excluded","data":{"id":8,"name":"ford","column`_'with\"_quotes":"ma\"z`d'a","_ab_cdc_updated_at":1624000000000,"_ab_cdc_lsn":29010252,"_ab_cdc_deleted_at":null},"emitted_at":1624000000000}} +# All of these records are old data. 
{"type":"RECORD","record":{"stream":"pos_dedup_cdcx","data":{"id":1,"name":"mazda","_ab_cdc_updated_at":1623849130530,"_ab_cdc_lsn":26971624,"_ab_cdc_log_pos": 33274,"_ab_cdc_deleted_at":null},"emitted_at":1623859926}} {"type":"RECORD","record":{"stream":"pos_dedup_cdcx","data":{"id":2,"name":"toyata","_ab_cdc_updated_at":1623849130549,"_ab_cdc_lsn":26971624,"_ab_cdc_log_pos": 33275,"_ab_cdc_deleted_at":null},"emitted_at":1623859926}} {"type":"RECORD","record":{"stream":"pos_dedup_cdcx","data":{"id":2,"name":"bmw","_ab_cdc_updated_at":1623849314535,"_ab_cdc_lsn":26974776,"_ab_cdc_log_pos": 33278,"_ab_cdc_deleted_at":null},"emitted_at":1623860160}} diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/resources/test_simple_streams/data_input/messages_schema_change.txt b/airbyte-integrations/bases/base-normalization/integration_tests/resources/test_simple_streams/data_input/messages_schema_change.txt index ebe17b33d6e7..7190fe88bc35 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/resources/test_simple_streams/data_input/messages_schema_change.txt +++ b/airbyte-integrations/bases/base-normalization/integration_tests/resources/test_simple_streams/data_input/messages_schema_change.txt @@ -11,3 +11,6 @@ {"type":"RECORD","record":{"stream":"renamed_dedup_cdc_excluded","data":{"id":8,"name":"vw","column`_'with\"_quotes":"ma\"z`d'a","_ab_cdc_updated_at":1623949314663,"_ab_cdc_lsn":26985264,"_ab_cdc_deleted_at":null},"emitted_at":1623960160}} {"type":"RECORD","record":{"stream":"renamed_dedup_cdc_excluded","data":{"id":9,"name":"opel","column`_'with\"_quotes":"ma\"z`d'a","_ab_cdc_updated_at":1623950868109,"_ab_cdc_lsn":28009440,"_ab_cdc_deleted_at":null},"emitted_at":1623961660}} {"type":"RECORD","record":{"stream":"renamed_dedup_cdc_excluded","data":{"id":9,"name":null,"column`_'with\"_quotes":"ma\"z`d'a","_ab_cdc_updated_at":1623950868371,"_ab_cdc_lsn":28010232,"_ab_cdc_deleted_at":1623950868371},"emitted_at":1623961660}} 
+ +# This message tests the ability to delete a record which was inserted in a previous sync. See messages_incremental.txt for how it was inserted. +{"type":"RECORD","record":{"stream":"dedup_cdc_excluded","data":{"id":8,"name":"ford","column`_'with\"_quotes":"ma\"z`d'a","_ab_cdc_updated_at":1625000000000,"_ab_cdc_lsn":29020252,"_ab_cdc_deleted_at":1625000000000},"emitted_at":1625000000000}} diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/resources/test_simple_streams/dbt_test_config/dbt_data_tests_tmp_incremental/simple_streams_second_run_row_counts.sql b/airbyte-integrations/bases/base-normalization/integration_tests/resources/test_simple_streams/dbt_test_config/dbt_data_tests_tmp_incremental/simple_streams_second_run_row_counts.sql index ca5cdfa4fc40..8a6a3bd7486d 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/resources/test_simple_streams/dbt_test_config/dbt_data_tests_tmp_incremental/simple_streams_second_run_row_counts.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/resources/test_simple_streams/dbt_test_config/dbt_data_tests_tmp_incremental/simple_streams_second_run_row_counts.sql @@ -18,10 +18,10 @@ union all union all - select distinct '_airbyte_raw_dedup_cdc_excluded' as label, count(*) as row_count, 2 as expected_count + select distinct '_airbyte_raw_dedup_cdc_excluded' as label, count(*) as row_count, 3 as expected_count from {{ source('test_normalization', '_airbyte_raw_dedup_cdc_excluded') }} union all - select distinct 'dedup_cdc_excluded_scd' as label, count(*) as row_count, 9 as expected_count + select distinct 'dedup_cdc_excluded_scd' as label, count(*) as row_count, 10 as expected_count from {{ ref('dedup_cdc_excluded_scd') }} union all select distinct 'dedup_cdc_excluded' as label, count(*) as row_count, 4 as expected_count diff --git 
a/airbyte-integrations/bases/base-normalization/integration_tests/resources/test_simple_streams/dbt_test_config/dbt_data_tests_tmp_schema_change/simple_streams_third_run_row_counts.sql b/airbyte-integrations/bases/base-normalization/integration_tests/resources/test_simple_streams/dbt_test_config/dbt_data_tests_tmp_schema_change/simple_streams_third_run_row_counts.sql index cb886df680e9..bbf2fd047b44 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/resources/test_simple_streams/dbt_test_config/dbt_data_tests_tmp_schema_change/simple_streams_third_run_row_counts.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/resources/test_simple_streams/dbt_test_config/dbt_data_tests_tmp_schema_change/simple_streams_third_run_row_counts.sql @@ -18,13 +18,13 @@ union all union all - select distinct '_airbyte_raw_dedup_cdc_excluded' as label, count(*) as row_count, 2 as expected_count + select distinct '_airbyte_raw_dedup_cdc_excluded' as label, count(*) as row_count, 4 as expected_count from test_normalization._airbyte_raw_dedup_cdc_excluded union all - select distinct 'dedup_cdc_excluded_scd' as label, count(*) as row_count, 9 as expected_count + select distinct 'dedup_cdc_excluded_scd' as label, count(*) as row_count, 11 as expected_count from test_normalization.dedup_cdc_excluded_scd union all - select distinct 'dedup_cdc_excluded' as label, count(*) as row_count, 4 as expected_count + select distinct 'dedup_cdc_excluded' as label, count(*) as row_count, 3 as expected_count from test_normalization.dedup_cdc_excluded ) select * diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/test_ephemeral.py b/airbyte-integrations/bases/base-normalization/integration_tests/test_ephemeral.py index 22d968ec5da5..9e86a5771e33 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/test_ephemeral.py +++ b/airbyte-integrations/bases/base-normalization/integration_tests/test_ephemeral.py @@ -23,6 
+23,12 @@ @pytest.fixture(scope="module", autouse=True) def before_all_tests(request): destinations_to_test = dbt_test_utils.get_test_targets() + # set clean-up args to clean target destination after the test + clean_up_args = { + "destination_type": [d for d in DestinationType if d.value in destinations_to_test], + "test_type": "ephemeral", + "tmp_folders": temporary_folders, + } if DestinationType.POSTGRES.value not in destinations_to_test: destinations_to_test.append(DestinationType.POSTGRES.value) dbt_test_utils.set_target_schema("test_ephemeral") @@ -30,6 +36,7 @@ def before_all_tests(request): dbt_test_utils.setup_db(destinations_to_test) os.environ["PATH"] = os.path.abspath("../.venv/bin/") + ":" + os.environ["PATH"] yield + dbt_test_utils.clean_tmp_tables(**clean_up_args) dbt_test_utils.tear_down_db() for folder in temporary_folders: print(f"Deleting temporary test folder {folder}") @@ -91,6 +98,9 @@ def run_test(destination_type: DestinationType, column_count: int, expected_exce if destination_type.value == DestinationType.ORACLE.value: # Oracle does not allow changing to random schema dbt_test_utils.set_target_schema("test_normalization") + elif destination_type.value == DestinationType.REDSHIFT.value: + # set unique schema for Redshift test + dbt_test_utils.set_target_schema(dbt_test_utils.generate_random_string("test_ephemeral_")) else: dbt_test_utils.set_target_schema("test_ephemeral") print("Testing ephemeral") diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/test_normalization.py b/airbyte-integrations/bases/base-normalization/integration_tests/test_normalization.py index 8c6485796ed0..0c72fddf76a7 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/test_normalization.py +++ b/airbyte-integrations/bases/base-normalization/integration_tests/test_normalization.py @@ -28,6 +28,12 @@ @pytest.fixture(scope="module", autouse=True) def before_all_tests(request): destinations_to_test = 
dbt_test_utils.get_test_targets() + # set clean-up args to clean target destination after the test + clean_up_args = { + "destination_type": [d for d in DestinationType if d.value in destinations_to_test], + "test_type": "normalization", + "git_versioned_tests": git_versioned_tests, + } for integration_type in [d.value for d in DestinationType]: if integration_type in destinations_to_test: test_root_dir = f"{pathlib.Path().absolute()}/normalization_test_output/{integration_type.lower()}" @@ -39,11 +45,11 @@ def before_all_tests(request): dbt_test_utils.setup_db(destinations_to_test) os.environ["PATH"] = os.path.abspath("../.venv/bin/") + ":" + os.environ["PATH"] yield + dbt_test_utils.clean_tmp_tables(**clean_up_args) dbt_test_utils.tear_down_db() for folder in temporary_folders: print(f"Deleting temporary test folder {folder}") shutil.rmtree(folder, ignore_errors=True) - # TODO delete target_schema in destination by copying dbt_project.yml and injecting a on-run-end hook to clean up @pytest.fixture @@ -78,6 +84,9 @@ def test_normalization(destination_type: DestinationType, test_resource_name: st if destination_type.value == DestinationType.ORACLE.value: # Oracle does not allow changing to random schema dbt_test_utils.set_target_schema("test_normalization") + elif destination_type.value == DestinationType.REDSHIFT.value: + # set unique schema for Redshift test + dbt_test_utils.set_target_schema(dbt_test_utils.generate_random_string("test_normalization_")) try: run_test_normalization(destination_type, test_resource_name) finally: @@ -498,6 +507,11 @@ def to_lower_identifier(input: re.Match) -> str: def test_redshift_normalization_migration(tmp_path, setup_test_path): destination_type = DestinationType.REDSHIFT + clean_up_args = { + "destination_type": [destination_type], + "test_type": "ephemeral", # "ephemeral", because we parse /tmp folders + "tmp_folders": [str(tmp_path)], + } if destination_type.value not in dbt_test_utils.get_test_targets(): 
pytest.skip(f"Destinations {destination_type} is not in NORMALIZATION_TEST_TARGET env variable") base_dir = pathlib.Path(os.path.realpath(os.path.join(__file__, "../.."))) @@ -535,3 +549,5 @@ def test_redshift_normalization_migration(tmp_path, setup_test_path): run_destination_process(destination_type, tmp_path, messages_file2, "destination_catalog.json", docker_tag="dev") dbt_test_utils.dbt_run(destination_type, tmp_path, force_full_refresh=False) dbt_test(destination_type, tmp_path) + # clean-up test tables created for this test + dbt_test_utils.clean_tmp_tables(**clean_up_args) diff --git a/airbyte-integrations/bases/base-normalization/mysql.Dockerfile b/airbyte-integrations/bases/base-normalization/mysql.Dockerfile index a5349f3e323c..8e0f4ebc5b11 100644 --- a/airbyte-integrations/bases/base-normalization/mysql.Dockerfile +++ b/airbyte-integrations/bases/base-normalization/mysql.Dockerfile @@ -1,8 +1,4 @@ -# As of today, dbt-mysql doesn't support 1.0.0 -# IF YOU UPGRADE DBT, make sure to also edit these files: -# 1. Remove the "normalization-mysql" entry here https://github.com/airbytehq/airbyte/pull/11267/files#diff-9a3bcae8cb5c56aa30c00548e06eade6ad771f3d4f098f6867ae9a183049dfd8R404 -# 2. Check if oracle.Dockerfile is on DBT 1.0.0 yet; if it is, then revert this entire edit https://github.com/airbytehq/airbyte/pull/11267/files#diff-8880e85b2b5690accc6f15f9292a8589a6eb83564803d57c4ee74e2ee8ede09eR117-R130 -FROM fishtownanalytics/dbt:0.19.0 +FROM fishtownanalytics/dbt:1.0.0 COPY --from=airbyte/base-airbyte-protocol-python:0.1.1 /airbyte /airbyte # Install SSH Tunneling dependencies @@ -24,8 +20,7 @@ RUN pip install . WORKDIR /airbyte/normalization_code RUN pip install . 
-# Based of https://github.com/dbeatty10/dbt-mysql/tree/dev/0.19.0 -RUN pip install dbt-mysql==0.19.0 +RUN pip install dbt-mysql==1.0.0 WORKDIR /airbyte/normalization_code/dbt-template/ # Download external dbt dependencies diff --git a/airbyte-integrations/bases/base-normalization/normalization/transform_catalog/stream_processor.py b/airbyte-integrations/bases/base-normalization/normalization/transform_catalog/stream_processor.py index 161e9bcfae38..544b030dbedb 100644 --- a/airbyte-integrations/bases/base-normalization/normalization/transform_catalog/stream_processor.py +++ b/airbyte-integrations/bases/base-normalization/normalization/transform_catalog/stream_processor.py @@ -290,6 +290,7 @@ def process(self) -> List["StreamProcessor"]: is_intermediate=True, suffix="stg", ) + from_table = self.add_to_outputs( self.generate_scd_type_2_model(from_table, column_names), self.get_model_materialization_mode(is_intermediate=False, column_count=column_count), @@ -302,7 +303,7 @@ def process(self) -> List["StreamProcessor"]: where_clause = f"\nand {self.name_transformer.normalize_column_name('_airbyte_active_row')} = 1" # from_table should not use the de-duplicated final table or tables downstream (nested streams) will miss non active rows self.add_to_outputs( - self.generate_final_model(from_table, column_names, self.get_unique_key()) + where_clause, + self.generate_final_model(from_table, column_names, unique_key=self.get_unique_key()) + where_clause, self.get_model_materialization_mode(is_intermediate=False, column_count=column_count), is_intermediate=False, unique_key=self.get_unique_key(), @@ -455,6 +456,8 @@ def extract_json_column(property_name: str, json_column_name: str, definition: D if "type" in definition: if is_array(definition["type"]): json_extract = jinja_call(f"json_extract_array({json_column_name}, {json_path}, {normalized_json_path})") + if is_simple_property(definition.get("items", {"type": "object"}).get("type", "object")): + json_extract = 
jinja_call(f"json_extract_string_array({json_column_name}, {json_path}, {normalized_json_path})") elif is_object(definition["type"]): json_extract = jinja_call(f"json_extract('{table_alias}', {json_column_name}, {json_path}, {normalized_json_path})") elif is_simple_property(definition["type"]): @@ -696,6 +699,13 @@ def safe_cast_to_string(definition: Dict, column_name: str, destination_type: De return col def generate_scd_type_2_model(self, from_table: str, column_names: Dict[str, Tuple[str, str]]) -> Any: + """ + This model pulls data from the ID-hashing model and appends it to a log of record updates. When inserting an update to a record, it also + checks whether that record had a previously-existing row in the SCD model; if it does, then that previous row's end_at column is set to + the new update's start_at. + + See the docs for more details: https://docs.airbyte.com/understanding-airbyte/basic-normalization#normalization-metadata-columns + """ cursor_field = self.get_cursor_field(column_names) order_null = f"is null asc,\n {cursor_field} desc" if self.destination_type.value == DestinationType.ORACLE.value: @@ -787,6 +797,7 @@ def generate_scd_type_2_model(self, from_table: str, column_names: Dict[str, Tup "fields": self.list_fields(column_names), "from_table": from_table, "hash_id": self.hash_id(), + "incremental_clause": self.get_incremental_clause("this"), "input_data_table": input_data_table, "lag_begin": lag_begin, "lag_end": lag_end, @@ -857,7 +868,7 @@ def generate_scd_type_2_model(self, from_table: str, column_names: Dict[str, Tup from {{'{{'}} {{ from_table }} {{'}}'}} {{ sql_table_comment }} where 1 = 1 - {{'{{'}} incremental_clause({{ quoted_col_emitted_at }}) {{'}}'}} + {{ incremental_clause }} ), new_data_ids as ( -- build a subset of {{ unique_key }} from rows that are new @@ -1022,6 +1033,10 @@ def get_primary_key_from_path(self, column_names: Dict[str, Tuple[str, str]], pa raise ValueError(f"No path specified for stream {self.stream_name}") def 
generate_final_model(self, from_table: str, column_names: Dict[str, Tuple[str, str]], unique_key: str = "") -> Any: + """ + This is the table that the user actually wants. In addition to the columns that the source outputs, it has some additional metadata columns; + see the basic normalization docs for an explanation: https://docs.airbyte.com/understanding-airbyte/basic-normalization#normalization-metadata-columns + """ template = Template( """ -- Final base SQL model @@ -1066,15 +1081,18 @@ def add_incremental_clause(self, sql_query: str) -> Any: template = Template( """ {{ sql_query }} -{{'{{'}} incremental_clause({{ col_emitted_at }}) {{'}}'}} +{{ incremental_clause }} """ ) - sql = template.render( - sql_query=sql_query, - col_emitted_at=self.get_emitted_at(in_jinja=True), - ) + sql = template.render(sql_query=sql_query, incremental_clause=self.get_incremental_clause("this")) return sql + def get_incremental_clause(self, tablename: str) -> Any: + return self.get_incremental_clause_for_column(tablename, self.get_emitted_at(in_jinja=True)) + + def get_incremental_clause_for_column(self, tablename: str, column: str) -> Any: + return "{{ incremental_clause(" + column + ", " + tablename + ") }}" + @staticmethod def list_fields(column_names: Dict[str, Tuple[str, str]]) -> List[str]: return [column_names[field][0] for field in column_names] @@ -1106,20 +1124,112 @@ def add_to_outputs( else: config["schema"] = f'"{schema}"' if self.is_incremental_mode(self.destination_sync_mode): + stg_schema = self.get_schema(True) + stg_table = self.tables_registry.get_file_name(schema, self.json_path, self.stream_name, "stg", truncate_name) + if self.name_transformer.needs_quotes(stg_table): + stg_table = jinja_call(self.name_transformer.apply_quote(stg_table)) if suffix == "scd": - stg_schema = self.get_schema(True) - stg_table = self.tables_registry.get_file_name(schema, self.json_path, self.stream_name, "stg", truncate_name) - if self.name_transformer.needs_quotes(stg_table): - 
stg_table = jinja_call(self.name_transformer.apply_quote(stg_table)) + hooks = [] + + final_table_name = self.tables_registry.get_file_name(schema, self.json_path, self.stream_name, "", truncate_name) + active_row_column_name = self.name_transformer.normalize_column_name("_airbyte_active_row") + clickhouse_nullable_join_setting = "" + if self.destination_type == DestinationType.CLICKHOUSE: + # Clickhouse has special delete syntax + delete_statement = "alter table {{ final_table_relation }} delete" + unique_key_reference = self.get_unique_key(in_jinja=False) + noop_delete_statement = "alter table {{ this }} delete where 1=0" + # Without this, our LEFT JOIN would return empty string for non-matching rows, so our COUNT would include those rows. + # We want to exclude them (this is the default behavior in other DBs) so we have to set join_use_nulls=1 + clickhouse_nullable_join_setting = "SETTINGS join_use_nulls=1" + elif self.destination_type == DestinationType.BIGQUERY: + # Bigquery doesn't like the "delete from project.schema.table where project.schema.table.column in" syntax; + # it requires "delete from project.schema.table table_alias where table_alias.column in" + delete_statement = "delete from {{ final_table_relation }} final_table" + unique_key_reference = "final_table." + self.get_unique_key(in_jinja=False) + noop_delete_statement = "delete from {{ this }} where 1=0" + else: + delete_statement = "delete from {{ final_table_relation }}" + unique_key_reference = "{{ final_table_relation }}." + self.get_unique_key(in_jinja=False) + noop_delete_statement = "delete from {{ this }} where 1=0" + deletion_hook = Template( + """ + {{ '{%' }} + set final_table_relation = adapter.get_relation( + database=this.database, + schema=this.schema, + identifier='{{ final_table_name }}' + ) + {{ '%}' }} + {{ '{#' }} + If the final table doesn't exist, then obviously we can't delete anything from it. 
+ Also, after a reset, the final table is created without the _airbyte_unique_key column (this column is created during the first sync) + So skip this deletion if the column doesn't exist. (in this case, the table is guaranteed to be empty anyway) + {{ '#}' }} + {{ '{%' }} + if final_table_relation is not none and {{ quoted_unique_key }} in adapter.get_columns_in_relation(final_table_relation)|map(attribute='name') + {{ '%}' }} + + -- Delete records which are no longer active: + -- This query is equivalent, but the left join version is more performant: + -- delete from final_table where unique_key in ( + -- select unique_key from scd_table where 1 = 1 + -- ) and unique_key not in ( + -- select unique_key from scd_table where active_row = 1 + -- ) + -- We're incremental against normalized_at rather than emitted_at because we need to fetch the SCD + -- entries that were _updated_ recently. This is because a deleted record will have an SCD record + -- which was emitted a long time ago, but recently re-normalized to have active_row = 0. 
+ {{ delete_statement }} where {{ unique_key_reference }} in ( + select recent_records.unique_key + from ( + select distinct {{ unique_key }} as unique_key + from {{ '{{ this }}' }} + where 1=1 {{ normalized_at_incremental_clause }} + ) recent_records + left join ( + select {{ unique_key }} as unique_key, count({{ unique_key }}) as active_count + from {{ '{{ this }}' }} + where {{ active_row_column_name }} = 1 {{ normalized_at_incremental_clause }} + group by {{ unique_key }} + ) active_counts + on recent_records.unique_key = active_counts.unique_key + where active_count is null or active_count = 0 + ) + {{ '{% else %}' }} + -- We have to have a non-empty query, so just do a noop delete + {{ noop_delete_statement }} + {{ '{% endif %}' }} + """ + ).render( + delete_statement=delete_statement, + noop_delete_statement=noop_delete_statement, + final_table_name=final_table_name, + unique_key=self.get_unique_key(in_jinja=False), + quoted_unique_key=self.get_unique_key(in_jinja=True), + active_row_column_name=active_row_column_name, + normalized_at_incremental_clause=self.get_incremental_clause_for_column( + "this.schema + '.' 
+ " + self.name_transformer.apply_quote(final_table_name), + self.get_normalized_at(in_jinja=True), + ), + unique_key_reference=unique_key_reference, + clickhouse_nullable_join_setting=clickhouse_nullable_join_setting, + ) + hooks.append(deletion_hook) + if self.destination_type.value == DestinationType.POSTGRES.value: # Keep only rows with the max emitted_at to keep incremental behavior - config["post_hook"] = ( - f'["delete from {stg_schema}.{stg_table} ' - + f"where {self.airbyte_emitted_at} != (select max({self.airbyte_emitted_at}) " - + f'from {stg_schema}.{stg_table})"]' + hooks.append( + f"delete from {stg_schema}.{stg_table} where {self.airbyte_emitted_at} != (select max({self.airbyte_emitted_at}) from {stg_schema}.{stg_table})", ) else: - config["post_hook"] = f'["drop view {stg_schema}.{stg_table}"]' + hooks.append(f"drop view {stg_schema}.{stg_table}") + + # Explicit function so that we can have type hints to satisfy the linter + def wrap_in_quotes(s: str) -> str: + return '"' + s + '"' + + config["post_hook"] = "[" + ",".join(map(wrap_in_quotes, hooks)) + "]" else: # incremental is handled in the SCD SQL already sql = self.add_incremental_clause(sql) diff --git a/airbyte-integrations/bases/base-normalization/normalization/transform_config/transform.py b/airbyte-integrations/bases/base-normalization/normalization/transform_config/transform.py index e8eccdf967bd..42e3838b8d7c 100644 --- a/airbyte-integrations/bases/base-normalization/normalization/transform_config/transform.py +++ b/airbyte-integrations/bases/base-normalization/normalization/transform_config/transform.py @@ -267,6 +267,11 @@ def transform_oracle(config: Dict[str, Any]): def transform_mssql(config: Dict[str, Any]): print("transform_mssql") # https://docs.getdbt.com/reference/warehouse-profiles/mssql-profile + + if TransformConfig.is_ssh_tunnelling(config): + config = TransformConfig.get_ssh_altered_config(config, port_key="port", host_key="host") + config["host"] = "127.0.0.1" # localhost 
is not supported by dbt-sqlserver. + dbt_config = { "type": "sqlserver", "driver": "ODBC Driver 17 for SQL Server", diff --git a/airbyte-integrations/bases/base-normalization/snowflake.Dockerfile b/airbyte-integrations/bases/base-normalization/snowflake.Dockerfile index c56167f953a4..bdc5a914889e 100644 --- a/airbyte-integrations/bases/base-normalization/snowflake.Dockerfile +++ b/airbyte-integrations/bases/base-normalization/snowflake.Dockerfile @@ -29,5 +29,5 @@ WORKDIR /airbyte ENV AIRBYTE_ENTRYPOINT "/airbyte/entrypoint.sh" ENTRYPOINT ["/airbyte/entrypoint.sh"] -LABEL io.airbyte.version=0.1.73 +LABEL io.airbyte.version=0.2.5 LABEL io.airbyte.name=airbyte/normalization-snowflake diff --git a/airbyte-integrations/bases/debezium/build.gradle b/airbyte-integrations/bases/debezium-v1-4-2/build.gradle similarity index 100% rename from airbyte-integrations/bases/debezium/build.gradle rename to airbyte-integrations/bases/debezium-v1-4-2/build.gradle diff --git a/airbyte-integrations/bases/debezium/src/main/java/io/airbyte/integrations/debezium/AirbyteDebeziumHandler.java b/airbyte-integrations/bases/debezium-v1-4-2/src/main/java/io/airbyte/integrations/debezium/AirbyteDebeziumHandler.java similarity index 94% rename from airbyte-integrations/bases/debezium/src/main/java/io/airbyte/integrations/debezium/AirbyteDebeziumHandler.java rename to airbyte-integrations/bases/debezium-v1-4-2/src/main/java/io/airbyte/integrations/debezium/AirbyteDebeziumHandler.java index 9d89d5a5a781..f9dc60228cdb 100644 --- a/airbyte-integrations/bases/debezium/src/main/java/io/airbyte/integrations/debezium/AirbyteDebeziumHandler.java +++ b/airbyte-integrations/bases/debezium-v1-4-2/src/main/java/io/airbyte/integrations/debezium/AirbyteDebeziumHandler.java @@ -17,6 +17,8 @@ import io.airbyte.integrations.debezium.internals.FilteredFileDatabaseHistory; import io.airbyte.protocol.models.AirbyteMessage; import io.airbyte.protocol.models.ConfiguredAirbyteCatalog; +import 
io.airbyte.protocol.models.ConfiguredAirbyteStream; +import io.airbyte.protocol.models.SyncMode; import io.debezium.engine.ChangeEvent; import java.time.Instant; import java.util.Collections; @@ -120,4 +122,9 @@ private Optional schemaHistoryManager(final CdcSave return Optional.empty(); } + public static boolean shouldUseCDC(final ConfiguredAirbyteCatalog catalog) { + return catalog.getStreams().stream().map(ConfiguredAirbyteStream::getSyncMode) + .anyMatch(syncMode -> syncMode == SyncMode.INCREMENTAL); + } + } diff --git a/airbyte-integrations/bases/debezium/src/main/java/io/airbyte/integrations/debezium/CdcMetadataInjector.java b/airbyte-integrations/bases/debezium-v1-4-2/src/main/java/io/airbyte/integrations/debezium/CdcMetadataInjector.java similarity index 100% rename from airbyte-integrations/bases/debezium/src/main/java/io/airbyte/integrations/debezium/CdcMetadataInjector.java rename to airbyte-integrations/bases/debezium-v1-4-2/src/main/java/io/airbyte/integrations/debezium/CdcMetadataInjector.java diff --git a/airbyte-integrations/bases/debezium/src/main/java/io/airbyte/integrations/debezium/CdcSavedInfoFetcher.java b/airbyte-integrations/bases/debezium-v1-4-2/src/main/java/io/airbyte/integrations/debezium/CdcSavedInfoFetcher.java similarity index 100% rename from airbyte-integrations/bases/debezium/src/main/java/io/airbyte/integrations/debezium/CdcSavedInfoFetcher.java rename to airbyte-integrations/bases/debezium-v1-4-2/src/main/java/io/airbyte/integrations/debezium/CdcSavedInfoFetcher.java diff --git a/airbyte-integrations/bases/debezium/src/main/java/io/airbyte/integrations/debezium/CdcStateHandler.java b/airbyte-integrations/bases/debezium-v1-4-2/src/main/java/io/airbyte/integrations/debezium/CdcStateHandler.java similarity index 100% rename from airbyte-integrations/bases/debezium/src/main/java/io/airbyte/integrations/debezium/CdcStateHandler.java rename to 
airbyte-integrations/bases/debezium-v1-4-2/src/main/java/io/airbyte/integrations/debezium/CdcStateHandler.java diff --git a/airbyte-integrations/bases/debezium/src/main/java/io/airbyte/integrations/debezium/CdcTargetPosition.java b/airbyte-integrations/bases/debezium-v1-4-2/src/main/java/io/airbyte/integrations/debezium/CdcTargetPosition.java similarity index 100% rename from airbyte-integrations/bases/debezium/src/main/java/io/airbyte/integrations/debezium/CdcTargetPosition.java rename to airbyte-integrations/bases/debezium-v1-4-2/src/main/java/io/airbyte/integrations/debezium/CdcTargetPosition.java diff --git a/airbyte-integrations/bases/debezium/src/main/java/io/airbyte/integrations/debezium/internals/AirbyteFileOffsetBackingStore.java b/airbyte-integrations/bases/debezium-v1-4-2/src/main/java/io/airbyte/integrations/debezium/internals/AirbyteFileOffsetBackingStore.java similarity index 100% rename from airbyte-integrations/bases/debezium/src/main/java/io/airbyte/integrations/debezium/internals/AirbyteFileOffsetBackingStore.java rename to airbyte-integrations/bases/debezium-v1-4-2/src/main/java/io/airbyte/integrations/debezium/internals/AirbyteFileOffsetBackingStore.java diff --git a/airbyte-integrations/bases/debezium/src/main/java/io/airbyte/integrations/debezium/internals/AirbyteSchemaHistoryStorage.java b/airbyte-integrations/bases/debezium-v1-4-2/src/main/java/io/airbyte/integrations/debezium/internals/AirbyteSchemaHistoryStorage.java similarity index 100% rename from airbyte-integrations/bases/debezium/src/main/java/io/airbyte/integrations/debezium/internals/AirbyteSchemaHistoryStorage.java rename to airbyte-integrations/bases/debezium-v1-4-2/src/main/java/io/airbyte/integrations/debezium/internals/AirbyteSchemaHistoryStorage.java diff --git a/airbyte-integrations/bases/debezium/src/main/java/io/airbyte/integrations/debezium/internals/DebeziumConverterUtils.java 
b/airbyte-integrations/bases/debezium-v1-4-2/src/main/java/io/airbyte/integrations/debezium/internals/DebeziumConverterUtils.java similarity index 100% rename from airbyte-integrations/bases/debezium/src/main/java/io/airbyte/integrations/debezium/internals/DebeziumConverterUtils.java rename to airbyte-integrations/bases/debezium-v1-4-2/src/main/java/io/airbyte/integrations/debezium/internals/DebeziumConverterUtils.java diff --git a/airbyte-integrations/bases/debezium/src/main/java/io/airbyte/integrations/debezium/internals/DebeziumEventUtils.java b/airbyte-integrations/bases/debezium-v1-4-2/src/main/java/io/airbyte/integrations/debezium/internals/DebeziumEventUtils.java similarity index 100% rename from airbyte-integrations/bases/debezium/src/main/java/io/airbyte/integrations/debezium/internals/DebeziumEventUtils.java rename to airbyte-integrations/bases/debezium-v1-4-2/src/main/java/io/airbyte/integrations/debezium/internals/DebeziumEventUtils.java diff --git a/airbyte-integrations/bases/debezium/src/main/java/io/airbyte/integrations/debezium/internals/DebeziumRecordIterator.java b/airbyte-integrations/bases/debezium-v1-4-2/src/main/java/io/airbyte/integrations/debezium/internals/DebeziumRecordIterator.java similarity index 100% rename from airbyte-integrations/bases/debezium/src/main/java/io/airbyte/integrations/debezium/internals/DebeziumRecordIterator.java rename to airbyte-integrations/bases/debezium-v1-4-2/src/main/java/io/airbyte/integrations/debezium/internals/DebeziumRecordIterator.java diff --git a/airbyte-integrations/bases/debezium/src/main/java/io/airbyte/integrations/debezium/internals/DebeziumRecordPublisher.java b/airbyte-integrations/bases/debezium-v1-4-2/src/main/java/io/airbyte/integrations/debezium/internals/DebeziumRecordPublisher.java similarity index 100% rename from airbyte-integrations/bases/debezium/src/main/java/io/airbyte/integrations/debezium/internals/DebeziumRecordPublisher.java rename to 
airbyte-integrations/bases/debezium-v1-4-2/src/main/java/io/airbyte/integrations/debezium/internals/DebeziumRecordPublisher.java diff --git a/airbyte-integrations/bases/debezium/src/main/java/io/airbyte/integrations/debezium/internals/FilteredFileDatabaseHistory.java b/airbyte-integrations/bases/debezium-v1-4-2/src/main/java/io/airbyte/integrations/debezium/internals/FilteredFileDatabaseHistory.java similarity index 100% rename from airbyte-integrations/bases/debezium/src/main/java/io/airbyte/integrations/debezium/internals/FilteredFileDatabaseHistory.java rename to airbyte-integrations/bases/debezium-v1-4-2/src/main/java/io/airbyte/integrations/debezium/internals/FilteredFileDatabaseHistory.java diff --git a/airbyte-integrations/bases/debezium/src/main/java/io/airbyte/integrations/debezium/internals/MSSQLConverter.java b/airbyte-integrations/bases/debezium-v1-4-2/src/main/java/io/airbyte/integrations/debezium/internals/MSSQLConverter.java similarity index 100% rename from airbyte-integrations/bases/debezium/src/main/java/io/airbyte/integrations/debezium/internals/MSSQLConverter.java rename to airbyte-integrations/bases/debezium-v1-4-2/src/main/java/io/airbyte/integrations/debezium/internals/MSSQLConverter.java diff --git a/airbyte-integrations/bases/debezium-v1-4-2/src/main/java/io/airbyte/integrations/debezium/internals/MySQLConverter.java b/airbyte-integrations/bases/debezium-v1-4-2/src/main/java/io/airbyte/integrations/debezium/internals/MySQLConverter.java new file mode 100644 index 000000000000..ac099bc15cdc --- /dev/null +++ b/airbyte-integrations/bases/debezium-v1-4-2/src/main/java/io/airbyte/integrations/debezium/internals/MySQLConverter.java @@ -0,0 +1,75 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+ */ + +package io.airbyte.integrations.debezium.internals; + +import io.airbyte.db.DataTypeUtils; +import io.debezium.spi.converter.CustomConverter; +import io.debezium.spi.converter.RelationalColumn; +import java.nio.charset.StandardCharsets; +import java.time.LocalDate; +import java.util.Arrays; +import java.util.Properties; +import org.apache.kafka.connect.data.SchemaBuilder; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * This is a custom debezium converter used in MySQL to handle the DATETIME data type. We need a + * custom converter cause by default debezium returns the DATETIME values as numbers. We need to + * convert it to proper format. Ref : + * https://debezium.io/documentation/reference/1.4/development/converters.html This is built from + * reference with {@link io.debezium.connector.mysql.converters.TinyIntOneToBooleanConverter} If you + * rename this class then remember to rename the datetime.type property value in + * io.airbyte-integrations.source.mysql.MySqlCdcProperties#getDebeziumProperties() (If you don't + * rename, a test would still fail but it might be tricky to figure out where to change the property + * name) + */ +public class MySQLConverter implements CustomConverter { + + private static final Logger LOGGER = LoggerFactory.getLogger(MySQLConverter.class); + + private final String[] DATE_TYPES = {"DATE", "DATETIME", "TIME"}; + private final String[] TEXT_TYPES = {"CHAR", "VARCHAR", "TEXT", "LONGTEXT", "TINYTEXT", "MEDIUMTEXT"}; + + @Override + public void configure(final Properties props) {} + + @Override + public void converterFor(final RelationalColumn field, final ConverterRegistration registration) { + if (Arrays.stream(DATE_TYPES).anyMatch(s -> s.equalsIgnoreCase(field.typeName()))) { + registerDate(field, registration); + } else if (Arrays.stream(TEXT_TYPES).anyMatch(s -> s.equalsIgnoreCase(field.typeName()))) { + registerText(field, registration); + } + } + + private void registerText(final RelationalColumn 
field, final ConverterRegistration registration) { + registration.register(SchemaBuilder.string(), x -> { + if (x == null) { + return DebeziumConverterUtils.convertDefaultValue(field); + } + if (x instanceof byte[]) { + return new String((byte[]) x, StandardCharsets.UTF_8); + } else { + return x.toString(); + } + }); + } + + /** + * The debezium driver replaces Zero-value by Null even when this column is mandatory. According to + * the doc, it should be done by driver, but it fails. + */ + private Object convertDefaultValueNullDate(final RelationalColumn field) { + final var defaultValue = DebeziumConverterUtils.convertDefaultValue(field); + return (defaultValue == null && !field.isOptional() ? DataTypeUtils.toISO8601String(LocalDate.EPOCH) : defaultValue); + } + + private void registerDate(final RelationalColumn field, final ConverterRegistration registration) { + registration.register(SchemaBuilder.string(), + x -> x == null ? convertDefaultValueNullDate(field) : DebeziumConverterUtils.convertDate(x)); + } + +} diff --git a/airbyte-integrations/bases/debezium/src/main/java/io/airbyte/integrations/debezium/internals/PostgresConverter.java b/airbyte-integrations/bases/debezium-v1-4-2/src/main/java/io/airbyte/integrations/debezium/internals/PostgresConverter.java similarity index 100% rename from airbyte-integrations/bases/debezium/src/main/java/io/airbyte/integrations/debezium/internals/PostgresConverter.java rename to airbyte-integrations/bases/debezium-v1-4-2/src/main/java/io/airbyte/integrations/debezium/internals/PostgresConverter.java diff --git a/airbyte-integrations/bases/debezium/src/main/java/io/airbyte/integrations/debezium/internals/SnapshotMetadata.java b/airbyte-integrations/bases/debezium-v1-4-2/src/main/java/io/airbyte/integrations/debezium/internals/SnapshotMetadata.java similarity index 100% rename from airbyte-integrations/bases/debezium/src/main/java/io/airbyte/integrations/debezium/internals/SnapshotMetadata.java rename to 
airbyte-integrations/bases/debezium-v1-4-2/src/main/java/io/airbyte/integrations/debezium/internals/SnapshotMetadata.java diff --git a/airbyte-integrations/bases/debezium-v1-4-2/src/test/java/io/airbyte/integrations/debezium/AirbyteDebeziumHandlerTest.java b/airbyte-integrations/bases/debezium-v1-4-2/src/test/java/io/airbyte/integrations/debezium/AirbyteDebeziumHandlerTest.java new file mode 100644 index 000000000000..45d50612f792 --- /dev/null +++ b/airbyte-integrations/bases/debezium-v1-4-2/src/test/java/io/airbyte/integrations/debezium/AirbyteDebeziumHandlerTest.java @@ -0,0 +1,56 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.integrations.debezium; + +import com.google.common.collect.Lists; +import io.airbyte.protocol.models.AirbyteCatalog; +import io.airbyte.protocol.models.CatalogHelpers; +import io.airbyte.protocol.models.ConfiguredAirbyteCatalog; +import io.airbyte.protocol.models.Field; +import io.airbyte.protocol.models.JsonSchemaType; +import io.airbyte.protocol.models.SyncMode; +import java.util.List; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; + +public class AirbyteDebeziumHandlerTest { + + @Test + public void shouldUseCdcTestShouldReturnTrue() { + final AirbyteCatalog catalog = new AirbyteCatalog().withStreams(List.of( + CatalogHelpers.createAirbyteStream( + "MODELS_STREAM_NAME", + "MODELS_SCHEMA", + Field.of("COL_ID", JsonSchemaType.NUMBER), + Field.of("COL_MAKE_ID", JsonSchemaType.NUMBER), + Field.of("COL_MODEL", JsonSchemaType.STRING)) + .withSupportedSyncModes(Lists.newArrayList(SyncMode.FULL_REFRESH, SyncMode.INCREMENTAL)) + .withSourceDefinedPrimaryKey(List.of(List.of("COL_ID"))))); + final ConfiguredAirbyteCatalog configuredCatalog = CatalogHelpers + .toDefaultConfiguredCatalog(catalog); + // set all streams to incremental. 
+ configuredCatalog.getStreams().forEach(s -> s.setSyncMode(SyncMode.INCREMENTAL)); + + Assertions.assertTrue(AirbyteDebeziumHandler.shouldUseCDC(configuredCatalog)); + } + + @Test + public void shouldUseCdcTestShouldReturnFalse() { + final AirbyteCatalog catalog = new AirbyteCatalog().withStreams(List.of( + CatalogHelpers.createAirbyteStream( + "MODELS_STREAM_NAME", + "MODELS_SCHEMA", + Field.of("COL_ID", JsonSchemaType.NUMBER), + Field.of("COL_MAKE_ID", JsonSchemaType.NUMBER), + Field.of("COL_MODEL", JsonSchemaType.STRING)) + .withSupportedSyncModes(Lists.newArrayList(SyncMode.FULL_REFRESH, SyncMode.INCREMENTAL)) + .withSourceDefinedPrimaryKey(List.of(List.of("COL_ID"))))); + final ConfiguredAirbyteCatalog configuredCatalog = CatalogHelpers + .toDefaultConfiguredCatalog(catalog); + + Assertions.assertFalse(AirbyteDebeziumHandler.shouldUseCDC(configuredCatalog)); + } + +} diff --git a/airbyte-integrations/bases/debezium/src/test/java/io/airbyte/integrations/debezium/AirbyteFileOffsetBackingStoreTest.java b/airbyte-integrations/bases/debezium-v1-4-2/src/test/java/io/airbyte/integrations/debezium/AirbyteFileOffsetBackingStoreTest.java similarity index 100% rename from airbyte-integrations/bases/debezium/src/test/java/io/airbyte/integrations/debezium/AirbyteFileOffsetBackingStoreTest.java rename to airbyte-integrations/bases/debezium-v1-4-2/src/test/java/io/airbyte/integrations/debezium/AirbyteFileOffsetBackingStoreTest.java diff --git a/airbyte-integrations/bases/debezium/src/test/java/io/airbyte/integrations/debezium/DebeziumEventUtilsTest.java b/airbyte-integrations/bases/debezium-v1-4-2/src/test/java/io/airbyte/integrations/debezium/DebeziumEventUtilsTest.java similarity index 100% rename from airbyte-integrations/bases/debezium/src/test/java/io/airbyte/integrations/debezium/DebeziumEventUtilsTest.java rename to airbyte-integrations/bases/debezium-v1-4-2/src/test/java/io/airbyte/integrations/debezium/DebeziumEventUtilsTest.java diff --git 
a/airbyte-integrations/bases/debezium/src/test/java/io/airbyte/integrations/debezium/DebeziumRecordPublisherTest.java b/airbyte-integrations/bases/debezium-v1-4-2/src/test/java/io/airbyte/integrations/debezium/DebeziumRecordPublisherTest.java similarity index 100% rename from airbyte-integrations/bases/debezium/src/test/java/io/airbyte/integrations/debezium/DebeziumRecordPublisherTest.java rename to airbyte-integrations/bases/debezium-v1-4-2/src/test/java/io/airbyte/integrations/debezium/DebeziumRecordPublisherTest.java diff --git a/airbyte-integrations/bases/debezium/src/test/java/io/airbyte/integrations/debezium/internals/DebeziumConverterUtilsTest.java b/airbyte-integrations/bases/debezium-v1-4-2/src/test/java/io/airbyte/integrations/debezium/internals/DebeziumConverterUtilsTest.java similarity index 100% rename from airbyte-integrations/bases/debezium/src/test/java/io/airbyte/integrations/debezium/internals/DebeziumConverterUtilsTest.java rename to airbyte-integrations/bases/debezium-v1-4-2/src/test/java/io/airbyte/integrations/debezium/internals/DebeziumConverterUtilsTest.java diff --git a/airbyte-integrations/bases/debezium/src/test/resources/delete_change_event.json b/airbyte-integrations/bases/debezium-v1-4-2/src/test/resources/delete_change_event.json similarity index 100% rename from airbyte-integrations/bases/debezium/src/test/resources/delete_change_event.json rename to airbyte-integrations/bases/debezium-v1-4-2/src/test/resources/delete_change_event.json diff --git a/airbyte-integrations/bases/debezium/src/test/resources/delete_message.json b/airbyte-integrations/bases/debezium-v1-4-2/src/test/resources/delete_message.json similarity index 100% rename from airbyte-integrations/bases/debezium/src/test/resources/delete_message.json rename to airbyte-integrations/bases/debezium-v1-4-2/src/test/resources/delete_message.json diff --git a/airbyte-integrations/bases/debezium/src/test/resources/insert_change_event.json 
b/airbyte-integrations/bases/debezium-v1-4-2/src/test/resources/insert_change_event.json similarity index 100% rename from airbyte-integrations/bases/debezium/src/test/resources/insert_change_event.json rename to airbyte-integrations/bases/debezium-v1-4-2/src/test/resources/insert_change_event.json diff --git a/airbyte-integrations/bases/debezium/src/test/resources/insert_message.json b/airbyte-integrations/bases/debezium-v1-4-2/src/test/resources/insert_message.json similarity index 100% rename from airbyte-integrations/bases/debezium/src/test/resources/insert_message.json rename to airbyte-integrations/bases/debezium-v1-4-2/src/test/resources/insert_message.json diff --git a/airbyte-integrations/bases/debezium/src/test/resources/test_debezium_offset.dat b/airbyte-integrations/bases/debezium-v1-4-2/src/test/resources/test_debezium_offset.dat similarity index 100% rename from airbyte-integrations/bases/debezium/src/test/resources/test_debezium_offset.dat rename to airbyte-integrations/bases/debezium-v1-4-2/src/test/resources/test_debezium_offset.dat diff --git a/airbyte-integrations/bases/debezium/src/test/resources/update_change_event.json b/airbyte-integrations/bases/debezium-v1-4-2/src/test/resources/update_change_event.json similarity index 100% rename from airbyte-integrations/bases/debezium/src/test/resources/update_change_event.json rename to airbyte-integrations/bases/debezium-v1-4-2/src/test/resources/update_change_event.json diff --git a/airbyte-integrations/bases/debezium/src/test/resources/update_message.json b/airbyte-integrations/bases/debezium-v1-4-2/src/test/resources/update_message.json similarity index 100% rename from airbyte-integrations/bases/debezium/src/test/resources/update_message.json rename to airbyte-integrations/bases/debezium-v1-4-2/src/test/resources/update_message.json diff --git a/airbyte-integrations/bases/debezium/src/testFixtures/java/io/airbyte/integrations/debezium/CdcSourceTest.java 
b/airbyte-integrations/bases/debezium-v1-4-2/src/testFixtures/java/io/airbyte/integrations/debezium/CdcSourceTest.java similarity index 98% rename from airbyte-integrations/bases/debezium/src/testFixtures/java/io/airbyte/integrations/debezium/CdcSourceTest.java rename to airbyte-integrations/bases/debezium-v1-4-2/src/testFixtures/java/io/airbyte/integrations/debezium/CdcSourceTest.java index a1049f0b7450..04cd2bfc20b8 100644 --- a/airbyte-integrations/bases/debezium/src/testFixtures/java/io/airbyte/integrations/debezium/CdcSourceTest.java +++ b/airbyte-integrations/bases/debezium-v1-4-2/src/testFixtures/java/io/airbyte/integrations/debezium/CdcSourceTest.java @@ -316,7 +316,7 @@ void testDelete() throws Exception { .format("DELETE FROM %s.%s WHERE %s = %s", MODELS_SCHEMA, MODELS_STREAM_NAME, COL_ID, 11)); - final JsonNode state = stateMessages1.get(0).getData(); + final JsonNode state = Jsons.jsonNode(stateMessages1); final AutoCloseableIterator read2 = getSource() .read(getConfig(), CONFIGURED_CATALOG, state); final List actualRecords2 = AutoCloseableIterators.toListAndClose(read2); @@ -347,7 +347,7 @@ void testUpdate() throws Exception { .format("UPDATE %s.%s SET %s = '%s' WHERE %s = %s", MODELS_SCHEMA, MODELS_STREAM_NAME, COL_MODEL, updatedModel, COL_ID, 11)); - final JsonNode state = stateMessages1.get(0).getData(); + final JsonNode state = Jsons.jsonNode(stateMessages1); final AutoCloseableIterator read2 = getSource() .read(getConfig(), CONFIGURED_CATALOG, state); final List actualRecords2 = AutoCloseableIterators.toListAndClose(read2); @@ -403,7 +403,7 @@ void testRecordsProducedDuringAndAfterSync() throws Exception { recordsCreated[0]++; } - final JsonNode state = stateAfterFirstBatch.get(0).getData(); + final JsonNode state = Jsons.jsonNode(stateAfterFirstBatch); final AutoCloseableIterator secondBatchIterator = getSource() .read(getConfig(), CONFIGURED_CATALOG, state); final List dataFromSecondBatch = AutoCloseableIterators @@ -492,7 +492,7 @@ void 
testCdcAndFullRefreshInSameSync() throws Exception { .jsonNode(ImmutableMap.of(COL_ID, 100, COL_MAKE_ID, 3, COL_MODEL, "Punto")); writeModelRecord(puntoRecord); - final JsonNode state = extractStateMessages(actualRecords1).get(0).getData(); + final JsonNode state = Jsons.jsonNode(extractStateMessages(actualRecords1)); final AutoCloseableIterator read2 = getSource() .read(getConfig(), configuredCatalog, state); final List actualRecords2 = AutoCloseableIterators.toListAndClose(read2); @@ -535,7 +535,7 @@ void testNoDataOnSecondSync() throws Exception { final AutoCloseableIterator read1 = getSource() .read(getConfig(), CONFIGURED_CATALOG, null); final List actualRecords1 = AutoCloseableIterators.toListAndClose(read1); - final JsonNode state = extractStateMessages(actualRecords1).get(0).getData(); + final JsonNode state = Jsons.jsonNode(extractStateMessages(actualRecords1)); final AutoCloseableIterator read2 = getSource() .read(getConfig(), CONFIGURED_CATALOG, state); diff --git a/airbyte-integrations/bases/debezium-v1-9-2/build.gradle b/airbyte-integrations/bases/debezium-v1-9-2/build.gradle new file mode 100644 index 000000000000..6359e00d400a --- /dev/null +++ b/airbyte-integrations/bases/debezium-v1-9-2/build.gradle @@ -0,0 +1,27 @@ +plugins { + id "java-test-fixtures" +} + +project.configurations { + testFixturesImplementation.extendsFrom implementation +} +dependencies { + implementation project(':airbyte-protocol:protocol-models') + implementation project(':airbyte-db:db-lib') + + implementation 'io.debezium:debezium-api:1.9.2.Final' + implementation 'io.debezium:debezium-embedded:1.9.2.Final' +// commented out because source mysql and sqlserver do not yet support the new cdc implementation +// implementation 'io.debezium:debezium-connector-sqlserver:1.9.2.Final' +// implementation 'io.debezium:debezium-connector-mysql:1.9.2.Final' + implementation 'io.debezium:debezium-connector-postgres:1.9.2.Final' + implementation 'org.codehaus.plexus:plexus-utils:3.4.2' + + 
testFixturesImplementation project(':airbyte-db:db-lib') + testFixturesImplementation project(':airbyte-integrations:bases:base-java') + + testFixturesImplementation 'org.junit.jupiter:junit-jupiter-engine:5.4.2' + testFixturesImplementation 'org.junit.jupiter:junit-jupiter-api:5.4.2' + testFixturesImplementation 'org.junit.jupiter:junit-jupiter-params:5.4.2' + +} diff --git a/airbyte-integrations/bases/debezium-v1-9-2/src/main/java/io/airbyte/integrations/debezium/AirbyteDebeziumHandler.java b/airbyte-integrations/bases/debezium-v1-9-2/src/main/java/io/airbyte/integrations/debezium/AirbyteDebeziumHandler.java new file mode 100644 index 000000000000..f9dc60228cdb --- /dev/null +++ b/airbyte-integrations/bases/debezium-v1-9-2/src/main/java/io/airbyte/integrations/debezium/AirbyteDebeziumHandler.java @@ -0,0 +1,130 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.integrations.debezium; + +import com.fasterxml.jackson.databind.JsonNode; +import io.airbyte.commons.util.AutoCloseableIterator; +import io.airbyte.commons.util.AutoCloseableIterators; +import io.airbyte.commons.util.CompositeIterator; +import io.airbyte.commons.util.MoreIterators; +import io.airbyte.integrations.debezium.internals.AirbyteFileOffsetBackingStore; +import io.airbyte.integrations.debezium.internals.AirbyteSchemaHistoryStorage; +import io.airbyte.integrations.debezium.internals.DebeziumEventUtils; +import io.airbyte.integrations.debezium.internals.DebeziumRecordIterator; +import io.airbyte.integrations.debezium.internals.DebeziumRecordPublisher; +import io.airbyte.integrations.debezium.internals.FilteredFileDatabaseHistory; +import io.airbyte.protocol.models.AirbyteMessage; +import io.airbyte.protocol.models.ConfiguredAirbyteCatalog; +import io.airbyte.protocol.models.ConfiguredAirbyteStream; +import io.airbyte.protocol.models.SyncMode; +import io.debezium.engine.ChangeEvent; +import java.time.Instant; +import java.util.Collections; +import 
java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.Optional; +import java.util.Properties; +import java.util.concurrent.LinkedBlockingQueue; +import java.util.function.Supplier; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * This class acts as the bridge between Airbyte DB connectors and debezium. If a DB connector wants + * to use debezium for CDC, it should use this class + */ +public class AirbyteDebeziumHandler { + + private static final Logger LOGGER = LoggerFactory.getLogger(AirbyteDebeziumHandler.class); + /** + * We use 10000 as capacity cause the default queue size and batch size of debezium is : + * {@link io.debezium.config.CommonConnectorConfig#DEFAULT_MAX_BATCH_SIZE}is 2048 + * {@link io.debezium.config.CommonConnectorConfig#DEFAULT_MAX_QUEUE_SIZE} is 8192 + */ + private static final int QUEUE_CAPACITY = 10000; + + private final Properties connectorProperties; + private final JsonNode config; + private final CdcTargetPosition targetPosition; + private final ConfiguredAirbyteCatalog catalog; + private final boolean trackSchemaHistory; + + private final LinkedBlockingQueue> queue; + + public AirbyteDebeziumHandler(final JsonNode config, + final CdcTargetPosition targetPosition, + final Properties connectorProperties, + final ConfiguredAirbyteCatalog catalog, + final boolean trackSchemaHistory) { + this.config = config; + this.targetPosition = targetPosition; + this.connectorProperties = connectorProperties; + this.catalog = catalog; + this.trackSchemaHistory = trackSchemaHistory; + this.queue = new LinkedBlockingQueue<>(QUEUE_CAPACITY); + } + + public List> getIncrementalIterators(final CdcSavedInfoFetcher cdcSavedInfoFetcher, + final CdcStateHandler cdcStateHandler, + final CdcMetadataInjector cdcMetadataInjector, + final Instant emittedAt) { + LOGGER.info("using CDC: {}", true); + final AirbyteFileOffsetBackingStore offsetManager = 
AirbyteFileOffsetBackingStore.initializeState(cdcSavedInfoFetcher.getSavedOffset()); + final Optional schemaHistoryManager = schemaHistoryManager(cdcSavedInfoFetcher); + final DebeziumRecordPublisher publisher = new DebeziumRecordPublisher(connectorProperties, config, catalog, offsetManager, + schemaHistoryManager); + publisher.start(queue); + + // handle state machine around pub/sub logic. + final AutoCloseableIterator> eventIterator = new DebeziumRecordIterator( + queue, + targetPosition, + publisher::hasClosed, + publisher::close); + + // convert to airbyte message. + final AutoCloseableIterator messageIterator = AutoCloseableIterators + .transform( + eventIterator, + (event) -> DebeziumEventUtils.toAirbyteMessage(event, cdcMetadataInjector, emittedAt)); + + // our goal is to get the state at the time this supplier is called (i.e. after all message records + // have been produced) + final Supplier stateMessageSupplier = () -> { + final Map offset = offsetManager.read(); + final String dbHistory = trackSchemaHistory ? schemaHistoryManager + .orElseThrow(() -> new RuntimeException("Schema History Tracking is true but manager is not initialised")).read() : null; + + return cdcStateHandler.saveState(offset, dbHistory); + }; + + // wrap the supplier in an iterator so that we can concat it to the message iterator. + final Iterator stateMessageIterator = MoreIterators.singletonIteratorFromSupplier(stateMessageSupplier); + + // this structure guarantees that the debezium engine will be closed, before we attempt to emit the + // state file. we want this so that we have a guarantee that the debezium offset file (which we use + // to produce the state file) is up-to-date. 
+ final CompositeIterator messageIteratorWithStateDecorator = + AutoCloseableIterators.concatWithEagerClose(messageIterator, AutoCloseableIterators.fromIterator(stateMessageIterator)); + + return Collections.singletonList(messageIteratorWithStateDecorator); + } + + private Optional schemaHistoryManager(final CdcSavedInfoFetcher cdcSavedInfoFetcher) { + if (trackSchemaHistory) { + FilteredFileDatabaseHistory.setDatabaseName(config.get("database").asText()); + return Optional.of(AirbyteSchemaHistoryStorage.initializeDBHistory(cdcSavedInfoFetcher.getSavedSchemaHistory())); + } + + return Optional.empty(); + } + + public static boolean shouldUseCDC(final ConfiguredAirbyteCatalog catalog) { + return catalog.getStreams().stream().map(ConfiguredAirbyteStream::getSyncMode) + .anyMatch(syncMode -> syncMode == SyncMode.INCREMENTAL); + } + +} diff --git a/airbyte-integrations/bases/debezium-v1-9-2/src/main/java/io/airbyte/integrations/debezium/CdcMetadataInjector.java b/airbyte-integrations/bases/debezium-v1-9-2/src/main/java/io/airbyte/integrations/debezium/CdcMetadataInjector.java new file mode 100644 index 000000000000..cd99773d99f1 --- /dev/null +++ b/airbyte-integrations/bases/debezium-v1-9-2/src/main/java/io/airbyte/integrations/debezium/CdcMetadataInjector.java @@ -0,0 +1,36 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.integrations.debezium; + +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.node.ObjectNode; + +/** + * This interface is used to add metadata to the records fetched from the database. For instance, in + * Postgres we add the lsn to the records. In MySql we add the file name and position to the + * records. + */ +public interface CdcMetadataInjector { + + /** + * A debezium record contains multiple pieces. 
Ref : + * https://debezium.io/documentation/reference/1.9/connectors/mysql.html#mysql-create-events + * + * @param event is the actual record which contains data and would be written to the destination + * @param source contains the metadata about the record and we need to extract that metadata and add + * it to the event before writing it to destination + */ + void addMetaData(ObjectNode event, JsonNode source); + + /** + * As part of Airbyte record we need to add the namespace (schema name) + * + * @param source part of debezium record and contains the metadata about the record. We need to + * extract namespace out of this metadata and return Ref : + * https://debezium.io/documentation/reference/1.9/connectors/mysql.html#mysql-create-events + */ + String namespace(JsonNode source); + +} diff --git a/airbyte-integrations/bases/debezium-v1-9-2/src/main/java/io/airbyte/integrations/debezium/CdcSavedInfoFetcher.java b/airbyte-integrations/bases/debezium-v1-9-2/src/main/java/io/airbyte/integrations/debezium/CdcSavedInfoFetcher.java new file mode 100644 index 000000000000..a0efa36f05a8 --- /dev/null +++ b/airbyte-integrations/bases/debezium-v1-9-2/src/main/java/io/airbyte/integrations/debezium/CdcSavedInfoFetcher.java @@ -0,0 +1,20 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.integrations.debezium; + +import com.fasterxml.jackson.databind.JsonNode; +import java.util.Optional; + +/** + * This interface is used to fetch the saved info required for debezium to run incrementally. 
Each + * connector saves offset and schema history in different manner + */ +public interface CdcSavedInfoFetcher { + + JsonNode getSavedOffset(); + + Optional getSavedSchemaHistory(); + +} diff --git a/airbyte-integrations/bases/debezium-v1-9-2/src/main/java/io/airbyte/integrations/debezium/CdcStateHandler.java b/airbyte-integrations/bases/debezium-v1-9-2/src/main/java/io/airbyte/integrations/debezium/CdcStateHandler.java new file mode 100644 index 000000000000..7b76186fc9c1 --- /dev/null +++ b/airbyte-integrations/bases/debezium-v1-9-2/src/main/java/io/airbyte/integrations/debezium/CdcStateHandler.java @@ -0,0 +1,19 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.integrations.debezium; + +import io.airbyte.protocol.models.AirbyteMessage; +import java.util.Map; + +/** + * This interface is used to allow connectors to save the offset and schema history in the manner + * which suits them + */ +@FunctionalInterface +public interface CdcStateHandler { + + AirbyteMessage saveState(Map offset, String dbHistory); + +} diff --git a/airbyte-integrations/bases/debezium-v1-9-2/src/main/java/io/airbyte/integrations/debezium/CdcTargetPosition.java b/airbyte-integrations/bases/debezium-v1-9-2/src/main/java/io/airbyte/integrations/debezium/CdcTargetPosition.java new file mode 100644 index 000000000000..47209ada28f7 --- /dev/null +++ b/airbyte-integrations/bases/debezium-v1-9-2/src/main/java/io/airbyte/integrations/debezium/CdcTargetPosition.java @@ -0,0 +1,20 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.integrations.debezium; + +import com.fasterxml.jackson.databind.JsonNode; + +/** + * This interface is used to define the target position at the beginning of the sync so that once we + * reach the desired target, we can shutdown the sync. 
This is needed because it might happen that + * while we are syncing the data, new changes are being made in the source database and as a result + * we might end up syncing forever. In order to tackle that, we need to define a point to end at the + * beginning of the sync + */ +public interface CdcTargetPosition { + + boolean reachedTargetPosition(JsonNode valueAsJson); + +} diff --git a/airbyte-integrations/bases/debezium-v1-9-2/src/main/java/io/airbyte/integrations/debezium/internals/AirbyteFileOffsetBackingStore.java b/airbyte-integrations/bases/debezium-v1-9-2/src/main/java/io/airbyte/integrations/debezium/internals/AirbyteFileOffsetBackingStore.java new file mode 100644 index 000000000000..89dbd3d1f472 --- /dev/null +++ b/airbyte-integrations/bases/debezium-v1-9-2/src/main/java/io/airbyte/integrations/debezium/internals/AirbyteFileOffsetBackingStore.java @@ -0,0 +1,144 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.integrations.debezium.internals; + +import com.fasterxml.jackson.databind.JsonNode; +import com.google.common.base.Preconditions; +import io.airbyte.commons.json.Jsons; +import java.io.EOFException; +import java.io.IOException; +import java.io.ObjectOutputStream; +import java.nio.ByteBuffer; +import java.nio.charset.StandardCharsets; +import java.nio.file.Files; +import java.nio.file.NoSuchFileException; +import java.nio.file.Path; +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; +import java.util.stream.Collectors; +import org.apache.commons.io.FileUtils; +import org.apache.kafka.connect.errors.ConnectException; +import org.apache.kafka.connect.util.SafeObjectInputStream; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * This class handles reading and writing a debezium offset file. In many cases it is duplicating + * logic in debezium because that logic is not exposed in the public API. 
We mostly treat the + * contents of this state file like a black box. We know it is a Map<ByteBuffer, Bytebuffer>. + * We deserialize it to a Map<String, String> so that the state file can be human readable. If + * we ever discover that any of the contents of these offset files is not string serializable we + * will likely have to drop the human readability support and just base64 encode it. + */ +public class AirbyteFileOffsetBackingStore { + + private static final Logger LOGGER = LoggerFactory.getLogger(AirbyteFileOffsetBackingStore.class); + + private final Path offsetFilePath; + + public AirbyteFileOffsetBackingStore(final Path offsetFilePath) { + this.offsetFilePath = offsetFilePath; + } + + public Path getOffsetFilePath() { + return offsetFilePath; + } + + public Map read() { + final Map raw = load(); + + return raw.entrySet().stream().collect(Collectors.toMap( + e -> byteBufferToString(e.getKey()), + e -> byteBufferToString(e.getValue()))); + } + + @SuppressWarnings("unchecked") + public void persist(final JsonNode cdcState) { + final Map mapAsString = + cdcState != null ? Jsons.object(cdcState, Map.class) : Collections.emptyMap(); + final Map mappedAsStrings = mapAsString.entrySet().stream().collect(Collectors.toMap( + e -> stringToByteBuffer(e.getKey()), + e -> stringToByteBuffer(e.getValue()))); + + FileUtils.deleteQuietly(offsetFilePath.toFile()); + save(mappedAsStrings); + } + + private static String byteBufferToString(final ByteBuffer byteBuffer) { + Preconditions.checkNotNull(byteBuffer); + return new String(byteBuffer.array(), StandardCharsets.UTF_8); + } + + private static ByteBuffer stringToByteBuffer(final String s) { + Preconditions.checkNotNull(s); + return ByteBuffer.wrap(s.getBytes(StandardCharsets.UTF_8)); + } + + /** + * See FileOffsetBackingStore#load - logic is mostly borrowed from here. duplicated because this + * method is not public. 
+ */ + @SuppressWarnings("unchecked") + private Map load() { + try (final SafeObjectInputStream is = new SafeObjectInputStream(Files.newInputStream(offsetFilePath))) { + // todo (cgardens) - we currently suppress a security warning for this line. use of readObject from + // untrusted sources is considered unsafe. Since the source is controlled by us in this case it + // should be safe. That said, changing this implementation to not use readObject would remove some + // headache. + final Object obj = is.readObject(); + if (!(obj instanceof HashMap)) + throw new ConnectException("Expected HashMap but found " + obj.getClass()); + final Map raw = (Map) obj; + final Map data = new HashMap<>(); + for (final Map.Entry mapEntry : raw.entrySet()) { + final ByteBuffer key = (mapEntry.getKey() != null) ? ByteBuffer.wrap(mapEntry.getKey()) : null; + final ByteBuffer value = (mapEntry.getValue() != null) ? ByteBuffer.wrap(mapEntry.getValue()) : null; + data.put(key, value); + } + + return data; + } catch (final NoSuchFileException | EOFException e) { + // NoSuchFileException: Ignore, may be new. + // EOFException: Ignore, this means the file was missing or corrupt + return Collections.emptyMap(); + } catch (final IOException | ClassNotFoundException e) { + throw new ConnectException(e); + } + } + + /** + * See FileOffsetBackingStore#save - logic is mostly borrowed from here. duplicated because this + * method is not public. + */ + private void save(final Map data) { + try (final ObjectOutputStream os = new ObjectOutputStream(Files.newOutputStream(offsetFilePath))) { + final Map raw = new HashMap<>(); + for (final Map.Entry mapEntry : data.entrySet()) { + final byte[] key = (mapEntry.getKey() != null) ? mapEntry.getKey().array() : null; + final byte[] value = (mapEntry.getValue() != null) ? 
mapEntry.getValue().array() : null; + raw.put(key, value); + } + os.writeObject(raw); + } catch (final IOException e) { + throw new ConnectException(e); + } + } + + public static AirbyteFileOffsetBackingStore initializeState(final JsonNode cdcState) { + final Path cdcWorkingDir; + try { + cdcWorkingDir = Files.createTempDirectory(Path.of("/tmp"), "cdc-state-offset"); + } catch (final IOException e) { + throw new RuntimeException(e); + } + final Path cdcOffsetFilePath = cdcWorkingDir.resolve("offset.dat"); + + final AirbyteFileOffsetBackingStore offsetManager = new AirbyteFileOffsetBackingStore(cdcOffsetFilePath); + offsetManager.persist(cdcState); + return offsetManager; + } + +} diff --git a/airbyte-integrations/bases/debezium-v1-9-2/src/main/java/io/airbyte/integrations/debezium/internals/AirbyteSchemaHistoryStorage.java b/airbyte-integrations/bases/debezium-v1-9-2/src/main/java/io/airbyte/integrations/debezium/internals/AirbyteSchemaHistoryStorage.java new file mode 100644 index 000000000000..0f4e37fd20ef --- /dev/null +++ b/airbyte-integrations/bases/debezium-v1-9-2/src/main/java/io/airbyte/integrations/debezium/internals/AirbyteSchemaHistoryStorage.java @@ -0,0 +1,149 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+ */ + +package io.airbyte.integrations.debezium.internals; + +import com.fasterxml.jackson.databind.JsonNode; +import io.airbyte.commons.json.Jsons; +import io.debezium.document.Document; +import io.debezium.document.DocumentReader; +import io.debezium.document.DocumentWriter; +import io.debezium.relational.history.HistoryRecord; +import java.io.BufferedWriter; +import java.io.IOException; +import java.nio.charset.Charset; +import java.nio.charset.StandardCharsets; +import java.nio.file.FileAlreadyExistsException; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.StandardOpenOption; +import java.util.Optional; +import java.util.function.Consumer; +import org.apache.commons.io.FileUtils; + +/** + * The purpose of this class is : to , 1. Read the contents of the file {@link #path} which contains + * the schema history at the end of the sync so that it can be saved in state for future syncs. + * Check {@link #read()} 2. Write the saved content back to the file {@link #path} at the beginning + * of the sync so that debezium can function smoothly. Check persist(Optional<JsonNode>). 
To + * understand more about file, please refer {@link FilteredFileDatabaseHistory} + */ +public class AirbyteSchemaHistoryStorage { + + private final Path path; + private static final Charset UTF8 = StandardCharsets.UTF_8; + private final DocumentReader reader = DocumentReader.defaultReader(); + private final DocumentWriter writer = DocumentWriter.defaultWriter(); + + public AirbyteSchemaHistoryStorage(final Path path) { + this.path = path; + } + + public Path getPath() { + return path; + } + + /** + * This implementation is kind of similar to + * {@link io.debezium.relational.history.FileDatabaseHistory#recoverRecords(Consumer)} + */ + public String read() { + final StringBuilder fileAsString = new StringBuilder(); + try { + for (final String line : Files.readAllLines(path, UTF8)) { + if (line != null && !line.isEmpty()) { + final Document record = reader.read(line); + final String recordAsString = writer.write(record); + fileAsString.append(recordAsString); + fileAsString.append(System.lineSeparator()); + } + } + return fileAsString.toString(); + } catch (final IOException e) { + throw new RuntimeException(e); + } + } + + /** + * This implementation is kind of similar to + * {@link io.debezium.relational.history.FileDatabaseHistory#start()} + */ + private void makeSureFileExists() { + try { + // Make sure the file exists ... + if (!Files.exists(path)) { + // Create parent directories if we have them ... 
+ if (path.getParent() != null) { + Files.createDirectories(path.getParent()); + } + try { + Files.createFile(path); + } catch (final FileAlreadyExistsException e) { + // do nothing + } + } + } catch (final IOException e) { + throw new IllegalStateException( + "Unable to check or create history file at " + path + ": " + e.getMessage(), e); + } + } + + public void persist(final Optional schemaHistory) { + if (schemaHistory.isEmpty()) { + return; + } + final String fileAsString = Jsons.object(schemaHistory.get(), String.class); + + if (fileAsString == null || fileAsString.isEmpty()) { + return; + } + + FileUtils.deleteQuietly(path.toFile()); + makeSureFileExists(); + writeToFile(fileAsString); + } + + /** + * This implementation is kind of similar to + * {@link io.debezium.relational.history.FileDatabaseHistory#storeRecord(HistoryRecord)} + * + * @param fileAsString Represents the contents of the file saved in state from previous syncs + */ + private void writeToFile(final String fileAsString) { + try { + final String[] split = fileAsString.split(System.lineSeparator()); + for (final String element : split) { + final Document read = reader.read(element); + final String line = writer.write(read); + + try (final BufferedWriter historyWriter = Files + .newBufferedWriter(path, StandardOpenOption.APPEND)) { + try { + historyWriter.append(line); + historyWriter.newLine(); + } catch (final IOException e) { + throw new RuntimeException(e); + } + } + } + } catch (final IOException e) { + throw new RuntimeException(e); + } + } + + public static AirbyteSchemaHistoryStorage initializeDBHistory(final Optional schemaHistory) { + final Path dbHistoryWorkingDir; + try { + dbHistoryWorkingDir = Files.createTempDirectory(Path.of("/tmp"), "cdc-db-history"); + } catch (final IOException e) { + throw new RuntimeException(e); + } + final Path dbHistoryFilePath = dbHistoryWorkingDir.resolve("dbhistory.dat"); + + final AirbyteSchemaHistoryStorage schemaHistoryManager = new 
AirbyteSchemaHistoryStorage(dbHistoryFilePath); + schemaHistoryManager.persist(schemaHistory); + return schemaHistoryManager; + } + +} diff --git a/airbyte-integrations/bases/debezium-v1-9-2/src/main/java/io/airbyte/integrations/debezium/internals/DebeziumConverterUtils.java b/airbyte-integrations/bases/debezium-v1-9-2/src/main/java/io/airbyte/integrations/debezium/internals/DebeziumConverterUtils.java new file mode 100644 index 000000000000..ab0a9e6cde16 --- /dev/null +++ b/airbyte-integrations/bases/debezium-v1-9-2/src/main/java/io/airbyte/integrations/debezium/internals/DebeziumConverterUtils.java @@ -0,0 +1,69 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.integrations.debezium.internals; + +import io.airbyte.db.DataTypeUtils; +import io.debezium.spi.converter.RelationalColumn; +import java.sql.Date; +import java.sql.Timestamp; +import java.time.Duration; +import java.time.LocalDate; +import java.time.LocalDateTime; +import java.time.format.DateTimeParseException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public final class DebeziumConverterUtils { + + private static final Logger LOGGER = LoggerFactory.getLogger(DebeziumConverterUtils.class); + + private DebeziumConverterUtils() { + throw new UnsupportedOperationException(); + } + + public static String convertDate(final Object input) { + /** + * While building this custom converter we were not sure what type debezium could return cause there + * is no mention of it in the documentation. Secondly if you take a look at + * {@link io.debezium.connector.mysql.converters.TinyIntOneToBooleanConverter#converterFor(io.debezium.spi.converter.RelationalColumn, io.debezium.spi.converter.CustomConverter.ConverterRegistration)} + * method, even it is handling multiple data types but its not clear under what circumstances which + * data type would be returned. I just went ahead and handled the data types that made sense. 
+ * Secondly, we use LocalDateTime to handle this cause it represents DATETIME datatype in JAVA + */ + if (input instanceof LocalDateTime) { + return DataTypeUtils.toISO8601String((LocalDateTime) input); + } else if (input instanceof LocalDate) { + return DataTypeUtils.toISO8601String((LocalDate) input); + } else if (input instanceof Duration) { + return DataTypeUtils.toISO8601String((Duration) input); + } else if (input instanceof Timestamp) { + return DataTypeUtils.toISO8601StringWithMicroseconds((((Timestamp) input).toInstant())); + } else if (input instanceof Number) { + return DataTypeUtils.toISO8601String( + new Timestamp(((Number) input).longValue()).toLocalDateTime()); + } else if (input instanceof Date) { + return DataTypeUtils.toISO8601String((Date) input); + } else if (input instanceof String) { + try { + return LocalDateTime.parse((String) input).toString(); + } catch (final DateTimeParseException e) { + LOGGER.warn("Cannot convert value '{}' to LocalDateTime type", input); + return input.toString(); + } + } + LOGGER.warn("Uncovered date class type '{}'. Use default converter", input.getClass().getName()); + return input.toString(); + } + + public static Object convertDefaultValue(RelationalColumn field) { + if (field.isOptional()) { + return null; + } else if (field.hasDefaultValue()) { + return field.defaultValue(); + } + return null; + } + +} diff --git a/airbyte-integrations/bases/debezium-v1-9-2/src/main/java/io/airbyte/integrations/debezium/internals/DebeziumEventUtils.java b/airbyte-integrations/bases/debezium-v1-9-2/src/main/java/io/airbyte/integrations/debezium/internals/DebeziumEventUtils.java new file mode 100644 index 000000000000..da31b6143210 --- /dev/null +++ b/airbyte-integrations/bases/debezium-v1-9-2/src/main/java/io/airbyte/integrations/debezium/internals/DebeziumEventUtils.java @@ -0,0 +1,67 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+ */ + +package io.airbyte.integrations.debezium.internals; + +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.node.ObjectNode; +import io.airbyte.commons.json.Jsons; +import io.airbyte.integrations.debezium.CdcMetadataInjector; +import io.airbyte.protocol.models.AirbyteMessage; +import io.airbyte.protocol.models.AirbyteRecordMessage; +import io.debezium.engine.ChangeEvent; +import java.sql.Timestamp; +import java.time.Instant; + +public class DebeziumEventUtils { + + public static final String CDC_UPDATED_AT = "_ab_cdc_updated_at"; + public static final String CDC_DELETED_AT = "_ab_cdc_deleted_at"; + + public static AirbyteMessage toAirbyteMessage(final ChangeEvent event, + final CdcMetadataInjector cdcMetadataInjector, + final Instant emittedAt) { + final JsonNode debeziumRecord = Jsons.deserialize(event.value()); + final JsonNode before = debeziumRecord.get("before"); + final JsonNode after = debeziumRecord.get("after"); + final JsonNode source = debeziumRecord.get("source"); + + final JsonNode data = formatDebeziumData(before, after, source, cdcMetadataInjector); + final String schemaName = cdcMetadataInjector.namespace(source); + final String streamName = source.get("table").asText(); + + final AirbyteRecordMessage airbyteRecordMessage = new AirbyteRecordMessage() + .withStream(streamName) + .withNamespace(schemaName) + .withEmittedAt(emittedAt.toEpochMilli()) + .withData(data); + + return new AirbyteMessage() + .withType(AirbyteMessage.Type.RECORD) + .withRecord(airbyteRecordMessage); + } + + // warning mutates input args. + private static JsonNode formatDebeziumData(final JsonNode before, + final JsonNode after, + final JsonNode source, + final CdcMetadataInjector cdcMetadataInjector) { + final ObjectNode base = (ObjectNode) (after.isNull() ? 
before : after); + + final long transactionMillis = source.get("ts_ms").asLong(); + final String transactionTimestamp = new Timestamp(transactionMillis).toInstant().toString(); + + base.put(CDC_UPDATED_AT, transactionTimestamp); + cdcMetadataInjector.addMetaData(base, source); + + if (after.isNull()) { + base.put(CDC_DELETED_AT, transactionTimestamp); + } else { + base.put(CDC_DELETED_AT, (String) null); + } + + return base; + } + +} diff --git a/airbyte-integrations/bases/debezium-v1-9-2/src/main/java/io/airbyte/integrations/debezium/internals/DebeziumRecordIterator.java b/airbyte-integrations/bases/debezium-v1-9-2/src/main/java/io/airbyte/integrations/debezium/internals/DebeziumRecordIterator.java new file mode 100644 index 000000000000..76305dabf259 --- /dev/null +++ b/airbyte-integrations/bases/debezium-v1-9-2/src/main/java/io/airbyte/integrations/debezium/internals/DebeziumRecordIterator.java @@ -0,0 +1,155 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.integrations.debezium.internals; + +import com.fasterxml.jackson.databind.JsonNode; +import com.google.common.collect.AbstractIterator; +import io.airbyte.commons.concurrency.VoidCallable; +import io.airbyte.commons.json.Jsons; +import io.airbyte.commons.lang.MoreBooleans; +import io.airbyte.commons.util.AutoCloseableIterator; +import io.airbyte.integrations.debezium.CdcTargetPosition; +import io.debezium.engine.ChangeEvent; +import java.util.concurrent.LinkedBlockingQueue; +import java.util.concurrent.TimeUnit; +import java.util.function.Supplier; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * The record iterator is the consumer (in the producer / consumer relationship with debezium) + * responsible for 1. making sure every record produced by the record publisher is processed 2. + * signalling to the record publisher when it is time for it to stop producing records. 
It emits + * this signal either when the publisher had not produced a new record for a long time or when it + * has processed at least all of the records that were present in the database when the source was + * started. Because the publisher might publish more records between the consumer sending this + * signal and the publisher actually shutting down, the consumer must stay alive as long as the + * publisher is not closed. Even after the publisher is closed, the consumer will finish processing + * any produced records before closing. + */ +public class DebeziumRecordIterator extends AbstractIterator> + implements AutoCloseableIterator> { + + private static final Logger LOGGER = LoggerFactory.getLogger(DebeziumRecordIterator.class); + + private static final WaitTime FIRST_RECORD_WAIT_TIME_MINUTES = new WaitTime(5, TimeUnit.MINUTES); + private static final WaitTime SUBSEQUENT_RECORD_WAIT_TIME_SECONDS = new WaitTime(1, TimeUnit.MINUTES); + + private final LinkedBlockingQueue> queue; + private final CdcTargetPosition targetPosition; + private final Supplier publisherStatusSupplier; + private final VoidCallable requestClose; + private boolean receivedFirstRecord; + private boolean hasSnapshotFinished; + private boolean signalledClose; + + public DebeziumRecordIterator(final LinkedBlockingQueue> queue, + final CdcTargetPosition targetPosition, + final Supplier publisherStatusSupplier, + final VoidCallable requestClose) { + this.queue = queue; + this.targetPosition = targetPosition; + this.publisherStatusSupplier = publisherStatusSupplier; + this.requestClose = requestClose; + this.receivedFirstRecord = false; + this.hasSnapshotFinished = true; + this.signalledClose = false; + } + + @Override + protected ChangeEvent computeNext() { + // keep trying until the publisher is closed or until the queue is empty. the latter case is + // possible when the publisher has shutdown but the consumer has not yet processed all messages it + // emitted. 
+ while (!MoreBooleans.isTruthy(publisherStatusSupplier.get()) || !queue.isEmpty()) { + final ChangeEvent next; + try { + final WaitTime waitTime = receivedFirstRecord ? SUBSEQUENT_RECORD_WAIT_TIME_SECONDS : FIRST_RECORD_WAIT_TIME_MINUTES; + next = queue.poll(waitTime.period, waitTime.timeUnit); + } catch (final InterruptedException e) { + throw new RuntimeException(e); + } + + // if within the timeout, the consumer could not get a record, it is time to tell the producer to + // shutdown. + if (next == null) { + LOGGER.info("Closing cause next is returned as null"); + requestClose(); + LOGGER.info("no record found. polling again."); + continue; + } + + final JsonNode eventAsJson = Jsons.deserialize(next.value()); + hasSnapshotFinished = hasSnapshotFinished(eventAsJson); + + // if the last record matches the target file position, it is time to tell the producer to shutdown. + if (!signalledClose && shouldSignalClose(eventAsJson)) { + requestClose(); + } + receivedFirstRecord = true; + return next; + } + return endOfData(); + } + + private boolean hasSnapshotFinished(final JsonNode eventAsJson) { + final SnapshotMetadata snapshot = SnapshotMetadata.valueOf(eventAsJson.get("source").get("snapshot").asText().toUpperCase()); + return SnapshotMetadata.TRUE != snapshot; + } + + /** + * Debezium was built as an ever running process which keeps on listening for new changes on DB and + * immediately processing them. Airbyte needs debezium to work as a start stop mechanism. In order + * to determine when to stop debezium engine we rely on few factors 1. TargetPosition logic. At the + * beginning of the sync we define a target position in the logs of the DB. 
This can be an LSN or + * anything specific to the DB which can help us identify that we have reached a specific position + * in the log based replication When we start processing records from debezium, we extract the the + * log position from the metadata of the record and compare it with our target that we defined at + * the beginning of the sync. If we have reached the target position, we shutdown the debezium + * engine 2. The TargetPosition logic might not always work and in order to tackle that we have + * another logic where if we do not receive records from debezium for a given duration, we ask + * debezium engine to shutdown 3. We also take the Snapshot into consideration, when a connector is + * running for the first time, we let it complete the snapshot and only after the completion of + * snapshot we should shutdown the engine. If we are closing the engine before completion of + * snapshot, we throw an exception + */ + @Override + public void close() throws Exception { + requestClose(); + } + + private boolean shouldSignalClose(final JsonNode eventAsJson) { + return targetPosition.reachedTargetPosition(eventAsJson); + } + + private void requestClose() { + try { + requestClose.call(); + signalledClose = true; + } catch (final Exception e) { + throw new RuntimeException(e); + } + throwExceptionIfSnapshotNotFinished(); + } + + private void throwExceptionIfSnapshotNotFinished() { + if (!hasSnapshotFinished) { + throw new RuntimeException("Closing down debezium engine but snapshot has not finished"); + } + } + + private static class WaitTime { + + public final int period; + public final TimeUnit timeUnit; + + public WaitTime(final int period, final TimeUnit timeUnit) { + this.period = period; + this.timeUnit = timeUnit; + } + + } + +} diff --git a/airbyte-integrations/bases/debezium-v1-9-2/src/main/java/io/airbyte/integrations/debezium/internals/DebeziumRecordPublisher.java 
b/airbyte-integrations/bases/debezium-v1-9-2/src/main/java/io/airbyte/integrations/debezium/internals/DebeziumRecordPublisher.java new file mode 100644 index 000000000000..c15c10815620 --- /dev/null +++ b/airbyte-integrations/bases/debezium-v1-9-2/src/main/java/io/airbyte/integrations/debezium/internals/DebeziumRecordPublisher.java @@ -0,0 +1,189 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.integrations.debezium.internals; + +import com.fasterxml.jackson.databind.JsonNode; +import com.google.common.annotations.VisibleForTesting; +import io.airbyte.protocol.models.ConfiguredAirbyteCatalog; +import io.airbyte.protocol.models.ConfiguredAirbyteStream; +import io.airbyte.protocol.models.SyncMode; +import io.debezium.engine.ChangeEvent; +import io.debezium.engine.DebeziumEngine; +import io.debezium.engine.format.Json; +import io.debezium.engine.spi.OffsetCommitPolicy; +import java.util.Optional; +import java.util.Properties; +import java.util.Queue; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.concurrent.atomic.AtomicReference; +import java.util.stream.Collectors; +import org.codehaus.plexus.util.StringUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * The purpose of this class is to intiliaze and spawn the debezium engine with the right properties + * to fetch records + */ +public class DebeziumRecordPublisher implements AutoCloseable { + + private static final Logger LOGGER = LoggerFactory.getLogger(DebeziumRecordPublisher.class); + private final ExecutorService executor; + private DebeziumEngine> engine; + + private final JsonNode config; + private final AirbyteFileOffsetBackingStore offsetManager; + private final Optional schemaHistoryManager; + + private final AtomicBoolean hasClosed; + private final 
AtomicBoolean isClosing; + private final AtomicReference thrownError; + private final CountDownLatch engineLatch; + private final Properties properties; + private final ConfiguredAirbyteCatalog catalog; + + public DebeziumRecordPublisher(final Properties properties, + final JsonNode config, + final ConfiguredAirbyteCatalog catalog, + final AirbyteFileOffsetBackingStore offsetManager, + final Optional schemaHistoryManager) { + this.properties = properties; + this.config = config; + this.catalog = catalog; + this.offsetManager = offsetManager; + this.schemaHistoryManager = schemaHistoryManager; + this.hasClosed = new AtomicBoolean(false); + this.isClosing = new AtomicBoolean(false); + this.thrownError = new AtomicReference<>(); + this.executor = Executors.newSingleThreadExecutor(); + this.engineLatch = new CountDownLatch(1); + } + + public void start(final Queue> queue) { + engine = DebeziumEngine.create(Json.class) + .using(getDebeziumProperties()) + .using(new OffsetCommitPolicy.AlwaysCommitOffsetPolicy()) + .notifying(e -> { + // debezium outputs a tombstone event that has a value of null. this is an artifact of how it + // interacts with kafka. we want to ignore it. + // more on the tombstone: + // https://debezium.io/documentation/reference/configuration/event-flattening.html + if (e.value() != null) { + boolean inserted = false; + while (!inserted) { + inserted = queue.offer(e); + } + } + }) + .using((success, message, error) -> { + LOGGER.info("Debezium engine shutdown."); + thrownError.set(error); + engineLatch.countDown(); + }) + .build(); + + // Run the engine asynchronously ... + executor.execute(engine); + } + + public boolean hasClosed() { + return hasClosed.get(); + } + + public void close() throws Exception { + if (isClosing.compareAndSet(false, true)) { + // consumers should assume records can be produced until engine has closed. 
+ if (engine != null) { + engine.close(); + } + + // wait for closure before shutting down executor service + engineLatch.await(5, TimeUnit.MINUTES); + + // shut down and await for thread to actually go down + executor.shutdown(); + executor.awaitTermination(5, TimeUnit.MINUTES); + + // after the engine is completely off, we can mark this as closed + hasClosed.set(true); + + if (thrownError.get() != null) { + throw new RuntimeException(thrownError.get()); + } + } + } + + protected Properties getDebeziumProperties() { + final Properties props = new Properties(); + props.putAll(properties); + + // debezium engine configuration + props.setProperty("name", "engine"); + props.setProperty("offset.storage", "org.apache.kafka.connect.storage.FileOffsetBackingStore"); + props.setProperty("offset.storage.file.filename", offsetManager.getOffsetFilePath().toString()); + props.setProperty("offset.flush.interval.ms", "1000"); // todo: make this longer + // default values from debezium CommonConnectorConfig + props.setProperty("max.batch.size", "2048"); + props.setProperty("max.queue.size", "8192"); + + if (schemaHistoryManager.isPresent()) { + // https://debezium.io/documentation/reference/1.9/operations/debezium-server.html#debezium-source-database-history-class + // https://debezium.io/documentation/reference/development/engine.html#_in_the_code + // As mentioned in the documents above, debezium connector for MySQL needs to track the schema + // changes. 
If we don't do this, we can't fetch records for the table + // We have implemented our own implementation to filter out the schema information from other + // databases that the connector is not syncing + props.setProperty("database.history", "io.airbyte.integrations.debezium.internals.FilteredFileDatabaseHistory"); + props.setProperty("database.history.file.filename", schemaHistoryManager.get().getPath().toString()); + } + + // https://debezium.io/documentation/reference/configuration/avro.html + props.setProperty("key.converter.schemas.enable", "false"); + props.setProperty("value.converter.schemas.enable", "false"); + + // debezium names + props.setProperty("name", config.get("database").asText()); + props.setProperty("database.server.name", config.get("database").asText()); + + // db connection configuration + props.setProperty("database.hostname", config.get("host").asText()); + props.setProperty("database.port", config.get("port").asText()); + props.setProperty("database.user", config.get("username").asText()); + props.setProperty("database.dbname", config.get("database").asText()); + + if (config.has("password")) { + props.setProperty("database.password", config.get("password").asText()); + } + + // By default "decimal.handing.mode=precise" which's caused returning this value as a binary. 
+ // The "double" type may cause a loss of precision, so set Debezium's config to store it as a String + // explicitly in its Kafka messages for more details see: + // https://debezium.io/documentation/reference/1.9/connectors/postgresql.html#postgresql-decimal-types + // https://debezium.io/documentation/faq/#how_to_retrieve_decimal_field_from_binary_representation + props.setProperty("decimal.handling.mode", "string"); + + // table selection + final String tableWhitelist = getTableWhitelist(catalog); + props.setProperty("table.include.list", tableWhitelist); + + return props; + } + + @VisibleForTesting + public static String getTableWhitelist(final ConfiguredAirbyteCatalog catalog) { + return catalog.getStreams().stream() + .filter(s -> s.getSyncMode() == SyncMode.INCREMENTAL) + .map(ConfiguredAirbyteStream::getStream) + .map(stream -> stream.getNamespace() + "." + stream.getName()) + // debezium needs commas escaped to split properly + .map(x -> StringUtils.escape(x, new char[] {','}, "\\,")) + .collect(Collectors.joining(",")); + } + +} diff --git a/airbyte-integrations/bases/debezium-v1-9-2/src/main/java/io/airbyte/integrations/debezium/internals/FilteredFileDatabaseHistory.java b/airbyte-integrations/bases/debezium-v1-9-2/src/main/java/io/airbyte/integrations/debezium/internals/FilteredFileDatabaseHistory.java new file mode 100644 index 000000000000..f04690767537 --- /dev/null +++ b/airbyte-integrations/bases/debezium-v1-9-2/src/main/java/io/airbyte/integrations/debezium/internals/FilteredFileDatabaseHistory.java @@ -0,0 +1,149 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+ */ + +package io.airbyte.integrations.debezium.internals; + +import io.debezium.config.Configuration; +import io.debezium.relational.history.AbstractDatabaseHistory; +import io.debezium.relational.history.DatabaseHistoryException; +import io.debezium.relational.history.DatabaseHistoryListener; +import io.debezium.relational.history.FileDatabaseHistory; +import io.debezium.relational.history.HistoryRecord; +import io.debezium.relational.history.HistoryRecord.Fields; +import io.debezium.relational.history.HistoryRecordComparator; +import java.lang.reflect.InvocationTargetException; +import java.lang.reflect.Method; +import java.util.function.Consumer; + +/** + * MySQL Debezium connector monitors the database schema evolution over the time and stores the data + * in a database history file. Without this file we can't fetch the records from binlog. We need to + * save the contents of the file. Debezium by default uses + * {@link io.debezium.relational.history.FileDatabaseHistory} class to write the schema information + * in the file. The problem is that the Debezium tracks the schema evolution of all the tables in + * all the databases, because of that the file content can grow. In order to make sure that debezium + * tracks only the schema of the tables that are present in the database that Airbyte is syncing, we + * created this class. In the method {@link #storeRecord(HistoryRecord)}, we introduced a check to + * make sure only those records are being saved whose database name matches the database Airbyte is + * syncing. We tell debezium to use this class by passing it as property in debezium engine. 
Look + * for "database.history" property in {@link DebeziumRecordPublisher#getDebeziumProperties()} + * Ideally {@link FilteredFileDatabaseHistory} should have extended + * {@link io.debezium.relational.history.FileDatabaseHistory} and overridden the + * {@link #storeRecord(HistoryRecord)} method but {@link FilteredFileDatabaseHistory} is a final + * class and can not be inherited + */ +public class FilteredFileDatabaseHistory extends AbstractDatabaseHistory { + + private final FileDatabaseHistory fileDatabaseHistory; + private static String databaseName; + + public FilteredFileDatabaseHistory() { + this.fileDatabaseHistory = new FileDatabaseHistory(); + } + + /** + * Ideally the databaseName should have been initialized in the constructor of the class. But since + * we supply the class name to debezium and it uses reflection to construct the object of the class, + * we can't pass in the databaseName as a parameter to the constructor. That's why we had to take + * the static approach. + * + * @param databaseName Name of the database that the connector is syncing + */ + public static void setDatabaseName(final String databaseName) { + if (FilteredFileDatabaseHistory.databaseName == null) { + FilteredFileDatabaseHistory.databaseName = databaseName; + } else if (!FilteredFileDatabaseHistory.databaseName.equals(databaseName)) { + throw new RuntimeException( + "Database name has already been set : " + FilteredFileDatabaseHistory.databaseName + + " can't set to : " + databaseName); + } + } + + @Override + public void configure(final Configuration config, + final HistoryRecordComparator comparator, + final DatabaseHistoryListener listener, + final boolean useCatalogBeforeSchema) { + fileDatabaseHistory.configure(config, comparator, listener, useCatalogBeforeSchema); + } + + @Override + public void start() { + fileDatabaseHistory.start(); + } + + @Override + public void storeRecord(final HistoryRecord record) throws DatabaseHistoryException { + if (record == null) { + 
return; + } + try { + final String dbNameInRecord = record.document().getString(Fields.DATABASE_NAME); + if (databaseName != null && dbNameInRecord != null && !dbNameInRecord.equals(databaseName)) { + return; + } + + /** + * We are using reflection because the method + * {@link io.debezium.relational.history.FileDatabaseHistory#storeRecord(HistoryRecord)} is + * protected and can not be accessed from here + */ + final Method storeRecordMethod = fileDatabaseHistory.getClass() + .getDeclaredMethod("storeRecord", record.getClass()); + storeRecordMethod.setAccessible(true); + storeRecordMethod.invoke(fileDatabaseHistory, record); + } catch (final NoSuchMethodException | InvocationTargetException | IllegalAccessException e) { + throw new RuntimeException(e); + } + } + + @Override + public void stop() { + fileDatabaseHistory.stop(); + // this is just for tests + resetDbName(); + } + + public static void resetDbName() { + databaseName = null; + } + + @Override + protected void recoverRecords(final Consumer records) { + try { + /** + * We are using reflection because the method + * {@link io.debezium.relational.history.FileDatabaseHistory#recoverRecords(Consumer)} is protected + * and can not be accessed from here + */ + final Method recoverRecords = fileDatabaseHistory.getClass() + .getDeclaredMethod("recoverRecords", Consumer.class); + recoverRecords.setAccessible(true); + recoverRecords.invoke(fileDatabaseHistory, records); + } catch (final NoSuchMethodException | InvocationTargetException | IllegalAccessException e) { + throw new RuntimeException(e); + } + } + + @Override + public boolean storageExists() { + return fileDatabaseHistory.storageExists(); + } + + @Override + public void initializeStorage() { + fileDatabaseHistory.initializeStorage(); + } + + @Override + public boolean exists() { + return fileDatabaseHistory.exists(); + } + + @Override + public String toString() { + return fileDatabaseHistory.toString(); + } + +} diff --git 
a/airbyte-integrations/bases/debezium-v1-9-2/src/main/java/io/airbyte/integrations/debezium/internals/MSSQLConverter.java b/airbyte-integrations/bases/debezium-v1-9-2/src/main/java/io/airbyte/integrations/debezium/internals/MSSQLConverter.java new file mode 100644 index 000000000000..9420b8ba9758 --- /dev/null +++ b/airbyte-integrations/bases/debezium-v1-9-2/src/main/java/io/airbyte/integrations/debezium/internals/MSSQLConverter.java @@ -0,0 +1,87 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.integrations.debezium.internals; + +import io.airbyte.db.DataTypeUtils; +import io.debezium.spi.converter.CustomConverter; +import io.debezium.spi.converter.RelationalColumn; +import java.math.BigDecimal; +import java.sql.Timestamp; +import java.util.Objects; +import java.util.Properties; +import java.util.Set; +import org.apache.kafka.connect.data.SchemaBuilder; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class MSSQLConverter implements CustomConverter { + + private final Logger LOGGER = LoggerFactory.getLogger(MSSQLConverter.class); + + private final Set DATE_TYPES = Set.of("DATE", "DATETIME", "DATETIME2", "DATETIMEOFFSET", "SMALLDATETIME"); + private final String TIME_TYPE = "TIME"; + private final String SMALLMONEY_TYPE = "SMALLMONEY"; + + @Override + public void configure(Properties props) {} + + @Override + public void converterFor(final RelationalColumn field, + final ConverterRegistration registration) { + if (DATE_TYPES.contains(field.typeName().toUpperCase())) { + registerDate(field, registration); + } else if (SMALLMONEY_TYPE.equalsIgnoreCase(field.typeName())) { + registerMoney(field, registration); + } else if (TIME_TYPE.equalsIgnoreCase(field.typeName())) { + registerTime(field, registration); + } + } + + private void registerDate(final RelationalColumn field, + final ConverterRegistration registration) { + registration.register(SchemaBuilder.string(), input -> { + if (Objects.isNull(input)) 
{ + return DebeziumConverterUtils.convertDefaultValue(field); + } + + return DebeziumConverterUtils.convertDate(input); + }); + } + + private void registerTime(final RelationalColumn field, + final ConverterRegistration registration) { + registration.register(SchemaBuilder.string(), input -> { + if (Objects.isNull(input)) { + return DebeziumConverterUtils.convertDefaultValue(field); + } + + if (input instanceof Timestamp) { + return DataTypeUtils.toISOTimeString(((Timestamp) input).toLocalDateTime()); + } + + LOGGER.warn("Uncovered time class type '{}'. Use default converter", + input.getClass().getName()); + return input.toString(); + }); + } + + private void registerMoney(final RelationalColumn field, + final ConverterRegistration registration) { + registration.register(SchemaBuilder.float64(), input -> { + if (Objects.isNull(input)) { + return DebeziumConverterUtils.convertDefaultValue(field); + } + + if (input instanceof BigDecimal) { + return ((BigDecimal) input).doubleValue(); + } + + LOGGER.warn("Uncovered money class type '{}'. 
Use default converter", + input.getClass().getName()); + return input.toString(); + }); + } + +} diff --git a/airbyte-integrations/bases/debezium/src/main/java/io/airbyte/integrations/debezium/internals/MySQLConverter.java b/airbyte-integrations/bases/debezium-v1-9-2/src/main/java/io/airbyte/integrations/debezium/internals/MySQLConverter.java similarity index 100% rename from airbyte-integrations/bases/debezium/src/main/java/io/airbyte/integrations/debezium/internals/MySQLConverter.java rename to airbyte-integrations/bases/debezium-v1-9-2/src/main/java/io/airbyte/integrations/debezium/internals/MySQLConverter.java diff --git a/airbyte-integrations/bases/debezium-v1-9-2/src/main/java/io/airbyte/integrations/debezium/internals/PostgresConverter.java b/airbyte-integrations/bases/debezium-v1-9-2/src/main/java/io/airbyte/integrations/debezium/internals/PostgresConverter.java new file mode 100644 index 000000000000..aee741b6aaca --- /dev/null +++ b/airbyte-integrations/bases/debezium-v1-9-2/src/main/java/io/airbyte/integrations/debezium/internals/PostgresConverter.java @@ -0,0 +1,134 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+ */ + +package io.airbyte.integrations.debezium.internals; + +import io.debezium.spi.converter.CustomConverter; +import io.debezium.spi.converter.RelationalColumn; +import java.math.BigDecimal; +import java.nio.charset.StandardCharsets; +import java.util.Arrays; +import java.util.Properties; +import org.apache.kafka.connect.data.SchemaBuilder; +import org.postgresql.util.PGInterval; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class PostgresConverter implements CustomConverter { + + private static final Logger LOGGER = LoggerFactory.getLogger(PostgresConverter.class); + + private final String[] DATE_TYPES = {"DATE", "DATETIME", "TIME", "TIMETZ", "INTERVAL", "TIMESTAMP"}; + private final String[] BIT_TYPES = {"BIT", "VARBIT"}; + private final String[] MONEY_ITEM_TYPE = {"MONEY"}; + private final String[] GEOMETRICS_TYPES = {"BOX", "CIRCLE", "LINE", "LSEG", "POINT", "POLYGON", "PATH"}; + private final String[] TEXT_TYPES = + {"VARCHAR", "VARBINARY", "BLOB", "TEXT", "LONGTEXT", "TINYTEXT", "MEDIUMTEXT", "INVENTORY_ITEM", "TSVECTOR", "TSQUERY"}; + + @Override + public void configure(final Properties props) {} + + @Override + public void converterFor(final RelationalColumn field, final ConverterRegistration registration) { + if (Arrays.stream(DATE_TYPES).anyMatch(s -> s.equalsIgnoreCase(field.typeName()))) { + registerDate(field, registration); + } else if (Arrays.stream(TEXT_TYPES).anyMatch(s -> s.equalsIgnoreCase(field.typeName())) + || Arrays.stream(GEOMETRICS_TYPES).anyMatch(s -> s.equalsIgnoreCase(field.typeName())) + || Arrays.stream(BIT_TYPES).anyMatch(s -> s.equalsIgnoreCase(field.typeName()))) { + registerText(field, registration); + } else if (Arrays.stream(MONEY_ITEM_TYPE).anyMatch(s -> s.equalsIgnoreCase(field.typeName()))) { + registerMoney(field, registration); + } + } + + private void registerText(final RelationalColumn field, final ConverterRegistration registration) { + registration.register(SchemaBuilder.string().optional(), x 
-> { + if (x == null) { + return DebeziumConverterUtils.convertDefaultValue(field); + } + + if (x instanceof byte[]) { + return new String((byte[]) x, StandardCharsets.UTF_8); + } else { + return x.toString(); + } + }); + } + + private void registerDate(final RelationalColumn field, final ConverterRegistration registration) { + registration.register(SchemaBuilder.string().optional(), x -> { + if (x == null) { + return DebeziumConverterUtils.convertDefaultValue(field); + } else if (x instanceof PGInterval) { + return convertInterval((PGInterval) x); + } else { + return DebeziumConverterUtils.convertDate(x); + } + }); + } + + private String convertInterval(final PGInterval pgInterval) { + final StringBuilder resultInterval = new StringBuilder(); + formatDateUnit(resultInterval, pgInterval.getYears(), " year "); + formatDateUnit(resultInterval, pgInterval.getMonths(), " mons "); + formatDateUnit(resultInterval, pgInterval.getDays(), " days "); + + formatTimeValues(resultInterval, pgInterval); + return resultInterval.toString(); + } + + private void registerMoney(final RelationalColumn field, final ConverterRegistration registration) { + registration.register(SchemaBuilder.string().optional(), x -> { + if (x == null) { + return DebeziumConverterUtils.convertDefaultValue(field); + } else if (x instanceof Double) { + final BigDecimal result = BigDecimal.valueOf((Double) x); + if (result.compareTo(new BigDecimal("999999999999999")) == 1 + || result.compareTo(new BigDecimal("-999999999999999")) == -1) { + return null; + } + return result.toString(); + } else { + return x.toString(); + } + }); + } + + private void formatDateUnit(final StringBuilder resultInterval, final int dateUnit, final String s) { + if (dateUnit != 0) { + resultInterval + .append(dateUnit) + .append(s); + } + } + + private void formatTimeValues(final StringBuilder resultInterval, final PGInterval pgInterval) { + if (isNegativeTime(pgInterval)) { + resultInterval.append("-"); + } + // TODO check if value 
more or less than Integer.MIN_VALUE Integer.MAX_VALUE, + final int hours = Math.abs(pgInterval.getHours()); + final int minutes = Math.abs(pgInterval.getMinutes()); + final int seconds = Math.abs(pgInterval.getWholeSeconds()); + resultInterval.append(addFirstDigit(hours)); + resultInterval.append(hours); + resultInterval.append(":"); + resultInterval.append(addFirstDigit(minutes)); + resultInterval.append(minutes); + resultInterval.append(":"); + resultInterval.append(addFirstDigit(seconds)); + resultInterval.append(seconds); + } + + private String addFirstDigit(final int hours) { + return hours <= 9 ? "0" : ""; + } + + private boolean isNegativeTime(final PGInterval pgInterval) { + return pgInterval.getHours() < 0 + || pgInterval.getMinutes() < 0 + || pgInterval.getWholeSeconds() < 0; + } + +} diff --git a/airbyte-integrations/bases/debezium-v1-9-2/src/main/java/io/airbyte/integrations/debezium/internals/SnapshotMetadata.java b/airbyte-integrations/bases/debezium-v1-9-2/src/main/java/io/airbyte/integrations/debezium/internals/SnapshotMetadata.java new file mode 100644 index 000000000000..b24cdf71fbe6 --- /dev/null +++ b/airbyte-integrations/bases/debezium-v1-9-2/src/main/java/io/airbyte/integrations/debezium/internals/SnapshotMetadata.java @@ -0,0 +1,11 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.integrations.debezium.internals; + +public enum SnapshotMetadata { + TRUE, + FALSE, + LAST +} diff --git a/airbyte-integrations/bases/debezium-v1-9-2/src/test/java/io/airbyte/integrations/debezium/AirbyteDebeziumHandlerTest.java b/airbyte-integrations/bases/debezium-v1-9-2/src/test/java/io/airbyte/integrations/debezium/AirbyteDebeziumHandlerTest.java new file mode 100644 index 000000000000..45d50612f792 --- /dev/null +++ b/airbyte-integrations/bases/debezium-v1-9-2/src/test/java/io/airbyte/integrations/debezium/AirbyteDebeziumHandlerTest.java @@ -0,0 +1,56 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+ */ + +package io.airbyte.integrations.debezium; + +import com.google.common.collect.Lists; +import io.airbyte.protocol.models.AirbyteCatalog; +import io.airbyte.protocol.models.CatalogHelpers; +import io.airbyte.protocol.models.ConfiguredAirbyteCatalog; +import io.airbyte.protocol.models.Field; +import io.airbyte.protocol.models.JsonSchemaType; +import io.airbyte.protocol.models.SyncMode; +import java.util.List; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; + +public class AirbyteDebeziumHandlerTest { + + @Test + public void shouldUseCdcTestShouldReturnTrue() { + final AirbyteCatalog catalog = new AirbyteCatalog().withStreams(List.of( + CatalogHelpers.createAirbyteStream( + "MODELS_STREAM_NAME", + "MODELS_SCHEMA", + Field.of("COL_ID", JsonSchemaType.NUMBER), + Field.of("COL_MAKE_ID", JsonSchemaType.NUMBER), + Field.of("COL_MODEL", JsonSchemaType.STRING)) + .withSupportedSyncModes(Lists.newArrayList(SyncMode.FULL_REFRESH, SyncMode.INCREMENTAL)) + .withSourceDefinedPrimaryKey(List.of(List.of("COL_ID"))))); + final ConfiguredAirbyteCatalog configuredCatalog = CatalogHelpers + .toDefaultConfiguredCatalog(catalog); + // set all streams to incremental. 
+ configuredCatalog.getStreams().forEach(s -> s.setSyncMode(SyncMode.INCREMENTAL)); + + Assertions.assertTrue(AirbyteDebeziumHandler.shouldUseCDC(configuredCatalog)); + } + + @Test + public void shouldUseCdcTestShouldReturnFalse() { + final AirbyteCatalog catalog = new AirbyteCatalog().withStreams(List.of( + CatalogHelpers.createAirbyteStream( + "MODELS_STREAM_NAME", + "MODELS_SCHEMA", + Field.of("COL_ID", JsonSchemaType.NUMBER), + Field.of("COL_MAKE_ID", JsonSchemaType.NUMBER), + Field.of("COL_MODEL", JsonSchemaType.STRING)) + .withSupportedSyncModes(Lists.newArrayList(SyncMode.FULL_REFRESH, SyncMode.INCREMENTAL)) + .withSourceDefinedPrimaryKey(List.of(List.of("COL_ID"))))); + final ConfiguredAirbyteCatalog configuredCatalog = CatalogHelpers + .toDefaultConfiguredCatalog(catalog); + + Assertions.assertFalse(AirbyteDebeziumHandler.shouldUseCDC(configuredCatalog)); + } + +} diff --git a/airbyte-integrations/bases/debezium-v1-9-2/src/test/java/io/airbyte/integrations/debezium/AirbyteFileOffsetBackingStoreTest.java b/airbyte-integrations/bases/debezium-v1-9-2/src/test/java/io/airbyte/integrations/debezium/AirbyteFileOffsetBackingStoreTest.java new file mode 100644 index 000000000000..9f1e6d0ea052 --- /dev/null +++ b/airbyte-integrations/bases/debezium-v1-9-2/src/test/java/io/airbyte/integrations/debezium/AirbyteFileOffsetBackingStoreTest.java @@ -0,0 +1,50 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+ */ + +package io.airbyte.integrations.debezium; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; + +import com.fasterxml.jackson.databind.JsonNode; +import io.airbyte.commons.io.IOs; +import io.airbyte.commons.json.Jsons; +import io.airbyte.commons.resources.MoreResources; +import io.airbyte.integrations.debezium.internals.AirbyteFileOffsetBackingStore; +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.Map; +import org.junit.jupiter.api.Test; + +class AirbyteFileOffsetBackingStoreTest { + + @SuppressWarnings("UnstableApiUsage") + @Test + void test() throws IOException { + final Path testRoot = Files.createTempDirectory(Path.of("/tmp"), "offset-store-test"); + + final byte[] bytes = MoreResources.readBytes("test_debezium_offset.dat"); + final Path templateFilePath = testRoot.resolve("template_offset.dat"); + IOs.writeFile(templateFilePath, bytes); + + final Path writeFilePath = testRoot.resolve("offset.dat"); + + final AirbyteFileOffsetBackingStore offsetStore = new AirbyteFileOffsetBackingStore(templateFilePath); + final Map offset = offsetStore.read(); + + final JsonNode asJson = Jsons.jsonNode(offset); + + final AirbyteFileOffsetBackingStore offsetStore2 = new AirbyteFileOffsetBackingStore(writeFilePath); + offsetStore2.persist(asJson); + + final Map stateFromOffsetStoreRoundTrip = offsetStore2.read(); + + // verify that, after a round trip through the offset store, we get back the same data. + assertEquals(offset, stateFromOffsetStoreRoundTrip); + // verify that the file written by the offset store is identical to the template file. 
+ assertTrue(com.google.common.io.Files.equal(templateFilePath.toFile(), writeFilePath.toFile())); + } + +} diff --git a/airbyte-integrations/bases/debezium-v1-9-2/src/test/java/io/airbyte/integrations/debezium/DebeziumEventUtilsTest.java b/airbyte-integrations/bases/debezium-v1-9-2/src/test/java/io/airbyte/integrations/debezium/DebeziumEventUtilsTest.java new file mode 100644 index 000000000000..4de1b36524dd --- /dev/null +++ b/airbyte-integrations/bases/debezium-v1-9-2/src/test/java/io/airbyte/integrations/debezium/DebeziumEventUtilsTest.java @@ -0,0 +1,88 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.integrations.debezium; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.node.ObjectNode; +import io.airbyte.commons.json.Jsons; +import io.airbyte.commons.resources.MoreResources; +import io.airbyte.integrations.debezium.internals.DebeziumEventUtils; +import io.airbyte.protocol.models.AirbyteMessage; +import io.airbyte.protocol.models.AirbyteRecordMessage; +import io.debezium.engine.ChangeEvent; +import java.io.IOException; +import java.time.Instant; +import org.junit.jupiter.api.Test; + +class DebeziumEventUtilsTest { + + @Test + public void testConvertChangeEvent() throws IOException { + final String stream = "names"; + final Instant emittedAt = Instant.now(); + final CdcMetadataInjector cdcMetadataInjector = new DummyMetadataInjector(); + final ChangeEvent insertChangeEvent = mockChangeEvent("insert_change_event.json"); + final ChangeEvent updateChangeEvent = mockChangeEvent("update_change_event.json"); + final ChangeEvent deleteChangeEvent = mockChangeEvent("delete_change_event.json"); + + final AirbyteMessage actualInsert = DebeziumEventUtils.toAirbyteMessage(insertChangeEvent, cdcMetadataInjector, emittedAt); + final 
AirbyteMessage actualUpdate = DebeziumEventUtils.toAirbyteMessage(updateChangeEvent, cdcMetadataInjector, emittedAt); + final AirbyteMessage actualDelete = DebeziumEventUtils.toAirbyteMessage(deleteChangeEvent, cdcMetadataInjector, emittedAt); + + final AirbyteMessage expectedInsert = createAirbyteMessage(stream, emittedAt, "insert_message.json"); + final AirbyteMessage expectedUpdate = createAirbyteMessage(stream, emittedAt, "update_message.json"); + final AirbyteMessage expectedDelete = createAirbyteMessage(stream, emittedAt, "delete_message.json"); + + deepCompare(expectedInsert, actualInsert); + deepCompare(expectedUpdate, actualUpdate); + deepCompare(expectedDelete, actualDelete); + } + + private static ChangeEvent mockChangeEvent(final String resourceName) throws IOException { + final ChangeEvent mocked = mock(ChangeEvent.class); + final String resource = MoreResources.readResource(resourceName); + when(mocked.value()).thenReturn(resource); + + return mocked; + } + + private static AirbyteMessage createAirbyteMessage(final String stream, final Instant emittedAt, final String resourceName) throws IOException { + final String data = MoreResources.readResource(resourceName); + + final AirbyteRecordMessage recordMessage = new AirbyteRecordMessage() + .withStream(stream) + .withNamespace("public") + .withData(Jsons.deserialize(data)) + .withEmittedAt(emittedAt.toEpochMilli()); + + return new AirbyteMessage() + .withType(AirbyteMessage.Type.RECORD) + .withRecord(recordMessage); + } + + private static void deepCompare(final Object expected, final Object actual) { + assertEquals(Jsons.deserialize(Jsons.serialize(expected)), Jsons.deserialize(Jsons.serialize(actual))); + } + + public static class DummyMetadataInjector implements CdcMetadataInjector { + + @Override + public void addMetaData(final ObjectNode event, final JsonNode source) { + final long lsn = source.get("lsn").asLong(); + event.put("_ab_cdc_lsn", lsn); + } + + @Override + public String namespace(final 
JsonNode source) { + return source.get("schema").asText(); + } + + } + +} diff --git a/airbyte-integrations/bases/debezium-v1-9-2/src/test/java/io/airbyte/integrations/debezium/DebeziumRecordPublisherTest.java b/airbyte-integrations/bases/debezium-v1-9-2/src/test/java/io/airbyte/integrations/debezium/DebeziumRecordPublisherTest.java new file mode 100644 index 000000000000..31dacbc563ae --- /dev/null +++ b/airbyte-integrations/bases/debezium-v1-9-2/src/test/java/io/airbyte/integrations/debezium/DebeziumRecordPublisherTest.java @@ -0,0 +1,43 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.integrations.debezium; + +import static org.junit.jupiter.api.Assertions.*; + +import com.google.common.collect.ImmutableList; +import io.airbyte.integrations.debezium.internals.DebeziumRecordPublisher; +import io.airbyte.protocol.models.CatalogHelpers; +import io.airbyte.protocol.models.ConfiguredAirbyteCatalog; +import io.airbyte.protocol.models.SyncMode; +import org.junit.jupiter.api.Test; + +class DebeziumRecordPublisherTest { + + @Test + public void testWhitelistCreation() { + final ConfiguredAirbyteCatalog catalog = new ConfiguredAirbyteCatalog().withStreams(ImmutableList.of( + CatalogHelpers.createConfiguredAirbyteStream("id_and_name", "public").withSyncMode(SyncMode.INCREMENTAL), + CatalogHelpers.createConfiguredAirbyteStream("id_,something", "public").withSyncMode(SyncMode.INCREMENTAL), + CatalogHelpers.createConfiguredAirbyteStream("n\"aMĆ©S", "public").withSyncMode(SyncMode.INCREMENTAL))); + + final String expectedWhitelist = "public.id_and_name,public.id_\\,something,public.n\"aMĆ©S"; + final String actualWhitelist = DebeziumRecordPublisher.getTableWhitelist(catalog); + + assertEquals(expectedWhitelist, actualWhitelist); + } + + @Test + public void testWhitelistFiltersFullRefresh() { + final ConfiguredAirbyteCatalog catalog = new ConfiguredAirbyteCatalog().withStreams(ImmutableList.of( + 
CatalogHelpers.createConfiguredAirbyteStream("id_and_name", "public").withSyncMode(SyncMode.INCREMENTAL), + CatalogHelpers.createConfiguredAirbyteStream("id_and_name2", "public").withSyncMode(SyncMode.FULL_REFRESH))); + + final String expectedWhitelist = "public.id_and_name"; + final String actualWhitelist = DebeziumRecordPublisher.getTableWhitelist(catalog); + + assertEquals(expectedWhitelist, actualWhitelist); + } + +} diff --git a/airbyte-integrations/bases/debezium-v1-9-2/src/test/java/io/airbyte/integrations/debezium/internals/DebeziumConverterUtilsTest.java b/airbyte-integrations/bases/debezium-v1-9-2/src/test/java/io/airbyte/integrations/debezium/internals/DebeziumConverterUtilsTest.java new file mode 100644 index 000000000000..facb86d0bc54 --- /dev/null +++ b/airbyte-integrations/bases/debezium-v1-9-2/src/test/java/io/airbyte/integrations/debezium/internals/DebeziumConverterUtilsTest.java @@ -0,0 +1,102 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.integrations.debezium.internals; + +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +import io.debezium.spi.converter.RelationalColumn; +import java.sql.Timestamp; +import java.time.Duration; +import java.time.LocalDate; +import java.time.LocalDateTime; +import java.time.LocalTime; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.Test; + +class DebeziumConverterUtilsTest { + + @Test + public void convertDefaultValueTest() { + + final RelationalColumn relationalColumn = mock(RelationalColumn.class); + + when(relationalColumn.isOptional()).thenReturn(true); + Object actualColumnDefaultValue = DebeziumConverterUtils.convertDefaultValue(relationalColumn); + Assertions.assertNull(actualColumnDefaultValue, "Default value for optional relational column should be null"); + + when(relationalColumn.isOptional()).thenReturn(false); + 
when(relationalColumn.hasDefaultValue()).thenReturn(false); + actualColumnDefaultValue = DebeziumConverterUtils.convertDefaultValue(relationalColumn); + Assertions.assertNull(actualColumnDefaultValue); + + when(relationalColumn.isOptional()).thenReturn(false); + when(relationalColumn.hasDefaultValue()).thenReturn(true); + final String expectedColumnDefaultValue = "default value"; + when(relationalColumn.defaultValue()).thenReturn(expectedColumnDefaultValue); + actualColumnDefaultValue = DebeziumConverterUtils.convertDefaultValue(relationalColumn); + Assertions.assertEquals(actualColumnDefaultValue, expectedColumnDefaultValue); + } + + @Test + public void convertLocalDate() { + final LocalDate localDate = LocalDate.of(2021, 1, 1); + + final String actual = DebeziumConverterUtils.convertDate(localDate); + Assertions.assertEquals("2021-01-01T00:00:00Z", actual); + } + + @Test + public void convertTLocalTime() { + final LocalTime localTime = LocalTime.of(8, 1, 1); + final String actual = DebeziumConverterUtils.convertDate(localTime); + Assertions.assertEquals("08:01:01", actual); + } + + @Test + public void convertLocalDateTime() { + final LocalDateTime localDateTime = LocalDateTime.of(2021, 1, 1, 8, 1, 1); + + final String actual = DebeziumConverterUtils.convertDate(localDateTime); + Assertions.assertEquals("2021-01-01T08:01:01Z", actual); + } + + @Test + @Disabled + public void convertDuration() { + final Duration duration = Duration.ofHours(100_000); + + final String actual = DebeziumConverterUtils.convertDate(duration); + Assertions.assertEquals("1981-05-29T20:00:00Z", actual); + } + + @Test + public void convertTimestamp() { + final LocalDateTime localDateTime = LocalDateTime.of(2021, 1, 1, 8, 1, 1); + final Timestamp timestamp = Timestamp.valueOf(localDateTime); + + final String actual = DebeziumConverterUtils.convertDate(timestamp); + Assertions.assertEquals("2021-01-01T08:01:01.000000Z", actual); + } + + @Test + @Disabled + public void convertNumber() { + final 
Number number = 100_000; + + final String actual = DebeziumConverterUtils.convertDate(number); + Assertions.assertEquals("1970-01-01T03:01:40Z", actual); + } + + @Test + public void convertStringDateFormat() { + final String stringValue = "2021-01-01T00:00:00Z"; + + final String actual = DebeziumConverterUtils.convertDate(stringValue); + Assertions.assertEquals("2021-01-01T00:00:00Z", actual); + } + +} diff --git a/airbyte-integrations/bases/debezium-v1-9-2/src/test/resources/delete_change_event.json b/airbyte-integrations/bases/debezium-v1-9-2/src/test/resources/delete_change_event.json new file mode 100644 index 000000000000..07b575bf7e2c --- /dev/null +++ b/airbyte-integrations/bases/debezium-v1-9-2/src/test/resources/delete_change_event.json @@ -0,0 +1,25 @@ +{ + "before": { + "first_name": "san", + "last_name": "goku", + "power": null + }, + "after": null, + "source": { + "version": "1.4.2.Final", + "connector": "postgresql", + "name": "orders", + "ts_ms": 1616775646886, + "snapshot": false, + "db": "db_lwfoyffqvx", + "schema": "public", + "table": "names", + "txId": 498, + "lsn": 23012360, + "xmin": null + }, + "op": "d", + "ts_ms": 1616775646931, + "transaction": null, + "destination": "orders.public.names" +} diff --git a/airbyte-integrations/bases/debezium-v1-9-2/src/test/resources/delete_message.json b/airbyte-integrations/bases/debezium-v1-9-2/src/test/resources/delete_message.json new file mode 100644 index 000000000000..676ee5b74ffe --- /dev/null +++ b/airbyte-integrations/bases/debezium-v1-9-2/src/test/resources/delete_message.json @@ -0,0 +1,8 @@ +{ + "first_name": "san", + "last_name": "goku", + "power": null, + "_ab_cdc_updated_at": "2021-03-26T16:20:46.886Z", + "_ab_cdc_lsn": 23012360, + "_ab_cdc_deleted_at": "2021-03-26T16:20:46.886Z" +} diff --git a/airbyte-integrations/bases/debezium-v1-9-2/src/test/resources/insert_change_event.json b/airbyte-integrations/bases/debezium-v1-9-2/src/test/resources/insert_change_event.json new file mode 100644 
index 000000000000..4b2c2fb6e2cf --- /dev/null +++ b/airbyte-integrations/bases/debezium-v1-9-2/src/test/resources/insert_change_event.json @@ -0,0 +1,25 @@ +{ + "before": null, + "after": { + "first_name": "san", + "last_name": "goku", + "power": "Infinity" + }, + "source": { + "version": "1.4.2.Final", + "connector": "postgresql", + "name": "orders", + "ts_ms": 1616775642623, + "snapshot": true, + "db": "db_lwfoyffqvx", + "schema": "public", + "table": "names", + "txId": 495, + "lsn": 23011544, + "xmin": null + }, + "op": "r", + "ts_ms": 1616775642624, + "transaction": null, + "destination": "orders.public.names" +} diff --git a/airbyte-integrations/bases/debezium-v1-9-2/src/test/resources/insert_message.json b/airbyte-integrations/bases/debezium-v1-9-2/src/test/resources/insert_message.json new file mode 100644 index 000000000000..d971d32c1766 --- /dev/null +++ b/airbyte-integrations/bases/debezium-v1-9-2/src/test/resources/insert_message.json @@ -0,0 +1,8 @@ +{ + "first_name": "san", + "last_name": "goku", + "power": "Infinity", + "_ab_cdc_updated_at": "2021-03-26T16:20:42.623Z", + "_ab_cdc_lsn": 23011544, + "_ab_cdc_deleted_at": null +} diff --git a/airbyte-integrations/bases/debezium-v1-9-2/src/test/resources/test_debezium_offset.dat b/airbyte-integrations/bases/debezium-v1-9-2/src/test/resources/test_debezium_offset.dat new file mode 100644 index 000000000000..c7e7054916ed Binary files /dev/null and b/airbyte-integrations/bases/debezium-v1-9-2/src/test/resources/test_debezium_offset.dat differ diff --git a/airbyte-integrations/bases/debezium-v1-9-2/src/test/resources/update_change_event.json b/airbyte-integrations/bases/debezium-v1-9-2/src/test/resources/update_change_event.json new file mode 100644 index 000000000000..da5dcd9c2b06 --- /dev/null +++ b/airbyte-integrations/bases/debezium-v1-9-2/src/test/resources/update_change_event.json @@ -0,0 +1,25 @@ +{ + "before": null, + "after": { + "first_name": "san", + "last_name": "goku", + "power": 10000.2 + }, + 
"source": { + "version": "1.4.2.Final", + "connector": "postgresql", + "name": "orders", + "ts_ms": 1616775646881, + "snapshot": false, + "db": "db_lwfoyffqvx", + "schema": "public", + "table": "names", + "txId": 497, + "lsn": 23012216, + "xmin": null + }, + "op": "u", + "ts_ms": 1616775646929, + "transaction": null, + "destination": "orders.public.names" +} diff --git a/airbyte-integrations/bases/debezium-v1-9-2/src/test/resources/update_message.json b/airbyte-integrations/bases/debezium-v1-9-2/src/test/resources/update_message.json new file mode 100644 index 000000000000..89b9a08038aa --- /dev/null +++ b/airbyte-integrations/bases/debezium-v1-9-2/src/test/resources/update_message.json @@ -0,0 +1,8 @@ +{ + "first_name": "san", + "last_name": "goku", + "power": 10000.2, + "_ab_cdc_updated_at": "2021-03-26T16:20:46.881Z", + "_ab_cdc_lsn": 23012216, + "_ab_cdc_deleted_at": null +} diff --git a/airbyte-integrations/bases/debezium-v1-9-2/src/testFixtures/java/io/airbyte/integrations/debezium/CdcSourceTest.java b/airbyte-integrations/bases/debezium-v1-9-2/src/testFixtures/java/io/airbyte/integrations/debezium/CdcSourceTest.java new file mode 100644 index 000000000000..441de6ff481e --- /dev/null +++ b/airbyte-integrations/bases/debezium-v1-9-2/src/testFixtures/java/io/airbyte/integrations/debezium/CdcSourceTest.java @@ -0,0 +1,625 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+ */ + +package io.airbyte.integrations.debezium; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertTrue; + +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.node.ObjectNode; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.Lists; +import com.google.common.collect.Sets; +import com.google.common.collect.Streams; +import io.airbyte.commons.json.Jsons; +import io.airbyte.commons.util.AutoCloseableIterator; +import io.airbyte.commons.util.AutoCloseableIterators; +import io.airbyte.db.Database; +import io.airbyte.integrations.base.Source; +import io.airbyte.protocol.models.AirbyteCatalog; +import io.airbyte.protocol.models.AirbyteConnectionStatus; +import io.airbyte.protocol.models.AirbyteMessage; +import io.airbyte.protocol.models.AirbyteMessage.Type; +import io.airbyte.protocol.models.AirbyteRecordMessage; +import io.airbyte.protocol.models.AirbyteStateMessage; +import io.airbyte.protocol.models.AirbyteStream; +import io.airbyte.protocol.models.CatalogHelpers; +import io.airbyte.protocol.models.ConfiguredAirbyteCatalog; +import io.airbyte.protocol.models.ConfiguredAirbyteStream; +import io.airbyte.protocol.models.Field; +import io.airbyte.protocol.models.JsonSchemaType; +import io.airbyte.protocol.models.SyncMode; +import java.sql.SQLException; +import java.util.ArrayList; +import java.util.Collections; +import java.util.Comparator; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Optional; +import java.util.Set; +import java.util.stream.Collectors; +import java.util.stream.Stream; +import org.junit.jupiter.api.DisplayName; +import org.junit.jupiter.api.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public abstract class CdcSourceTest { + + private static 
final Logger LOGGER = LoggerFactory.getLogger(CdcSourceTest.class); + + protected static final String MODELS_SCHEMA = "models_schema"; + protected static final String MODELS_STREAM_NAME = "models"; + private static final Set STREAM_NAMES = Sets + .newHashSet(MODELS_STREAM_NAME); + protected static final String COL_ID = "id"; + protected static final String COL_MAKE_ID = "make_id"; + protected static final String COL_MODEL = "model"; + + protected static final AirbyteCatalog CATALOG = new AirbyteCatalog().withStreams(List.of( + CatalogHelpers.createAirbyteStream( + MODELS_STREAM_NAME, + MODELS_SCHEMA, + Field.of(COL_ID, JsonSchemaType.NUMBER), + Field.of(COL_MAKE_ID, JsonSchemaType.NUMBER), + Field.of(COL_MODEL, JsonSchemaType.STRING)) + .withSupportedSyncModes(Lists.newArrayList(SyncMode.FULL_REFRESH, SyncMode.INCREMENTAL)) + .withSourceDefinedPrimaryKey(List.of(List.of(COL_ID))))); + protected static final ConfiguredAirbyteCatalog CONFIGURED_CATALOG = CatalogHelpers + .toDefaultConfiguredCatalog(CATALOG); + + // set all streams to incremental. 
+ static { + CONFIGURED_CATALOG.getStreams().forEach(s -> s.setSyncMode(SyncMode.INCREMENTAL)); + } + + protected static final List MODEL_RECORDS = ImmutableList.of( + Jsons.jsonNode(ImmutableMap.of(COL_ID, 11, COL_MAKE_ID, 1, COL_MODEL, "Fiesta")), + Jsons.jsonNode(ImmutableMap.of(COL_ID, 12, COL_MAKE_ID, 1, COL_MODEL, "Focus")), + Jsons.jsonNode(ImmutableMap.of(COL_ID, 13, COL_MAKE_ID, 1, COL_MODEL, "Ranger")), + Jsons.jsonNode(ImmutableMap.of(COL_ID, 14, COL_MAKE_ID, 2, COL_MODEL, "GLA")), + Jsons.jsonNode(ImmutableMap.of(COL_ID, 15, COL_MAKE_ID, 2, COL_MODEL, "A 220")), + Jsons.jsonNode(ImmutableMap.of(COL_ID, 16, COL_MAKE_ID, 2, COL_MODEL, "E 350"))); + + protected void setup() throws SQLException { + createAndPopulateTables(); + } + + private void createAndPopulateTables() { + createAndPopulateActualTable(); + createAndPopulateRandomTable(); + } + + protected void executeQuery(final String query) { + try { + getDatabase().query( + ctx -> ctx + .execute(query)); + } catch (final SQLException e) { + throw new RuntimeException(e); + } + } + + public String columnClause(final Map columnsWithDataType, final Optional primaryKey) { + final StringBuilder columnClause = new StringBuilder(); + int i = 0; + for (final Map.Entry column : columnsWithDataType.entrySet()) { + columnClause.append(column.getKey()); + columnClause.append(" "); + columnClause.append(column.getValue()); + if (i < (columnsWithDataType.size() - 1)) { + columnClause.append(","); + columnClause.append(" "); + } + i++; + } + primaryKey.ifPresent(s -> columnClause.append(", PRIMARY KEY (").append(s).append(")")); + + return columnClause.toString(); + } + + public void createTable(final String schemaName, final String tableName, final String columnClause) { + executeQuery(createTableQuery(schemaName, tableName, columnClause)); + } + + public String createTableQuery(final String schemaName, final String tableName, final String columnClause) { + return String.format("CREATE TABLE %s.%s(%s);", schemaName, 
tableName, columnClause); + } + + public void createSchema(final String schemaName) { + executeQuery(createSchemaQuery(schemaName)); + } + + public String createSchemaQuery(final String schemaName) { + return "CREATE DATABASE " + schemaName + ";"; + } + + private void createAndPopulateActualTable() { + createSchema(MODELS_SCHEMA); + createTable(MODELS_SCHEMA, MODELS_STREAM_NAME, + columnClause(ImmutableMap.of(COL_ID, "INTEGER", COL_MAKE_ID, "INTEGER", COL_MODEL, "VARCHAR(200)"), Optional.of(COL_ID))); + for (final JsonNode recordJson : MODEL_RECORDS) { + writeModelRecord(recordJson); + } + } + + /** + * This database and table is not part of Airbyte sync. It is being created just to make sure the + * databases not being synced by Airbyte are not causing issues with our debezium logic + */ + private void createAndPopulateRandomTable() { + createSchema(MODELS_SCHEMA + "_random"); + createTable(MODELS_SCHEMA + "_random", MODELS_STREAM_NAME + "_random", + columnClause(ImmutableMap.of(COL_ID + "_random", "INTEGER", COL_MAKE_ID + "_random", "INTEGER", COL_MODEL + "_random", "VARCHAR(200)"), + Optional.of(COL_ID + "_random"))); + final List MODEL_RECORDS_RANDOM = ImmutableList.of( + Jsons + .jsonNode(ImmutableMap + .of(COL_ID + "_random", 11000, COL_MAKE_ID + "_random", 1, COL_MODEL + "_random", + "Fiesta-random")), + Jsons.jsonNode(ImmutableMap + .of(COL_ID + "_random", 12000, COL_MAKE_ID + "_random", 1, COL_MODEL + "_random", + "Focus-random")), + Jsons + .jsonNode(ImmutableMap + .of(COL_ID + "_random", 13000, COL_MAKE_ID + "_random", 1, COL_MODEL + "_random", + "Ranger-random")), + Jsons.jsonNode(ImmutableMap + .of(COL_ID + "_random", 14000, COL_MAKE_ID + "_random", 2, COL_MODEL + "_random", + "GLA-random")), + Jsons.jsonNode(ImmutableMap + .of(COL_ID + "_random", 15000, COL_MAKE_ID + "_random", 2, COL_MODEL + "_random", + "A 220-random")), + Jsons + .jsonNode(ImmutableMap + .of(COL_ID + "_random", 16000, COL_MAKE_ID + "_random", 2, COL_MODEL + "_random", + "E 
350-random"))); + for (final JsonNode recordJson : MODEL_RECORDS_RANDOM) { + writeRecords(recordJson, MODELS_SCHEMA + "_random", MODELS_STREAM_NAME + "_random", + COL_ID + "_random", COL_MAKE_ID + "_random", COL_MODEL + "_random"); + } + } + + protected void writeModelRecord(final JsonNode recordJson) { + writeRecords(recordJson, MODELS_SCHEMA, MODELS_STREAM_NAME, COL_ID, COL_MAKE_ID, COL_MODEL); + } + + private void writeRecords( + final JsonNode recordJson, + final String dbName, + final String streamName, + final String idCol, + final String makeIdCol, + final String modelCol) { + executeQuery( + String.format("INSERT INTO %s.%s (%s, %s, %s) VALUES (%s, %s, '%s');", dbName, streamName, + idCol, makeIdCol, modelCol, + recordJson.get(idCol).asInt(), recordJson.get(makeIdCol).asInt(), + recordJson.get(modelCol).asText())); + } + + protected static Set removeDuplicates(final Set messages) { + final Set existingDataRecordsWithoutUpdated = new HashSet<>(); + final Set output = new HashSet<>(); + + for (final AirbyteRecordMessage message : messages) { + final ObjectNode node = message.getData().deepCopy(); + node.remove("_ab_cdc_updated_at"); + + if (existingDataRecordsWithoutUpdated.contains(node)) { + LOGGER.info("Removing duplicate node: " + node); + } else { + output.add(message); + existingDataRecordsWithoutUpdated.add(node); + } + } + + return output; + } + + protected Set extractRecordMessages(final List messages) { + final List recordMessageList = messages + .stream() + .filter(r -> r.getType() == Type.RECORD).map(AirbyteMessage::getRecord) + .collect(Collectors.toList()); + final Set recordMessageSet = new HashSet<>(recordMessageList); + + assertEquals(recordMessageList.size(), recordMessageSet.size(), + "Expected no duplicates in airbyte record message output for a single sync."); + + return recordMessageSet; + } + + protected List extractStateMessages(final List messages) { + return messages.stream().filter(r -> r.getType() == 
Type.STATE).map(AirbyteMessage::getState) + .collect(Collectors.toList()); + } + + private void assertExpectedRecords(final Set expectedRecords, final Set actualRecords) { + // assume all streams are cdc. + assertExpectedRecords(expectedRecords, actualRecords, actualRecords.stream().map(AirbyteRecordMessage::getStream).collect(Collectors.toSet())); + } + + private void assertExpectedRecords(final Set expectedRecords, + final Set actualRecords, + final Set cdcStreams) { + assertExpectedRecords(expectedRecords, actualRecords, cdcStreams, STREAM_NAMES); + } + + private void assertExpectedRecords(final Set expectedRecords, + final Set actualRecords, + final Set cdcStreams, + final Set streamNames) { + final Set actualData = actualRecords + .stream() + .map(recordMessage -> { + assertTrue(streamNames.contains(recordMessage.getStream())); + assertNotNull(recordMessage.getEmittedAt()); + + assertEquals(MODELS_SCHEMA, recordMessage.getNamespace()); + + final JsonNode data = recordMessage.getData(); + + if (cdcStreams.contains(recordMessage.getStream())) { + assertCdcMetaData(data, true); + } else { + assertNullCdcMetaData(data); + } + + removeCDCColumns((ObjectNode) data); + + return data; + }) + .collect(Collectors.toSet()); + + assertEquals(expectedRecords, actualData); + } + + @Test + @DisplayName("On the first sync, produce returns records that exist in the database.") + void testExistingData() throws Exception { + final CdcTargetPosition targetPosition = cdcLatestTargetPosition(); + final AutoCloseableIterator read = getSource().read(getConfig(), CONFIGURED_CATALOG, null); + final List actualRecords = AutoCloseableIterators.toListAndClose(read); + + final Set recordMessages = extractRecordMessages(actualRecords); + final List stateMessages = extractStateMessages(actualRecords); + + assertNotNull(targetPosition); + recordMessages.forEach(record -> { + assertEquals(extractPosition(record.getData()), targetPosition); + }); + + assertExpectedRecords(new 
HashSet<>(MODEL_RECORDS), recordMessages); + assertEquals(1, stateMessages.size()); + assertNotNull(stateMessages.get(0).getData()); + assertExpectedStateMessages(stateMessages); + } + + @Test + @DisplayName("When a record is deleted, produces a deletion record.") + void testDelete() throws Exception { + final AutoCloseableIterator read1 = getSource() + .read(getConfig(), CONFIGURED_CATALOG, null); + final List actualRecords1 = AutoCloseableIterators.toListAndClose(read1); + final List stateMessages1 = extractStateMessages(actualRecords1); + assertEquals(1, stateMessages1.size()); + assertNotNull(stateMessages1.get(0).getData()); + assertExpectedStateMessages(stateMessages1); + + executeQuery(String + .format("DELETE FROM %s.%s WHERE %s = %s", MODELS_SCHEMA, MODELS_STREAM_NAME, COL_ID, + 11)); + + final JsonNode state = Jsons.jsonNode(stateMessages1); + final AutoCloseableIterator read2 = getSource() + .read(getConfig(), CONFIGURED_CATALOG, state); + final List actualRecords2 = AutoCloseableIterators.toListAndClose(read2); + final List recordMessages2 = new ArrayList<>( + extractRecordMessages(actualRecords2)); + final List stateMessages2 = extractStateMessages(actualRecords2); + assertEquals(1, stateMessages2.size()); + assertNotNull(stateMessages2.get(0).getData()); + assertExpectedStateMessages(stateMessages2); + assertEquals(1, recordMessages2.size()); + assertEquals(11, recordMessages2.get(0).getData().get(COL_ID).asInt()); + assertCdcMetaData(recordMessages2.get(0).getData(), false); + } + + @Test + @DisplayName("When a record is updated, produces an update record.") + void testUpdate() throws Exception { + final String updatedModel = "Explorer"; + final AutoCloseableIterator read1 = getSource() + .read(getConfig(), CONFIGURED_CATALOG, null); + final List actualRecords1 = AutoCloseableIterators.toListAndClose(read1); + final List stateMessages1 = extractStateMessages(actualRecords1); + assertEquals(1, stateMessages1.size()); + 
assertNotNull(stateMessages1.get(0).getData()); + assertExpectedStateMessages(stateMessages1); + + executeQuery(String + .format("UPDATE %s.%s SET %s = '%s' WHERE %s = %s", MODELS_SCHEMA, MODELS_STREAM_NAME, + COL_MODEL, updatedModel, COL_ID, 11)); + + final JsonNode state = Jsons.jsonNode(stateMessages1); + final AutoCloseableIterator read2 = getSource() + .read(getConfig(), CONFIGURED_CATALOG, state); + final List actualRecords2 = AutoCloseableIterators.toListAndClose(read2); + final List recordMessages2 = new ArrayList<>( + extractRecordMessages(actualRecords2)); + final List stateMessages2 = extractStateMessages(actualRecords2); + assertEquals(1, stateMessages2.size()); + assertNotNull(stateMessages2.get(0).getData()); + assertExpectedStateMessages(stateMessages2); + assertEquals(1, recordMessages2.size()); + assertEquals(11, recordMessages2.get(0).getData().get(COL_ID).asInt()); + assertEquals(updatedModel, recordMessages2.get(0).getData().get(COL_MODEL).asText()); + assertCdcMetaData(recordMessages2.get(0).getData(), true); + } + + @SuppressWarnings({"BusyWait", "CodeBlock2Expr"}) + @Test + @DisplayName("Verify that when data is inserted into the database while a sync is happening and after the first sync, it all gets replicated.") + protected void testRecordsProducedDuringAndAfterSync() throws Exception { + + final int recordsToCreate = 20; + // first batch of records. 20 created here and 6 created in setup method. 
+ for (int recordsCreated = 0; recordsCreated < recordsToCreate; recordsCreated++) { + final JsonNode record = + Jsons.jsonNode(ImmutableMap + .of(COL_ID, 100 + recordsCreated, COL_MAKE_ID, 1, COL_MODEL, + "F-" + recordsCreated)); + writeModelRecord(record); + } + + final AutoCloseableIterator firstBatchIterator = getSource() + .read(getConfig(), CONFIGURED_CATALOG, null); + final List dataFromFirstBatch = AutoCloseableIterators + .toListAndClose(firstBatchIterator); + final List stateAfterFirstBatch = extractStateMessages(dataFromFirstBatch); + assertEquals(1, stateAfterFirstBatch.size()); + assertNotNull(stateAfterFirstBatch.get(0).getData()); + assertExpectedStateMessages(stateAfterFirstBatch); + final Set recordsFromFirstBatch = extractRecordMessages( + dataFromFirstBatch); + assertEquals((MODEL_RECORDS.size() + recordsToCreate), recordsFromFirstBatch.size()); + + // second batch of records again 20 being created + for (int recordsCreated = 0; recordsCreated < recordsToCreate; recordsCreated++) { + final JsonNode record = + Jsons.jsonNode(ImmutableMap + .of(COL_ID, 200 + recordsCreated, COL_MAKE_ID, 1, COL_MODEL, + "F-" + recordsCreated)); + writeModelRecord(record); + } + + final JsonNode state = Jsons.jsonNode(stateAfterFirstBatch); + final AutoCloseableIterator secondBatchIterator = getSource() + .read(getConfig(), CONFIGURED_CATALOG, state); + final List dataFromSecondBatch = AutoCloseableIterators + .toListAndClose(secondBatchIterator); + + final List stateAfterSecondBatch = extractStateMessages(dataFromSecondBatch); + assertEquals(1, stateAfterSecondBatch.size()); + assertNotNull(stateAfterSecondBatch.get(0).getData()); + assertExpectedStateMessages(stateAfterSecondBatch); + + final Set recordsFromSecondBatch = extractRecordMessages( + dataFromSecondBatch); + assertEquals(recordsToCreate, recordsFromSecondBatch.size(), + "Expected 20 records to be replicated in the second sync."); + + // sometimes there can be more than one of these at the end of the 
snapshot and just before the + // first incremental. + final Set recordsFromFirstBatchWithoutDuplicates = removeDuplicates( + recordsFromFirstBatch); + final Set recordsFromSecondBatchWithoutDuplicates = removeDuplicates( + recordsFromSecondBatch); + + final int recordsCreatedBeforeTestCount = MODEL_RECORDS.size(); + assertTrue(recordsCreatedBeforeTestCount < recordsFromFirstBatchWithoutDuplicates.size(), + "Expected first sync to include records created while the test was running."); + assertEquals((recordsToCreate * 2) + recordsCreatedBeforeTestCount, + recordsFromFirstBatchWithoutDuplicates.size() + recordsFromSecondBatchWithoutDuplicates + .size()); + } + + @Test + @DisplayName("When both incremental CDC and full refresh are configured for different streams in a sync, the data is replicated as expected.") + void testCdcAndFullRefreshInSameSync() throws Exception { + final ConfiguredAirbyteCatalog configuredCatalog = Jsons.clone(CONFIGURED_CATALOG); + + final List MODEL_RECORDS_2 = ImmutableList.of( + Jsons.jsonNode(ImmutableMap.of(COL_ID, 110, COL_MAKE_ID, 1, COL_MODEL, "Fiesta-2")), + Jsons.jsonNode(ImmutableMap.of(COL_ID, 120, COL_MAKE_ID, 1, COL_MODEL, "Focus-2")), + Jsons.jsonNode(ImmutableMap.of(COL_ID, 130, COL_MAKE_ID, 1, COL_MODEL, "Ranger-2")), + Jsons.jsonNode(ImmutableMap.of(COL_ID, 140, COL_MAKE_ID, 2, COL_MODEL, "GLA-2")), + Jsons.jsonNode(ImmutableMap.of(COL_ID, 150, COL_MAKE_ID, 2, COL_MODEL, "A 220-2")), + Jsons.jsonNode(ImmutableMap.of(COL_ID, 160, COL_MAKE_ID, 2, COL_MODEL, "E 350-2"))); + + createTable(MODELS_SCHEMA, MODELS_STREAM_NAME + "_2", + columnClause(ImmutableMap.of(COL_ID, "INTEGER", COL_MAKE_ID, "INTEGER", COL_MODEL, "VARCHAR(200)"), Optional.of(COL_ID))); + + for (final JsonNode recordJson : MODEL_RECORDS_2) { + writeRecords(recordJson, MODELS_SCHEMA, MODELS_STREAM_NAME + "_2", COL_ID, + COL_MAKE_ID, COL_MODEL); + } + + final ConfiguredAirbyteStream airbyteStream = new ConfiguredAirbyteStream() + 
.withStream(CatalogHelpers.createAirbyteStream( + MODELS_STREAM_NAME + "_2", + MODELS_SCHEMA, + Field.of(COL_ID, JsonSchemaType.NUMBER), + Field.of(COL_MAKE_ID, JsonSchemaType.NUMBER), + Field.of(COL_MODEL, JsonSchemaType.STRING)) + .withSupportedSyncModes( + Lists.newArrayList(SyncMode.FULL_REFRESH, SyncMode.INCREMENTAL)) + .withSourceDefinedPrimaryKey(List.of(List.of(COL_ID)))); + airbyteStream.setSyncMode(SyncMode.FULL_REFRESH); + + final List streams = configuredCatalog.getStreams(); + streams.add(airbyteStream); + configuredCatalog.withStreams(streams); + + final AutoCloseableIterator read1 = getSource() + .read(getConfig(), configuredCatalog, null); + final List actualRecords1 = AutoCloseableIterators.toListAndClose(read1); + + final Set recordMessages1 = extractRecordMessages(actualRecords1); + final List stateMessages1 = extractStateMessages(actualRecords1); + final HashSet names = new HashSet<>(STREAM_NAMES); + names.add(MODELS_STREAM_NAME + "_2"); + assertEquals(1, stateMessages1.size()); + assertNotNull(stateMessages1.get(0).getData()); + assertExpectedStateMessages(stateMessages1); + assertExpectedRecords(Streams.concat(MODEL_RECORDS_2.stream(), MODEL_RECORDS.stream()) + .collect(Collectors.toSet()), + recordMessages1, + Collections.singleton(MODELS_STREAM_NAME), + names); + + final JsonNode puntoRecord = Jsons + .jsonNode(ImmutableMap.of(COL_ID, 100, COL_MAKE_ID, 3, COL_MODEL, "Punto")); + writeModelRecord(puntoRecord); + + final JsonNode state = Jsons.jsonNode(extractStateMessages(actualRecords1)); + final AutoCloseableIterator read2 = getSource() + .read(getConfig(), configuredCatalog, state); + final List actualRecords2 = AutoCloseableIterators.toListAndClose(read2); + + final Set recordMessages2 = extractRecordMessages(actualRecords2); + final List stateMessages2 = extractStateMessages(actualRecords2); + assertEquals(1, stateMessages2.size()); + assertNotNull(stateMessages2.get(0).getData()); + assertExpectedStateMessages(stateMessages2); + 
assertExpectedRecords( + Streams.concat(MODEL_RECORDS_2.stream(), Stream.of(puntoRecord)) + .collect(Collectors.toSet()), + recordMessages2, + Collections.singleton(MODELS_STREAM_NAME), + names); + } + + @Test + @DisplayName("When no records exist, no records are returned.") + void testNoData() throws Exception { + + executeQuery(String.format("DELETE FROM %s.%s", MODELS_SCHEMA, MODELS_STREAM_NAME)); + + final AutoCloseableIterator read = getSource() + .read(getConfig(), CONFIGURED_CATALOG, null); + final List actualRecords = AutoCloseableIterators.toListAndClose(read); + + final Set recordMessages = extractRecordMessages(actualRecords); + final List stateMessages = extractStateMessages(actualRecords); + + assertExpectedRecords(Collections.emptySet(), recordMessages); + assertEquals(1, stateMessages.size()); + assertNotNull(stateMessages.get(0).getData()); + assertExpectedStateMessages(stateMessages); + } + + @Test + @DisplayName("When no changes have been made to the database since the previous sync, no records are returned.") + void testNoDataOnSecondSync() throws Exception { + final AutoCloseableIterator read1 = getSource() + .read(getConfig(), CONFIGURED_CATALOG, null); + final List actualRecords1 = AutoCloseableIterators.toListAndClose(read1); + final JsonNode state = Jsons.jsonNode(extractStateMessages(actualRecords1)); + + final AutoCloseableIterator read2 = getSource() + .read(getConfig(), CONFIGURED_CATALOG, state); + final List actualRecords2 = AutoCloseableIterators.toListAndClose(read2); + + final Set recordMessages2 = extractRecordMessages(actualRecords2); + final List stateMessages2 = extractStateMessages(actualRecords2); + + assertExpectedRecords(Collections.emptySet(), recordMessages2); + assertEquals(1, stateMessages2.size()); + assertNotNull(stateMessages2.get(0).getData()); + assertExpectedStateMessages(stateMessages2); + } + + @Test + void testCheck() throws Exception { + final AirbyteConnectionStatus status = getSource().check(getConfig()); + 
assertEquals(status.getStatus(), AirbyteConnectionStatus.Status.SUCCEEDED); + } + + @Test + void testDiscover() throws Exception { + final AirbyteCatalog expectedCatalog = expectedCatalogForDiscover(); + final AirbyteCatalog actualCatalog = getSource().discover(getConfig()); + + assertEquals( + expectedCatalog.getStreams().stream().sorted(Comparator.comparing(AirbyteStream::getName)) + .collect(Collectors.toList()), + actualCatalog.getStreams().stream().sorted(Comparator.comparing(AirbyteStream::getName)) + .collect(Collectors.toList())); + } + + protected AirbyteCatalog expectedCatalogForDiscover() { + final AirbyteCatalog expectedCatalog = Jsons.clone(CATALOG); + + createTable(MODELS_SCHEMA, MODELS_STREAM_NAME + "_2", + columnClause(ImmutableMap.of(COL_ID, "INTEGER", COL_MAKE_ID, "INTEGER", COL_MODEL, "VARCHAR(200)"), Optional.empty())); + + final List streams = expectedCatalog.getStreams(); + // stream with PK + streams.get(0).setSourceDefinedCursor(true); + addCdcMetadataColumns(streams.get(0)); + + final AirbyteStream streamWithoutPK = CatalogHelpers.createAirbyteStream( + MODELS_STREAM_NAME + "_2", + MODELS_SCHEMA, + Field.of(COL_ID, JsonSchemaType.NUMBER), + Field.of(COL_MAKE_ID, JsonSchemaType.NUMBER), + Field.of(COL_MODEL, JsonSchemaType.STRING)); + streamWithoutPK.setSourceDefinedPrimaryKey(Collections.emptyList()); + streamWithoutPK.setSupportedSyncModes(List.of(SyncMode.FULL_REFRESH)); + addCdcMetadataColumns(streamWithoutPK); + + final AirbyteStream randomStream = CatalogHelpers.createAirbyteStream( + MODELS_STREAM_NAME + "_random", + MODELS_SCHEMA + "_random", + Field.of(COL_ID + "_random", JsonSchemaType.NUMBER), + Field.of(COL_MAKE_ID + "_random", JsonSchemaType.NUMBER), + Field.of(COL_MODEL + "_random", JsonSchemaType.STRING)) + .withSourceDefinedCursor(true) + .withSupportedSyncModes(Lists.newArrayList(SyncMode.FULL_REFRESH, SyncMode.INCREMENTAL)) + .withSourceDefinedPrimaryKey(List.of(List.of(COL_ID + "_random"))); + 
addCdcMetadataColumns(randomStream); + + streams.add(streamWithoutPK); + streams.add(randomStream); + expectedCatalog.withStreams(streams); + return expectedCatalog; + } + + protected abstract CdcTargetPosition cdcLatestTargetPosition(); + + protected abstract CdcTargetPosition extractPosition(JsonNode record); + + protected abstract void assertNullCdcMetaData(JsonNode data); + + protected abstract void assertCdcMetaData(JsonNode data, boolean deletedAtNull); + + protected abstract void removeCDCColumns(ObjectNode data); + + protected abstract void addCdcMetadataColumns(AirbyteStream stream); + + protected abstract Source getSource(); + + protected abstract JsonNode getConfig(); + + protected abstract Database getDatabase(); + + protected abstract void assertExpectedStateMessages(List stateMessages); + +} diff --git a/airbyte-integrations/bases/standard-destination-test/src/main/java/io/airbyte/integrations/standardtest/destination/DestinationAcceptanceTest.java b/airbyte-integrations/bases/standard-destination-test/src/main/java/io/airbyte/integrations/standardtest/destination/DestinationAcceptanceTest.java index 484d93038ba3..86f12d08bfb8 100644 --- a/airbyte-integrations/bases/standard-destination-test/src/main/java/io/airbyte/integrations/standardtest/destination/DestinationAcceptanceTest.java +++ b/airbyte-integrations/bases/standard-destination-test/src/main/java/io/airbyte/integrations/standardtest/destination/DestinationAcceptanceTest.java @@ -93,6 +93,8 @@ public abstract class DestinationAcceptanceTest { private static final String JOB_ID = "0"; private static final int JOB_ATTEMPT = 0; + private static final String DUMMY_CATALOG_NAME = "DummyCatalog"; + private static final Logger LOGGER = LoggerFactory.getLogger(DestinationAcceptanceTest.class); private TestDestinationEnv testEnv; @@ -415,11 +417,26 @@ public void testSecondSync() throws Exception { final AirbyteCatalog catalog = 
Jsons.deserialize(MoreResources.readResource(DataArgumentsProvider.EXCHANGE_RATE_CONFIG.catalogFile), AirbyteCatalog.class); final ConfiguredAirbyteCatalog configuredCatalog = CatalogHelpers.toDefaultConfiguredCatalog(catalog); + final List firstSyncMessages = MoreResources.readResource(DataArgumentsProvider.EXCHANGE_RATE_CONFIG.messageFile).lines() .map(record -> Jsons.deserialize(record, AirbyteMessage.class)).collect(Collectors.toList()); final JsonNode config = getConfig(); runSyncAndVerifyStateOutput(config, firstSyncMessages, configuredCatalog, false); + // We need to make sure that other streams\tables\files in the same location will not be + // affected\deleted\overridden by our activities during first, second or any future sync. + // So let's create a dummy data that will be checked after all sync. It should remain the same + final AirbyteCatalog dummyCatalog = + Jsons.deserialize(MoreResources.readResource(DataArgumentsProvider.EXCHANGE_RATE_CONFIG.catalogFile), AirbyteCatalog.class); + dummyCatalog.getStreams().get(0).setName(DUMMY_CATALOG_NAME); + final ConfiguredAirbyteCatalog configuredDummyCatalog = CatalogHelpers.toDefaultConfiguredCatalog(dummyCatalog); + // update messages to set new dummy stream name + firstSyncMessages.stream().filter(message -> message.getRecord() != null) + .forEach(message -> message.getRecord().setStream(DUMMY_CATALOG_NAME)); + // sync dummy data + runSyncAndVerifyStateOutput(config, firstSyncMessages, configuredDummyCatalog, false); + + // Run second sync final List secondSyncMessages = Lists.newArrayList( new AirbyteMessage() .withType(Type.RECORD) @@ -442,6 +459,10 @@ public void testSecondSync() throws Exception { runSyncAndVerifyStateOutput(config, secondSyncMessages, configuredCatalog, false); final String defaultSchema = getDefaultSchema(config); retrieveRawRecordsAndAssertSameMessages(catalog, secondSyncMessages, defaultSchema); + + // verify that other streams in the same location were not affected. 
If something fails here, + // then this need to be fixed in connectors logic to override only required streams + retrieveRawRecordsAndAssertSameMessages(dummyCatalog, firstSyncMessages, defaultSchema); } /** diff --git a/airbyte-integrations/bases/standard-destination-test/src/main/java/io/airbyte/integrations/standardtest/destination/NumberDataTypeTestArgumentProvider.java b/airbyte-integrations/bases/standard-destination-test/src/main/java/io/airbyte/integrations/standardtest/destination/NumberDataTypeTestArgumentProvider.java new file mode 100644 index 000000000000..ce5239460bf8 --- /dev/null +++ b/airbyte-integrations/bases/standard-destination-test/src/main/java/io/airbyte/integrations/standardtest/destination/NumberDataTypeTestArgumentProvider.java @@ -0,0 +1,26 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.integrations.standardtest.destination; + +import java.util.stream.Stream; +import org.junit.jupiter.api.extension.ExtensionContext; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.ArgumentsProvider; + +public class NumberDataTypeTestArgumentProvider implements ArgumentsProvider { + + public static final String NUMBER_DATA_TYPE_TEST_CATALOG = "number_data_type_test_catalog.json"; + public static final String NUMBER_DATA_TYPE_TEST_MESSAGES = "number_data_type_test_messages.txt"; + public static final String NUMBER_DATA_TYPE_ARRAY_TEST_CATALOG = "number_data_type_array_test_catalog.json"; + public static final String NUMBER_DATA_TYPE_ARRAY_TEST_MESSAGES = "number_data_type_array_test_messages.txt"; + + @Override + public Stream provideArguments(ExtensionContext context) { + return Stream.of( + Arguments.of(NUMBER_DATA_TYPE_TEST_CATALOG, NUMBER_DATA_TYPE_TEST_MESSAGES), + Arguments.of(NUMBER_DATA_TYPE_ARRAY_TEST_CATALOG, NUMBER_DATA_TYPE_ARRAY_TEST_MESSAGES)); + } + +} diff --git 
a/airbyte-integrations/bases/standard-destination-test/src/main/java/io/airbyte/integrations/standardtest/destination/comparator/AdvancedTestDataComparator.java b/airbyte-integrations/bases/standard-destination-test/src/main/java/io/airbyte/integrations/standardtest/destination/comparator/AdvancedTestDataComparator.java index 79cdb083508b..dd775e0d1026 100644 --- a/airbyte-integrations/bases/standard-destination-test/src/main/java/io/airbyte/integrations/standardtest/destination/comparator/AdvancedTestDataComparator.java +++ b/airbyte-integrations/bases/standard-destination-test/src/main/java/io/airbyte/integrations/standardtest/destination/comparator/AdvancedTestDataComparator.java @@ -89,12 +89,13 @@ protected boolean compareJsonNodes(final JsonNode expectedValue, final JsonNode return compareDateTimeValues(expectedValue.asText(), actualValue.asText()); } else if (isDateValue(expectedValue.asText())) { return compareDateValues(expectedValue.asText(), actualValue.asText()); - } else if (expectedValue.isArray() && actualValue.isArray()) { + } else if (expectedValue.isArray()) { return compareArrays(expectedValue, actualValue); - } else if (expectedValue.isObject() && actualValue.isObject()) { + } else if (expectedValue.isObject()) { compareObjects(expectedValue, actualValue); return true; } else { + LOGGER.warn("Default comparison method!"); return compareString(expectedValue, actualValue); } } diff --git a/airbyte-integrations/bases/standard-destination-test/src/main/resources/number_data_type_array_test_catalog.json b/airbyte-integrations/bases/standard-destination-test/src/main/resources/number_data_type_array_test_catalog.json new file mode 100644 index 000000000000..77f33c308236 --- /dev/null +++ b/airbyte-integrations/bases/standard-destination-test/src/main/resources/number_data_type_array_test_catalog.json @@ -0,0 +1,38 @@ +{ + "streams": [ + { + "name": "array_test_1", + "json_schema": { + "properties": { + "array_number": { + "type": ["array"], + "items": 
{ + "type": "number" + } + }, + "array_float": { + "type": ["array"], + "items": { + "type": "number", + "airbyte_type": "float" + } + }, + "array_integer": { + "type": ["array"], + "items": { + "type": "number", + "airbyte_type": "integer" + } + }, + "array_big_integer": { + "type": ["array"], + "items": { + "type": "number", + "airbyte_type": "big_integer" + } + } + } + } + } + ] +} diff --git a/airbyte-integrations/bases/standard-destination-test/src/main/resources/number_data_type_array_test_messages.txt b/airbyte-integrations/bases/standard-destination-test/src/main/resources/number_data_type_array_test_messages.txt new file mode 100644 index 000000000000..ce69867d8af0 --- /dev/null +++ b/airbyte-integrations/bases/standard-destination-test/src/main/resources/number_data_type_array_test_messages.txt @@ -0,0 +1,2 @@ +{"type": "RECORD", "record": {"stream": "array_test_1", "emitted_at": 1602637589100, "data": { "array_number" : [-12345.678, 100000000000000000.1234],"array_float" : [-12345.678, 0, 1000000000000000000000000000000000000000000000000000.1234], "array_integer" : [42, 0, 12345], "array_big_integer" : [0, 1141241234124123141241234124] }}} +{"type": "STATE", "state": { "data": {"start_date": "2022-02-14"}}} \ No newline at end of file diff --git a/airbyte-integrations/bases/standard-destination-test/src/main/resources/number_data_type_test_catalog.json b/airbyte-integrations/bases/standard-destination-test/src/main/resources/number_data_type_test_catalog.json new file mode 100644 index 000000000000..3cdb51d784e0 --- /dev/null +++ b/airbyte-integrations/bases/standard-destination-test/src/main/resources/number_data_type_test_catalog.json @@ -0,0 +1,47 @@ +{ + "streams": [ + { + "name": "int_test", + "json_schema": { + "properties": { + "data": { + "type": "number", + "airbyte_type": "integer" + } + } + } + }, + { + "name": "big_integer_test", + "json_schema": { + "properties": { + "data": { + "type": "number", + "airbyte_type": "big_integer" + } + } + } + 
}, + { + "name": "float_test", + "json_schema": { + "properties": { + "data": { + "type": "number", + "airbyte_type": "float" + } + } + } + }, + { + "name": "default_number_test", + "json_schema": { + "properties": { + "data": { + "type": "number" + } + } + } + } + ] +} diff --git a/airbyte-integrations/bases/standard-destination-test/src/main/resources/number_data_type_test_messages.txt b/airbyte-integrations/bases/standard-destination-test/src/main/resources/number_data_type_test_messages.txt new file mode 100644 index 000000000000..5fdc9da09502 --- /dev/null +++ b/airbyte-integrations/bases/standard-destination-test/src/main/resources/number_data_type_test_messages.txt @@ -0,0 +1,13 @@ +{"type": "RECORD", "record": {"stream": "int_test", "emitted_at": 1602637589100, "data": { "data" : 42 }}} +{"type": "RECORD", "record": {"stream": "int_test", "emitted_at": 1602637589200, "data": { "data" : 0 }}} +{"type": "RECORD", "record": {"stream": "int_test", "emitted_at": 1602637589300, "data": { "data" : -12345 }}} +{"type": "RECORD", "record": {"stream": "big_integer_test", "emitted_at": 1602637589100, "data": { "data" : 1231123412412314 }}} +{"type": "RECORD", "record": {"stream": "big_integer_test", "emitted_at": 1602637589200, "data": { "data" : 0 }}} +{"type": "RECORD", "record": {"stream": "big_integer_test", "emitted_at": 1602637589300, "data": { "data" : -1234 }}} +{"type": "RECORD", "record": {"stream": "float_test", "emitted_at": 1602637589100, "data": { "data" : 56.78 }}} +{"type": "RECORD", "record": {"stream": "float_test", "emitted_at": 1602637589200, "data": { "data" : 0 }}} +{"type": "RECORD", "record": {"stream": "float_test", "emitted_at": 1602637589300, "data": { "data" : -12345.678 }}} +{"type": "RECORD", "record": {"stream": "default_number_test", "emitted_at": 1602637589100, "data": { "data" : 10000000000000000000000.1234 }}} +{"type": "RECORD", "record": {"stream": "default_number_test", "emitted_at": 1602637589200, "data": { "data" : 0 }}} 
+{"type": "RECORD", "record": {"stream": "default_number_test", "emitted_at": 1602637589300, "data": { "data" : -12345.678 }}} +{"type": "STATE", "state": { "data": {"start_date": "2022-02-14"}}} \ No newline at end of file diff --git a/airbyte-integrations/bases/standard-source-test/src/main/java/io/airbyte/integrations/standardtest/source/SourceAcceptanceTest.java b/airbyte-integrations/bases/standard-source-test/src/main/java/io/airbyte/integrations/standardtest/source/SourceAcceptanceTest.java index 186d0b3c14ad..a6e2d50c85aa 100644 --- a/airbyte-integrations/bases/standard-source-test/src/main/java/io/airbyte/integrations/standardtest/source/SourceAcceptanceTest.java +++ b/airbyte-integrations/bases/standard-source-test/src/main/java/io/airbyte/integrations/standardtest/source/SourceAcceptanceTest.java @@ -13,6 +13,7 @@ import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.node.ObjectNode; +import com.google.common.collect.Iterables; import com.google.common.collect.Sets; import io.airbyte.commons.json.Jsons; import io.airbyte.config.StandardCheckConnectionOutput.Status; @@ -106,6 +107,18 @@ public abstract class SourceAcceptanceTest extends AbstractSourceConnectorTest { */ protected abstract JsonNode getState() throws Exception; + /** + * Tests whether the connector under test supports the per-stream state format or should use the + * legacy format for data generated by this test. + * + * @return {@code true} if the connector supports the per-stream state format or {@code false} if it + * does not support the per-stream state format (e.g. legacy format supported). Default + * value is {@code false}. + */ + protected boolean supportsPerStream() { + return false; + } + /** * Verify that a spec operation issued to the connector returns a valid spec. */ @@ -236,7 +249,7 @@ public void testIncrementalSyncWithState() throws Exception { // when we run incremental sync again there should be no new records. 
Run a sync with the latest // state message and assert no records were emitted. - final JsonNode latestState = stateMessages.get(stateMessages.size() - 1).getData(); + final JsonNode latestState = Jsons.jsonNode(supportsPerStream() ? stateMessages : List.of(Iterables.getLast(stateMessages))); final List secondSyncRecords = filterRecords(runRead(configuredCatalog, latestState)); assertTrue( secondSyncRecords.isEmpty(), diff --git a/airbyte-integrations/builds.md b/airbyte-integrations/builds.md index 64ab8590280a..04ae58b281f8 100644 --- a/airbyte-integrations/builds.md +++ b/airbyte-integrations/builds.md @@ -24,7 +24,9 @@ | Chartmogul | [![source-chartmogul](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fsource-chartmogul%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/source-chartmogul/) | | Cart.com | [![source-cart](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fsource-cart%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/source-cart/) | | Close.com | [![source-close-com](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fsource-close-com%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/source-close-com/) | +| Delighted | [![source-delighted](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fsource-delighted%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/source-delighted) | | Dixa | [![source-dixa](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fsource-dixa%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/source-dixa) | +| Dockerhub | 
[![source-dockerhub](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fsource-dockerhub%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/source-dockerhub) | | Drift | [![source-drift](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fsource-drift%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/source-drift) | | End-to-End Testing | [![source-e2e-test](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fsource-e2e-test%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/source-e2e-test) | | Exchange Rates API | [![source-exchange-rates](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fsource-exchange-rates%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/source-exchange-rates) | @@ -59,6 +61,7 @@ | Lemlist | [![source-lemlist](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fsource-lemlist%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/source-lemlist) | | Mailchimp | [![source-mailchimp](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fsource-mailchimp%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/source-mailchimp) | | Marketo | [![source-marketo](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fsource-marketo%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/source-marketo) | +| Metabase | [![source-metabase](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fsource-metabase%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/source-metabase) | | Microsoft SQL Server \(MSSQL\) | 
[![source-mssql](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fsource-mssql%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/source-mssql) | | Microsoft Teams | [![source-microsoft-teams](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fsource-microsoft-teams%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/source-microsoft-teams) | | Mixpanel | [![source-mixpanel](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fsource-mixpanel%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/source-mixpanel) | @@ -70,6 +73,7 @@ | OneSignal | [![source-onesignal](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fsource-onesignal%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/source-onesignal) | | OpenWeather | [![source-openweather](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fsource-openweather%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/source-openweather) | | Oracle DB | [![source-oracle](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fsource-oracle%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/source-oracle) | +| Orbit | [![source-orbit](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fsource-orbit%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/source-orbit) | | Paypal Transaction | [![paypal-transaction](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fsource-paypal-transaction%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/source-paypal-transaction) | | Paystack | 
[![source-paystack](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fsource-paystack%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/source-paystack) | | PersistIq | [![source-persistiq](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fsource-persistiq%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/source-persistiq/) | diff --git a/airbyte-integrations/connector-templates/destination-java/build.gradle.hbs b/airbyte-integrations/connector-templates/destination-java/build.gradle.hbs index 48171415dfee..3c6c4d037882 100644 --- a/airbyte-integrations/connector-templates/destination-java/build.gradle.hbs +++ b/airbyte-integrations/connector-templates/destination-java/build.gradle.hbs @@ -9,8 +9,8 @@ application { } dependencies { - implementation project(':airbyte-config:models') - implementation project(':airbyte-protocol:models') + implementation project(':airbyte-config:config-models') + implementation project(':airbyte-protocol:protocol-models') implementation project(':airbyte-integrations:bases:base-java') implementation files(project(':airbyte-integrations:bases:base-java').airbyteDocker.outputs) diff --git a/airbyte-integrations/connector-templates/generator/plopfile.js b/airbyte-integrations/connector-templates/generator/plopfile.js index d9914238b4a4..d6e52fba5aca 100644 --- a/airbyte-integrations/connector-templates/generator/plopfile.js +++ b/airbyte-integrations/connector-templates/generator/plopfile.js @@ -14,9 +14,8 @@ Your ${connectorName} connector has been created at .${path.resolve(outputPath)} Follow the TODOs in the generated module to implement your connector. -Questions, comments, or concerns? Let us know at: -Slack: https://slack.airbyte.io -Github: https://github.com/airbytehq/airbyte +Questions, comments, or concerns? 
Let us know in our connector development forum: +https://discuss.airbyte.io/c/connector-development/16 We're always happy to provide any support! diff --git a/airbyte-integrations/connectors/destination-azure-blob-storage/Dockerfile b/airbyte-integrations/connectors/destination-azure-blob-storage/Dockerfile index f0c88d716a67..95c65d89053d 100644 --- a/airbyte-integrations/connectors/destination-azure-blob-storage/Dockerfile +++ b/airbyte-integrations/connectors/destination-azure-blob-storage/Dockerfile @@ -16,5 +16,5 @@ ENV APPLICATION destination-azure-blob-storage COPY --from=build /airbyte /airbyte -LABEL io.airbyte.version=0.1.4 +LABEL io.airbyte.version=0.1.5 LABEL io.airbyte.name=airbyte/destination-azure-blob-storage diff --git a/airbyte-integrations/connectors/destination-bigquery-denormalized/Dockerfile b/airbyte-integrations/connectors/destination-bigquery-denormalized/Dockerfile index ec8026955d27..48355d1a9d4e 100644 --- a/airbyte-integrations/connectors/destination-bigquery-denormalized/Dockerfile +++ b/airbyte-integrations/connectors/destination-bigquery-denormalized/Dockerfile @@ -17,5 +17,5 @@ ENV ENABLE_SENTRY true COPY --from=build /airbyte /airbyte -LABEL io.airbyte.version=1.1.8 +LABEL io.airbyte.version=1.1.11 LABEL io.airbyte.name=airbyte/destination-bigquery-denormalized diff --git a/airbyte-integrations/connectors/destination-bigquery-denormalized/src/main/resources/spec.json b/airbyte-integrations/connectors/destination-bigquery-denormalized/src/main/resources/spec.json index fb64ce159db0..293424c8b3e7 100644 --- a/airbyte-integrations/connectors/destination-bigquery-denormalized/src/main/resources/spec.json +++ b/airbyte-integrations/connectors/destination-bigquery-denormalized/src/main/resources/spec.json @@ -31,7 +31,6 @@ "oneOf": [ { "title": "Standard Inserts", - "additionalProperties": false, "required": ["method"], "properties": { "method": { @@ -42,7 +41,6 @@ }, { "title": "GCS Staging", - "additionalProperties": false, "type": 
"object", "required": [ "method", @@ -112,16 +110,6 @@ "examples": ["data_sync/test"], "order": 3 }, - "part_size_mb": { - "title": "Block Size (MB) for GCS Multipart Upload (Optional)", - "description": "This is the size of a \"Part\" being buffered in memory. It limits the memory usage when writing. Larger values will allow to upload a bigger files and improve the speed, but consumes more memory. Allowed values: min=5MB, max=525MB Default: 5MB.", - "type": "integer", - "default": 5, - "minimum": 5, - "maximum": 525, - "examples": [5], - "order": 4 - }, "keep_files_in_gcs-bucket": { "type": "string", "description": "This upload method is supposed to temporary store records in GCS bucket. By this select you can chose if these records should be removed from GCS when migration has finished. The default \"Delete all tmp files from GCS\" value is used if not set explicitly.", @@ -131,7 +119,7 @@ "Delete all tmp files from GCS", "Keep all tmp files in GCS" ], - "order": 5 + "order": 4 } } } diff --git a/airbyte-integrations/connectors/destination-bigquery-denormalized/src/test-integration/java/io/airbyte/integrations/destination/bigquery/BigQueryDenormalizedGcsDestinationAcceptanceTest.java b/airbyte-integrations/connectors/destination-bigquery-denormalized/src/test-integration/java/io/airbyte/integrations/destination/bigquery/BigQueryDenormalizedGcsDestinationAcceptanceTest.java index 8bb59272fca0..23233ad2543f 100644 --- a/airbyte-integrations/connectors/destination-bigquery-denormalized/src/test-integration/java/io/airbyte/integrations/destination/bigquery/BigQueryDenormalizedGcsDestinationAcceptanceTest.java +++ b/airbyte-integrations/connectors/destination-bigquery-denormalized/src/test-integration/java/io/airbyte/integrations/destination/bigquery/BigQueryDenormalizedGcsDestinationAcceptanceTest.java @@ -37,7 +37,6 @@ protected JsonNode createConfig() throws IOException { .put(BigQueryConsts.METHOD, BigQueryConsts.GCS_STAGING) .put(BigQueryConsts.GCS_BUCKET_NAME, 
gcsConfigFromSecretFile.get(BigQueryConsts.GCS_BUCKET_NAME)) .put(BigQueryConsts.GCS_BUCKET_PATH, gcsConfigFromSecretFile.get(BigQueryConsts.GCS_BUCKET_PATH).asText() + System.currentTimeMillis()) - .put(BigQueryConsts.PART_SIZE, gcsConfigFromSecretFile.get(BigQueryConsts.PART_SIZE)) .put(BigQueryConsts.CREDENTIAL, credential) .build()); diff --git a/airbyte-integrations/connectors/destination-bigquery/Dockerfile b/airbyte-integrations/connectors/destination-bigquery/Dockerfile index db02ad580dba..a1a89342f3cf 100644 --- a/airbyte-integrations/connectors/destination-bigquery/Dockerfile +++ b/airbyte-integrations/connectors/destination-bigquery/Dockerfile @@ -17,5 +17,5 @@ ENV ENABLE_SENTRY true COPY --from=build /airbyte /airbyte -LABEL io.airbyte.version=1.1.8 +LABEL io.airbyte.version=1.1.11 LABEL io.airbyte.name=airbyte/destination-bigquery diff --git a/airbyte-integrations/connectors/destination-bigquery/src/main/java/io/airbyte/integrations/destination/bigquery/BigQueryConsts.java b/airbyte-integrations/connectors/destination-bigquery/src/main/java/io/airbyte/integrations/destination/bigquery/BigQueryConsts.java index 3669d7680d89..016c8365ad30 100644 --- a/airbyte-integrations/connectors/destination-bigquery/src/main/java/io/airbyte/integrations/destination/bigquery/BigQueryConsts.java +++ b/airbyte-integrations/connectors/destination-bigquery/src/main/java/io/airbyte/integrations/destination/bigquery/BigQueryConsts.java @@ -23,7 +23,6 @@ public class BigQueryConsts { public static final String FORMAT = "format"; public static final String KEEP_GCS_FILES = "keep_files_in_gcs-bucket"; public static final String KEEP_GCS_FILES_VAL = "Keep all tmp files in GCS"; - public static final String PART_SIZE = "part_size_mb"; public static final String NAMESPACE_PREFIX = "n"; diff --git a/airbyte-integrations/connectors/destination-bigquery/src/main/java/io/airbyte/integrations/destination/bigquery/BigQueryUtils.java 
b/airbyte-integrations/connectors/destination-bigquery/src/main/java/io/airbyte/integrations/destination/bigquery/BigQueryUtils.java index 3426c9719722..6ae70cd99629 100644 --- a/airbyte-integrations/connectors/destination-bigquery/src/main/java/io/airbyte/integrations/destination/bigquery/BigQueryUtils.java +++ b/airbyte-integrations/connectors/destination-bigquery/src/main/java/io/airbyte/integrations/destination/bigquery/BigQueryUtils.java @@ -143,8 +143,7 @@ public static JsonNode getGcsJsonNodeConfig(final JsonNode config) { .put(BigQueryConsts.CREDENTIAL, loadingMethod.get(BigQueryConsts.CREDENTIAL)) .put(BigQueryConsts.FORMAT, Jsons.deserialize("{\n" + " \"format_type\": \"CSV\",\n" - + " \"flattening\": \"No flattening\",\n" - + " \"part_size_mb\": \"" + loadingMethod.get(BigQueryConsts.PART_SIZE) + "\"\n" + + " \"flattening\": \"No flattening\"\n" + "}")) .build()); @@ -165,8 +164,7 @@ public static JsonNode getGcsAvroJsonNodeConfig(final JsonNode config) { .put(BigQueryConsts.CREDENTIAL, loadingMethod.get(BigQueryConsts.CREDENTIAL)) .put(BigQueryConsts.FORMAT, Jsons.deserialize("{\n" + " \"format_type\": \"AVRO\",\n" - + " \"flattening\": \"No flattening\",\n" - + " \"part_size_mb\": \"" + loadingMethod.get(BigQueryConsts.PART_SIZE) + "\"\n" + + " \"flattening\": \"No flattening\"\n" + "}")) .build()); diff --git a/airbyte-integrations/connectors/destination-bigquery/src/main/resources/spec.json b/airbyte-integrations/connectors/destination-bigquery/src/main/resources/spec.json index d26e17dd7ce8..1939c8eb7278 100644 --- a/airbyte-integrations/connectors/destination-bigquery/src/main/resources/spec.json +++ b/airbyte-integrations/connectors/destination-bigquery/src/main/resources/spec.json @@ -70,7 +70,6 @@ "oneOf": [ { "title": "Standard Inserts", - "additionalProperties": false, "required": ["method"], "properties": { "method": { @@ -81,7 +80,6 @@ }, { "title": "GCS Staging", - "additionalProperties": false, "required": [ "method", "gcs_bucket_name", @@ 
-149,16 +147,6 @@ "examples": ["data_sync/test"], "order": 3 }, - "part_size_mb": { - "title": "Block Size (MB) for GCS Multipart Upload (Optional)", - "description": "This is the size of a \"Part\" being buffered in memory. It limits the memory usage when writing. Larger values will allow to upload a bigger files and improve the speed, but consumes more memory. Allowed values: min=5MB, max=525MB Default: 5MB.", - "type": "integer", - "default": 5, - "minimum": 5, - "maximum": 525, - "examples": [5], - "order": 4 - }, "keep_files_in_gcs-bucket": { "type": "string", "description": "This upload method is supposed to temporary store records in GCS bucket. By this select you can chose if these records should be removed from GCS when migration has finished. The default \"Delete all tmp files from GCS\" value is used if not set explicitly.", @@ -168,7 +156,7 @@ "Delete all tmp files from GCS", "Keep all tmp files in GCS" ], - "order": 5 + "order": 4 } } } diff --git a/airbyte-integrations/connectors/destination-bigquery/src/test-integration/java/io/airbyte/integrations/destination/bigquery/BigQueryDestinationAcceptanceTest.java b/airbyte-integrations/connectors/destination-bigquery/src/test-integration/java/io/airbyte/integrations/destination/bigquery/BigQueryDestinationAcceptanceTest.java index 45a1074d0706..edda58225624 100644 --- a/airbyte-integrations/connectors/destination-bigquery/src/test-integration/java/io/airbyte/integrations/destination/bigquery/BigQueryDestinationAcceptanceTest.java +++ b/airbyte-integrations/connectors/destination-bigquery/src/test-integration/java/io/airbyte/integrations/destination/bigquery/BigQueryDestinationAcceptanceTest.java @@ -112,8 +112,7 @@ protected boolean supportBasicDataTypeTest() { @Override protected boolean supportArrayDataTypeTest() { - // #13154 Normalization issue - return false; + return true; } @Override diff --git 
a/airbyte-integrations/connectors/destination-bigquery/src/test-integration/java/io/airbyte/integrations/destination/bigquery/BigQueryGcsDestinationAcceptanceTest.java b/airbyte-integrations/connectors/destination-bigquery/src/test-integration/java/io/airbyte/integrations/destination/bigquery/BigQueryGcsDestinationAcceptanceTest.java index 3fbffdc388b8..9226bec91b69 100644 --- a/airbyte-integrations/connectors/destination-bigquery/src/test-integration/java/io/airbyte/integrations/destination/bigquery/BigQueryGcsDestinationAcceptanceTest.java +++ b/airbyte-integrations/connectors/destination-bigquery/src/test-integration/java/io/airbyte/integrations/destination/bigquery/BigQueryGcsDestinationAcceptanceTest.java @@ -45,7 +45,6 @@ protected void setup(final TestDestinationEnv testEnv) throws Exception { .put(BigQueryConsts.METHOD, BigQueryConsts.GCS_STAGING) .put(BigQueryConsts.GCS_BUCKET_NAME, gcsConfigFromSecretFile.get(BigQueryConsts.GCS_BUCKET_NAME)) .put(BigQueryConsts.GCS_BUCKET_PATH, gcsConfigFromSecretFile.get(BigQueryConsts.GCS_BUCKET_PATH).asText() + System.currentTimeMillis()) - .put(BigQueryConsts.PART_SIZE, gcsConfigFromSecretFile.get(BigQueryConsts.PART_SIZE)) .put(BigQueryConsts.CREDENTIAL, credential) .build()); diff --git a/airbyte-integrations/connectors/destination-bigquery/src/test-integration/java/io/airbyte/integrations/destination/bigquery/BigQueryTestDataComparator.java b/airbyte-integrations/connectors/destination-bigquery/src/test-integration/java/io/airbyte/integrations/destination/bigquery/BigQueryTestDataComparator.java index e2223f5494e2..392d0687142f 100644 --- a/airbyte-integrations/connectors/destination-bigquery/src/test-integration/java/io/airbyte/integrations/destination/bigquery/BigQueryTestDataComparator.java +++ b/airbyte-integrations/connectors/destination-bigquery/src/test-integration/java/io/airbyte/integrations/destination/bigquery/BigQueryTestDataComparator.java @@ -4,6 +4,8 @@ package 
io.airbyte.integrations.destination.bigquery; +import com.fasterxml.jackson.databind.JsonNode; +import io.airbyte.commons.json.Jsons; import io.airbyte.integrations.destination.StandardNameTransformer; import io.airbyte.integrations.standardtest.destination.comparator.AdvancedTestDataComparator; import java.time.LocalDate; @@ -49,6 +51,19 @@ private LocalDateTime parseDateTime(String dateTimeValue) { } } + @Override + protected ZonedDateTime parseDestinationDateWithTz(String destinationValue) { + if (destinationValue != null) { + if (destinationValue.matches(".+Z")) { + return ZonedDateTime.of(LocalDateTime.parse(destinationValue, DateTimeFormatter.ofPattern(BIGQUERY_DATETIME_FORMAT)), ZoneOffset.UTC); + } else { + return ZonedDateTime.parse(destinationValue, getAirbyteDateTimeWithTzFormatter()).withZoneSameInstant(ZoneOffset.UTC); + } + } else { + return null; + } + } + @Override protected boolean compareDateTimeValues(String expectedValue, String actualValue) { var destinationDate = parseDateTime(actualValue); @@ -70,11 +85,6 @@ protected boolean compareDateValues(String expectedValue, String actualValue) { return expectedDate.equals(destinationDate); } - @Override - protected ZonedDateTime parseDestinationDateWithTz(String destinationValue) { - return ZonedDateTime.of(LocalDateTime.parse(destinationValue, DateTimeFormatter.ofPattern(BIGQUERY_DATETIME_FORMAT)), ZoneOffset.UTC); - } - @Override protected boolean compareDateTimeWithTzValues(String airbyteMessageValue, String destinationValue) { // #13123 Normalization issue @@ -92,4 +102,10 @@ private ZonedDateTime getBrokenDate() { return ZonedDateTime.of(1583, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC); } + @Override + protected void compareObjects(JsonNode expectedObject, JsonNode actualObject) { + JsonNode actualJsonNode = (actualObject.isTextual() ? 
Jsons.deserialize(actualObject.textValue()) : actualObject); + super.compareObjects(expectedObject, actualJsonNode); + } + } diff --git a/airbyte-integrations/connectors/destination-cassandra/Dockerfile b/airbyte-integrations/connectors/destination-cassandra/Dockerfile index 2ee5c1d10a3b..5bb5b6b4dac1 100644 --- a/airbyte-integrations/connectors/destination-cassandra/Dockerfile +++ b/airbyte-integrations/connectors/destination-cassandra/Dockerfile @@ -16,5 +16,5 @@ ENV APPLICATION destination-cassandra COPY --from=build /airbyte /airbyte -LABEL io.airbyte.version=0.1.1 +LABEL io.airbyte.version=0.1.2 LABEL io.airbyte.name=airbyte/destination-cassandra diff --git a/airbyte-integrations/connectors/destination-cassandra/build.gradle b/airbyte-integrations/connectors/destination-cassandra/build.gradle index c606ce79fbe4..593aa67c4e44 100644 --- a/airbyte-integrations/connectors/destination-cassandra/build.gradle +++ b/airbyte-integrations/connectors/destination-cassandra/build.gradle @@ -25,7 +25,7 @@ dependencies { // https://mvnrepository.com/artifact/org.assertj/assertj-core testImplementation "org.assertj:assertj-core:${assertVersion}" - testImplementation libs.testcontainers.cassandra + testImplementation libs.connectors.testcontainers.cassandra integrationTestJavaImplementation project(':airbyte-integrations:bases:standard-destination-test') diff --git a/airbyte-integrations/connectors/destination-cassandra/src/test-integration/java/io/airbyte/integrations/destination/cassandra/CassandraDestinationAcceptanceTest.java b/airbyte-integrations/connectors/destination-cassandra/src/test-integration/java/io/airbyte/integrations/destination/cassandra/CassandraDestinationAcceptanceTest.java index 3e32df81ce7b..d78eb667bd13 100644 --- a/airbyte-integrations/connectors/destination-cassandra/src/test-integration/java/io/airbyte/integrations/destination/cassandra/CassandraDestinationAcceptanceTest.java +++ 
b/airbyte-integrations/connectors/destination-cassandra/src/test-integration/java/io/airbyte/integrations/destination/cassandra/CassandraDestinationAcceptanceTest.java @@ -7,6 +7,7 @@ import com.fasterxml.jackson.databind.JsonNode; import io.airbyte.commons.json.Jsons; import io.airbyte.integrations.standardtest.destination.DestinationAcceptanceTest; +import io.airbyte.integrations.util.HostPortResolver; import java.util.Comparator; import java.util.List; import java.util.stream.Collectors; @@ -16,8 +17,6 @@ public class CassandraDestinationAcceptanceTest extends DestinationAcceptanceTest { - private static final Logger LOGGER = LoggerFactory.getLogger(CassandraDestinationAcceptanceTest.class); - private JsonNode configJson; private CassandraCqlProvider cassandraCqlProvider; @@ -36,8 +35,8 @@ protected void setup(TestDestinationEnv testEnv) { configJson = TestDataFactory.createJsonConfig( cassandraContainer.getUsername(), cassandraContainer.getPassword(), - cassandraContainer.getHost(), - cassandraContainer.getFirstMappedPort()); + HostPortResolver.resolveHost(cassandraContainer), + HostPortResolver.resolvePort(cassandraContainer)); var cassandraConfig = new CassandraConfig(configJson); cassandraCqlProvider = new CassandraCqlProvider(cassandraConfig); cassandraNameTransformer = new CassandraNameTransformer(cassandraConfig); diff --git a/airbyte-integrations/connectors/destination-clickhouse-strict-encrypt/Dockerfile b/airbyte-integrations/connectors/destination-clickhouse-strict-encrypt/Dockerfile index 2a88f7bf1248..8920d8d822e9 100644 --- a/airbyte-integrations/connectors/destination-clickhouse-strict-encrypt/Dockerfile +++ b/airbyte-integrations/connectors/destination-clickhouse-strict-encrypt/Dockerfile @@ -16,5 +16,5 @@ ENV APPLICATION destination-clickhouse-strict-encrypt COPY --from=build /airbyte /airbyte -LABEL io.airbyte.version=0.1.7 +LABEL io.airbyte.version=0.1.8 LABEL io.airbyte.name=airbyte/destination-clickhouse-strict-encrypt diff --git 
a/airbyte-integrations/connectors/destination-clickhouse-strict-encrypt/build.gradle b/airbyte-integrations/connectors/destination-clickhouse-strict-encrypt/build.gradle index fd7391ab799d..8348797d544d 100644 --- a/airbyte-integrations/connectors/destination-clickhouse-strict-encrypt/build.gradle +++ b/airbyte-integrations/connectors/destination-clickhouse-strict-encrypt/build.gradle @@ -21,10 +21,10 @@ dependencies { implementation 'ru.yandex.clickhouse:clickhouse-jdbc:0.3.1-patch' // https://mvnrepository.com/artifact/org.testcontainers/clickhouse - testImplementation libs.testcontainers.clickhouse + testImplementation libs.connectors.destination.testcontainers.clickhouse integrationTestJavaImplementation project(':airbyte-integrations:bases:standard-destination-test') integrationTestJavaImplementation project(':airbyte-integrations:connectors:destination-clickhouse') // https://mvnrepository.com/artifact/org.testcontainers/clickhouse - integrationTestJavaImplementation libs.testcontainers.clickhouse + integrationTestJavaImplementation libs.connectors.destination.testcontainers.clickhouse } diff --git a/airbyte-integrations/connectors/destination-clickhouse/Dockerfile b/airbyte-integrations/connectors/destination-clickhouse/Dockerfile index a909fe50fab8..fa443a93f73f 100644 --- a/airbyte-integrations/connectors/destination-clickhouse/Dockerfile +++ b/airbyte-integrations/connectors/destination-clickhouse/Dockerfile @@ -16,5 +16,5 @@ ENV APPLICATION destination-clickhouse COPY --from=build /airbyte /airbyte -LABEL io.airbyte.version=0.1.6 +LABEL io.airbyte.version=0.1.7 LABEL io.airbyte.name=airbyte/destination-clickhouse diff --git a/airbyte-integrations/connectors/destination-clickhouse/build.gradle b/airbyte-integrations/connectors/destination-clickhouse/build.gradle index d54cc3599b6b..8400dc16962b 100644 --- a/airbyte-integrations/connectors/destination-clickhouse/build.gradle +++ b/airbyte-integrations/connectors/destination-clickhouse/build.gradle @@ 
-21,11 +21,11 @@ dependencies { implementation 'ru.yandex.clickhouse:clickhouse-jdbc:0.3.1-patch' // https://mvnrepository.com/artifact/org.testcontainers/clickhouse - testImplementation libs.testcontainers.clickhouse + testImplementation libs.connectors.destination.testcontainers.clickhouse integrationTestJavaImplementation project(':airbyte-integrations:bases:standard-destination-test') integrationTestJavaImplementation project(':airbyte-integrations:connectors:destination-clickhouse') integrationTestJavaImplementation project(':airbyte-workers') // https://mvnrepository.com/artifact/org.testcontainers/clickhouse - integrationTestJavaImplementation libs.testcontainers.clickhouse + integrationTestJavaImplementation libs.connectors.destination.testcontainers.clickhouse } diff --git a/airbyte-integrations/connectors/destination-clickhouse/src/test-integration/java/io/airbyte/integrations/destination/clickhouse/SshClickhouseDestinationAcceptanceTest.java b/airbyte-integrations/connectors/destination-clickhouse/src/test-integration/java/io/airbyte/integrations/destination/clickhouse/SshClickhouseDestinationAcceptanceTest.java index e235cc3950fa..9745f7bac2cf 100644 --- a/airbyte-integrations/connectors/destination-clickhouse/src/test-integration/java/io/airbyte/integrations/destination/clickhouse/SshClickhouseDestinationAcceptanceTest.java +++ b/airbyte-integrations/connectors/destination-clickhouse/src/test-integration/java/io/airbyte/integrations/destination/clickhouse/SshClickhouseDestinationAcceptanceTest.java @@ -22,6 +22,7 @@ import java.util.stream.Collectors; import org.junit.jupiter.api.Disabled; import org.testcontainers.containers.ClickHouseContainer; +import org.testcontainers.containers.Network; /** * Abstract class that allows us to avoid duplicating testing logic for testing SSH with a key file @@ -32,6 +33,7 @@ public abstract class SshClickhouseDestinationAcceptanceTest extends Destination public abstract SshTunnel.TunnelMethod getTunnelMethod(); 
private static final String DB_NAME = "default"; + private static final Network network = Network.newNetwork(); private final ExtendedNameTransformer namingResolver = new ExtendedNameTransformer(); @@ -158,8 +160,8 @@ private static JdbcDatabase getDatabase(final JsonNode config) { @Override protected void setup(final TestDestinationEnv testEnv) { - bastion.initAndStartBastion(); - db = (ClickHouseContainer) new ClickHouseContainer("yandex/clickhouse-server").withNetwork(bastion.getNetWork()); + bastion.initAndStartBastion(network); + db = (ClickHouseContainer) new ClickHouseContainer("yandex/clickhouse-server").withNetwork(network); db.start(); } diff --git a/airbyte-integrations/connectors/destination-csv/Dockerfile b/airbyte-integrations/connectors/destination-csv/Dockerfile index 12791f0e238f..f9f3456bf150 100644 --- a/airbyte-integrations/connectors/destination-csv/Dockerfile +++ b/airbyte-integrations/connectors/destination-csv/Dockerfile @@ -16,5 +16,5 @@ ENV APPLICATION destination-csv COPY --from=build /airbyte /airbyte -LABEL io.airbyte.version=0.2.9 +LABEL io.airbyte.version=0.2.10 LABEL io.airbyte.name=airbyte/destination-csv diff --git a/airbyte-integrations/connectors/destination-databricks/BOOTSTRAP.md b/airbyte-integrations/connectors/destination-databricks/BOOTSTRAP.md index 7fd07fe88388..7f53edbea0ce 100644 --- a/airbyte-integrations/connectors/destination-databricks/BOOTSTRAP.md +++ b/airbyte-integrations/connectors/destination-databricks/BOOTSTRAP.md @@ -1,6 +1,6 @@ -# Databricks Delta Lake Destination Connector Bootstrap +# Databricks Lakehouse Destination Connector Bootstrap -The Databricks Delta Lake Connector enables a developer to sync data into a Databricks cluster. It does so in two steps: +This destination syncs data to Delta Lake on Databricks Lakehouse. It does so in two steps: 1. Persist source data in S3 staging files in the Parquet format. 2. Create delta table based on the Parquet staging files. 
diff --git a/airbyte-integrations/connectors/destination-databricks/Dockerfile b/airbyte-integrations/connectors/destination-databricks/Dockerfile index d82b3794ac09..ae5588e07ac4 100644 --- a/airbyte-integrations/connectors/destination-databricks/Dockerfile +++ b/airbyte-integrations/connectors/destination-databricks/Dockerfile @@ -16,5 +16,5 @@ ENV APPLICATION destination-databricks COPY --from=build /airbyte /airbyte -LABEL io.airbyte.version=0.2.1 +LABEL io.airbyte.version=0.2.3 LABEL io.airbyte.name=airbyte/destination-databricks diff --git a/airbyte-integrations/connectors/destination-databricks/README.md b/airbyte-integrations/connectors/destination-databricks/README.md index 57f0e0ef137c..d9cf5de58499 100644 --- a/airbyte-integrations/connectors/destination-databricks/README.md +++ b/airbyte-integrations/connectors/destination-databricks/README.md @@ -1,4 +1,4 @@ -# Destination Databricks Delta Lake +# Destination Databricks Lakehouse This is the repository for the Databricks destination connector in Java. For information about how to use this connector within Airbyte, see [the User Documentation](https://docs.airbyte.io/integrations/destinations/databricks). 
diff --git a/airbyte-integrations/connectors/destination-databricks/build.gradle b/airbyte-integrations/connectors/destination-databricks/build.gradle index 55926d593a9e..e5bca02f4cd1 100644 --- a/airbyte-integrations/connectors/destination-databricks/build.gradle +++ b/airbyte-integrations/connectors/destination-databricks/build.gradle @@ -35,9 +35,9 @@ dependencies { implementation group: 'com.databricks', name: 'databricks-jdbc', version: '2.6.25' // parquet - implementation group: 'org.apache.hadoop', name: 'hadoop-common', version: '3.3.0' - implementation group: 'org.apache.hadoop', name: 'hadoop-aws', version: '3.3.0' - implementation group: 'org.apache.hadoop', name: 'hadoop-mapreduce-client-core', version: '3.3.0' + implementation group: 'org.apache.hadoop', name: 'hadoop-common', version: '3.3.3' + implementation group: 'org.apache.hadoop', name: 'hadoop-aws', version: '3.3.3' + implementation group: 'org.apache.hadoop', name: 'hadoop-mapreduce-client-core', version: '3.3.3' implementation group: 'org.apache.parquet', name: 'parquet-avro', version: '1.12.0' implementation group: 'com.github.airbytehq', name: 'json-avro-converter', version: '1.0.1' diff --git a/airbyte-integrations/connectors/destination-databricks/src/main/resources/spec.json b/airbyte-integrations/connectors/destination-databricks/src/main/resources/spec.json index 147fc8055e4a..c7fc3a259393 100644 --- a/airbyte-integrations/connectors/destination-databricks/src/main/resources/spec.json +++ b/airbyte-integrations/connectors/destination-databricks/src/main/resources/spec.json @@ -6,7 +6,7 @@ "supported_destination_sync_modes": ["overwrite", "append"], "connectionSpecification": { "$schema": "http://json-schema.org/draft-07/schema#", - "title": "Databricks Delta Lake Destination Spec", + "title": "Databricks Lakehouse Destination Spec", "type": "object", "required": [ "accept_terms", diff --git a/airbyte-integrations/connectors/destination-dev-null/Dockerfile 
b/airbyte-integrations/connectors/destination-dev-null/Dockerfile index 007170ec3fa5..e37fc860dd67 100644 --- a/airbyte-integrations/connectors/destination-dev-null/Dockerfile +++ b/airbyte-integrations/connectors/destination-dev-null/Dockerfile @@ -17,5 +17,5 @@ ENV ENABLE_SENTRY true COPY --from=build /airbyte /airbyte -LABEL io.airbyte.version=0.2.6 +LABEL io.airbyte.version=0.2.7 LABEL io.airbyte.name=airbyte/destination-dev-null diff --git a/airbyte-integrations/connectors/destination-dynamodb/Dockerfile b/airbyte-integrations/connectors/destination-dynamodb/Dockerfile index 487b94174678..5f80d086a636 100644 --- a/airbyte-integrations/connectors/destination-dynamodb/Dockerfile +++ b/airbyte-integrations/connectors/destination-dynamodb/Dockerfile @@ -16,5 +16,5 @@ ENV APPLICATION destination-dynamodb COPY --from=build /airbyte /airbyte -LABEL io.airbyte.version=0.1.3 +LABEL io.airbyte.version=0.1.4 LABEL io.airbyte.name=airbyte/destination-dynamodb diff --git a/airbyte-integrations/connectors/destination-e2e-test/Dockerfile b/airbyte-integrations/connectors/destination-e2e-test/Dockerfile index 4fc4ce7101b4..f0f8310b29b4 100644 --- a/airbyte-integrations/connectors/destination-e2e-test/Dockerfile +++ b/airbyte-integrations/connectors/destination-e2e-test/Dockerfile @@ -17,5 +17,5 @@ ENV ENABLE_SENTRY true COPY --from=build /airbyte /airbyte -LABEL io.airbyte.version=0.2.3 +LABEL io.airbyte.version=0.2.4 LABEL io.airbyte.name=airbyte/destination-e2e-test diff --git a/airbyte-integrations/connectors/destination-elasticsearch/build.gradle b/airbyte-integrations/connectors/destination-elasticsearch/build.gradle index 725b1b61a8f0..dc5b8e7c8788 100644 --- a/airbyte-integrations/connectors/destination-elasticsearch/build.gradle +++ b/airbyte-integrations/connectors/destination-elasticsearch/build.gradle @@ -29,9 +29,8 @@ dependencies { // MIT // https://www.testcontainers.org/ - //implementation libs.testcontainers.elasticsearch - testImplementation 
libs.testcontainers.elasticsearch - integrationTestJavaImplementation libs.testcontainers.elasticsearch + testImplementation libs.connectors.testcontainers.elasticsearch + integrationTestJavaImplementation libs.connectors.testcontainers.elasticsearch integrationTestJavaImplementation project(':airbyte-integrations:bases:standard-destination-test') integrationTestJavaImplementation project(':airbyte-integrations:connectors:destination-elasticsearch') diff --git a/airbyte-integrations/connectors/destination-firebolt/Dockerfile b/airbyte-integrations/connectors/destination-firebolt/Dockerfile new file mode 100644 index 000000000000..01a8aed15fc1 --- /dev/null +++ b/airbyte-integrations/connectors/destination-firebolt/Dockerfile @@ -0,0 +1,29 @@ +FROM python:3.9-slim as base + +# build and load all requirements +FROM base as builder +WORKDIR /airbyte/integration_code + +COPY setup.py ./ +# install necessary packages to a temporary folder +RUN pip3 install --prefix=/install --no-cache-dir . + +# build a clean environment +FROM base +WORKDIR /airbyte/integration_code + +# copy all loaded and built libraries to a pure basic image +COPY --from=builder /install /usr/local +# add default timezone settings +COPY --from=builder /usr/share/zoneinfo/Etc/UTC /etc/localtime +RUN echo "Etc/UTC" > /etc/timezone + +# copy payload code only +COPY main.py ./ +COPY destination_firebolt ./destination_firebolt + +ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" +ENTRYPOINT ["python3", "/airbyte/integration_code/main.py"] + +LABEL io.airbyte.version=0.1.0 +LABEL io.airbyte.name=airbyte/destination-firebolt diff --git a/airbyte-integrations/connectors/destination-firebolt/README.md b/airbyte-integrations/connectors/destination-firebolt/README.md new file mode 100644 index 000000000000..13e918af34b0 --- /dev/null +++ b/airbyte-integrations/connectors/destination-firebolt/README.md @@ -0,0 +1,123 @@ +# Firebolt Destination + +This is the repository for the Firebolt 
destination connector, written in Python. +For information about how to use this connector within Airbyte, see [the documentation](https://docs.airbyte.io/integrations/destinations/firebolt). + +## Local development + +### Prerequisites +**To iterate on this connector, make sure to complete this prerequisites section.** + +#### Minimum Python version required `= 3.7.0` + +#### Build & Activate Virtual Environment and install dependencies +From this connector directory, create a virtual environment: +``` +python -m venv .venv +``` + +This will generate a virtualenv for this module in `.venv/`. Make sure this venv is active in your +development environment of choice. To activate it from the terminal, run: +``` +source .venv/bin/activate +pip install -r requirements.txt +``` +If you are in an IDE, follow your IDE's instructions to activate the virtualenv. + +Note that while we are installing dependencies from `requirements.txt`, you should only edit `setup.py` for your dependencies. `requirements.txt` is +used for editable installs (`pip install -e`) to pull in Python dependencies from the monorepo and will call `setup.py`. +If this is mumbo jumbo to you, don't worry about it, just put your deps in `setup.py` but install using `pip install -r requirements.txt` and everything +should work as you expect. + +#### Building via Gradle +From the Airbyte repository root, run: +``` +./gradlew :airbyte-integrations:connectors:destination-firebolt:build +``` + +#### Create credentials +**If you are a community contributor**, follow the instructions in the [documentation](https://docs.airbyte.io/integrations/destinations/firebolt) +to generate the necessary credentials. Then create a file `secrets/config.json` conforming to the `destination_firebolt/spec.json` file. +Note that the `secrets` directory is gitignored by default, so there is no danger of accidentally checking in sensitive information. +See `integration_tests/sample_config.json` for a sample config file. 
+ +**If you are an Airbyte core member**, copy the credentials in Lastpass under the secret name `destination firebolt test creds` +and place them into `secrets/config.json`. + +### Locally running the connector +``` +python main.py spec +python main.py check --config secrets/config.json +# messages.jsonl is a file containing line-separated JSON representing AirbyteMessages +cat integration_tests/messages.jsonl | python main.py write --config secrets/config_sql.json --catalog integration_tests/configured_catalog.json +``` + +### Locally running the connector docker image + +#### Build +First, make sure you build the latest Docker image: +``` +docker build . -t airbyte/destination-firebolt:dev +``` + +You can also build the connector image via Gradle: +``` +./gradlew :airbyte-integrations:connectors:destination-firebolt:airbyteDocker +``` +When building via Gradle, the docker image name and tag, respectively, are the values of the `io.airbyte.name` and `io.airbyte.version` `LABEL`s in +the Dockerfile. + +#### Run +Then run any of the connector commands as follows: +``` +docker run --rm airbyte/destination-firebolt:dev spec +docker run --rm -v $(pwd)/secrets:/secrets airbyte/destination-firebolt:dev check --config /secrets/config.json +# messages.jsonl is a file containing line-separated JSON representing AirbyteMessages +cat integration_tests/messages.jsonl | docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/destination-firebolt:dev write --config /secrets/config.json --catalog /integration_tests/configured_catalog.json +``` +## Testing + Make sure to familiarize yourself with [pytest test discovery](https://docs.pytest.org/en/latest/goodpractices.html#test-discovery) to know how your test files and methods should be named. 
+First install test dependencies into your virtual environment: +``` +pip install .[tests] +``` +### Unit Tests +To run unit tests locally, from the connector directory run: +``` +python -m pytest unit_tests +``` + +### Integration Tests +There are two types of integration tests: Acceptance Tests (Airbyte's test suite for all destination connectors) and custom integration tests (which are specific to this connector). +#### Custom Integration tests +Place custom tests inside `integration_tests/` folder, then, from the connector root, run +``` +python -m pytest integration_tests +``` +#### Acceptance Tests +Coming soon: + +### Using gradle to run tests +All commands should be run from airbyte project root. +To run unit tests: +``` +./gradlew :airbyte-integrations:connectors:destination-firebolt:unitTest +``` +To run acceptance and custom integration tests: +``` +./gradlew :airbyte-integrations:connectors:destination-firebolt:integrationTest +``` + +## Dependency Management +All of your dependencies should go in `setup.py`, NOT `requirements.txt`. The requirements file is only used to connect internal Airbyte dependencies in the monorepo for local development. +We split dependencies between two groups, dependencies that are: +* required for your connector to work need to go to `MAIN_REQUIREMENTS` list. +* required for the testing need to go to `TEST_REQUIREMENTS` list + +### Publishing a new version of the connector +You've checked out the repo, implemented a million dollar feature, and you're ready to share your changes with the world. Now what? +1. Make sure your changes are passing unit and integration tests. +1. Bump the connector version in `Dockerfile` -- just increment the value of the `LABEL io.airbyte.version` appropriately (we use [SemVer](https://semver.org/)). +1. Create a Pull Request. +1. Pat yourself on the back for being an awesome contributor. +1. Someone from Airbyte will take a look at your PR and iterate with you to merge it into master. 
diff --git a/airbyte-integrations/connectors/destination-firebolt/bootstrap.md b/airbyte-integrations/connectors/destination-firebolt/bootstrap.md new file mode 100644 index 000000000000..dade5200d2d5 --- /dev/null +++ b/airbyte-integrations/connectors/destination-firebolt/bootstrap.md @@ -0,0 +1,22 @@ +# Firebolt Source + +## Overview + +Firebolt is a cloud data warehouse purpose-built to provide sub-second analytics performance on massive, terabyte-scale data sets. + +Firebolt has two main concepts: Databases, which denote the storage of data and Engines, which describe the compute layer on top of a Database. + +Firebolt has three types of tables: External, Fact and Dimension. External tables, which represent a raw file structure in storage. Dimension tables, which are optimised for fetching and store data on each node in an Engine. Fact tables are similar to Dimension, but they shard the data across the nodes. The usual workload is to write source data into a set of files on S3, wrap them with an External table and write this data to a fetch-optimised Fact or Dimension table. + +## Connector + +Firebolt is a data warehouse so the most efficient way to write data into it would be in bulk. Firebolt connector offers two ways of writing data: SQL and S3. SQL transfers data in small batches and is most useful for prototyping. S3 buffers data on Amazon S3 storage and persists the data to Firebolt at the end of execution. The latter is the most efficient way of loading data, but it requires AWS S3 access. + +This connector uses [firebolt-sdk](https://pypi.org/project/firebolt-sdk/), which is a [PEP-249](https://peps.python.org/pep-0249/) DB API implementation. +`Connection` object is used to connect to a specified Engine, wich runs subsequent queries against the data stored in the Database using the `Cursor` object. +[Pyarrow](https://pypi.org/project/pyarrow/) is used to efficiently store and upload data to S3. 
+ +## Notes + +* Integration testing requires the user to have a running engine. Spinning up an engine can take a while so this ensures a faster iteration on the connector. +* S3 is generally faster writing strategy and should be preferred. \ No newline at end of file diff --git a/airbyte-integrations/connectors/destination-firebolt/build.gradle b/airbyte-integrations/connectors/destination-firebolt/build.gradle new file mode 100644 index 000000000000..08c1a70562ae --- /dev/null +++ b/airbyte-integrations/connectors/destination-firebolt/build.gradle @@ -0,0 +1,8 @@ +plugins { + id 'airbyte-python' + id 'airbyte-docker' +} + +airbytePython { + moduleDirectory 'destination_firebolt' +} diff --git a/airbyte-integrations/connectors/destination-firebolt/destination_firebolt/__init__.py b/airbyte-integrations/connectors/destination-firebolt/destination_firebolt/__init__.py new file mode 100644 index 000000000000..90396b049287 --- /dev/null +++ b/airbyte-integrations/connectors/destination-firebolt/destination_firebolt/__init__.py @@ -0,0 +1,8 @@ +# +# Copyright (c) 2021 Airbyte, Inc., all rights reserved. +# + + +from .destination import DestinationFirebolt + +__all__ = ["DestinationFirebolt"] diff --git a/airbyte-integrations/connectors/destination-firebolt/destination_firebolt/destination.py b/airbyte-integrations/connectors/destination-firebolt/destination_firebolt/destination.py new file mode 100644 index 000000000000..c09168dfe5a2 --- /dev/null +++ b/airbyte-integrations/connectors/destination-firebolt/destination_firebolt/destination.py @@ -0,0 +1,128 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+# + +import json +from datetime import datetime +from logging import getLogger +from typing import Any, Dict, Iterable, Mapping, Optional +from uuid import uuid4 + +from airbyte_cdk import AirbyteLogger +from airbyte_cdk.destinations import Destination +from airbyte_cdk.models import AirbyteConnectionStatus, AirbyteMessage, ConfiguredAirbyteCatalog, DestinationSyncMode, Status, Type +from firebolt.client import DEFAULT_API_URL +from firebolt.client.auth import UsernamePassword +from firebolt.db import Connection, connect + +from .writer import create_firebolt_wirter + +logger = getLogger("airbyte") + + +def parse_config(config: json, logger: Optional[AirbyteLogger] = None) -> Dict[str, Any]: + """ + Convert dict of config values to firebolt.db.Connection arguments + :param config: json-compatible dict of settings + :param logger: AirbyteLogger instance to print logs. + :return: dictionary of firebolt.db.Connection-compatible kwargs + """ + connection_args = { + "database": config["database"], + "auth": UsernamePassword(config["username"], config["password"]), + "api_endpoint": config.get("host", DEFAULT_API_URL), + "account_name": config.get("account"), + } + # engine can be a name or a full URL of a cluster + engine = config.get("engine") + if engine: + if "." in engine: + connection_args["engine_url"] = engine + else: + connection_args["engine_name"] = engine + elif logger: + logger.info("Engine parameter was not provided. Connecting to the default engine.") + return connection_args + + +def establish_connection(config: json, logger: Optional[AirbyteLogger] = None) -> Connection: + """ + Creates a connection to Firebolt database using the parameters provided. + :param config: Json object containing db credentials. + :param logger: AirbyteLogger instance to print logs. + :return: PEP-249 compliant database Connection object. 
+ """ + logger.debug("Connecting to Firebolt.") if logger else None + connection = connect(**parse_config(config, logger)) + logger.debug("Connection to Firebolt established.") if logger else None + return connection + + +class DestinationFirebolt(Destination): + def write( + self, config: Mapping[str, Any], configured_catalog: ConfiguredAirbyteCatalog, input_messages: Iterable[AirbyteMessage] + ) -> Iterable[AirbyteMessage]: + + """ + Reads the input stream of messages, config, and catalog to write data to the destination. + + This method returns an iterable (typically a generator of AirbyteMessages via yield) containing state messages received + in the input message stream. Outputting a state message means that every AirbyteRecordMessage which came before it has been + successfully persisted to the destination. This is used to ensure fault tolerance in the case that a sync fails before fully completing, + then the source is given the last state message output from this method as the starting point of the next sync. 
+ + :param config: dict of JSON configuration matching the configuration declared in spec.json + :param configured_catalog: The Configured Catalog describing the schema of the data being received and how it should be persisted in the + destination + :param input_messages: The stream of input messages received from the source + :return: Iterable of AirbyteStateMessages wrapped in AirbyteMessage structs + """ + streams = {s.stream.name for s in configured_catalog.streams} + + with establish_connection(config) as connection: + writer = create_firebolt_wirter(connection, config, logger) + + for configured_stream in configured_catalog.streams: + if configured_stream.destination_sync_mode == DestinationSyncMode.overwrite: + writer.delete_table(configured_stream.stream.name) + logger.info(f"Stream {configured_stream.stream.name} is wiped.") + writer.create_raw_table(configured_stream.stream.name) + + for message in input_messages: + if message.type == Type.STATE: + yield message + elif message.type == Type.RECORD: + data = message.record.data + stream = message.record.stream + # Skip unselected streams + if stream not in streams: + logger.debug(f"Stream {stream} was not present in configured streams, skipping") + continue + writer.queue_write_data(stream, str(uuid4()), datetime.now(), json.dumps(data)) + + # Flush any leftover messages + writer.flush() + + def check(self, logger: AirbyteLogger, config: Mapping[str, Any]) -> AirbyteConnectionStatus: + """ + Tests if the input configuration can be used to successfully connect to the destination with the needed permissions + e.g: if a provided API token or password can be used to connect and write to the destination. 
+ + :param logger: Logging object to display debug/info/error to the logs + (logs will not be accessible via airbyte UI if they are not passed to this logger) + :param config: Json object containing the configuration of this destination, content of this json is as specified in + the properties of the spec.json file + + :return: AirbyteConnectionStatus indicating a Success or Failure + """ + try: + with establish_connection(config, logger) as connection: + # We can only verify correctness of connection parameters on execution + with connection.cursor() as cursor: + cursor.execute("SELECT 1") + # Test access to the bucket, if S3 strategy is used + create_firebolt_wirter(connection, config, logger) + + return AirbyteConnectionStatus(status=Status.SUCCEEDED) + except Exception as e: + return AirbyteConnectionStatus(status=Status.FAILED, message=f"An exception occurred: {repr(e)}") diff --git a/airbyte-integrations/connectors/destination-firebolt/destination_firebolt/spec.json b/airbyte-integrations/connectors/destination-firebolt/destination_firebolt/spec.json new file mode 100644 index 000000000000..53f6d83ac6fc --- /dev/null +++ b/airbyte-integrations/connectors/destination-firebolt/destination_firebolt/spec.json @@ -0,0 +1,109 @@ +{ + "documentationUrl": "https://docs.airbyte.io/integrations/destinations/firebolt", + "supported_destination_sync_modes": ["overwrite", "append"], + "supportsIncremental": true, + "supportsDBT": true, + "supportsNormalization": false, + "connectionSpecification": { + "$schema": "http://json-schema.org/draft-07/schema#", + "title": "Firebolt Spec", + "type": "object", + "required": ["username", "password", "database"], + "additionalProperties": false, + "properties": { + "username": { + "type": "string", + "title": "Username", + "description": "Firebolt email address you use to login.", + "examples": ["username@email.com"], + "order": 0 + }, + "password": { + "type": "string", + "title": "Password", + "description": "Firebolt password.", 
+ "airbyte_secret": true, + "order": 1 + }, + "account": { + "type": "string", + "title": "Account", + "description": "Firebolt account to login." + }, + "host": { + "type": "string", + "title": "Host", + "description": "The host name of your Firebolt database.", + "examples": ["api.app.firebolt.io"] + }, + "database": { + "type": "string", + "title": "Database", + "description": "The database to connect to." + }, + "engine": { + "type": "string", + "title": "Engine", + "description": "Engine name or url to connect to." + }, + "loading_method": { + "type": "object", + "title": "Loading Method", + "description": "Loading method used to select the way data will be uploaded to Firebolt", + "oneOf": [ + { + "title": "SQL Inserts", + "additionalProperties": false, + "required": ["method"], + "properties": { + "method": { + "type": "string", + "const": "SQL" + } + } + }, + { + "title": "External Table via S3", + "additionalProperties": false, + "required": [ + "method", + "s3_bucket", + "s3_region", + "aws_key_id", + "aws_key_secret" + ], + "properties": { + "method": { + "type": "string", + "const": "S3" + }, + "s3_bucket": { + "type": "string", + "title": "S3 bucket name", + "description": "The name of the S3 bucket." + }, + "s3_region": { + "type": "string", + "title": "S3 region name", + "description": "Region name of the S3 bucket.", + "examples": ["us-east-1"] + }, + "aws_key_id": { + "type": "string", + "title": "AWS Key ID", + "airbyte_secret": true, + "description": "AWS access key granting read and write access to S3." 
+ }, + "aws_key_secret": { + "type": "string", + "title": "AWS Key Secret", + "airbyte_secret": true, + "description": "Corresponding secret part of the AWS Key" + } + } + } + ] + } + } + } +} diff --git a/airbyte-integrations/connectors/destination-firebolt/destination_firebolt/writer.py b/airbyte-integrations/connectors/destination-firebolt/destination_firebolt/writer.py new file mode 100644 index 000000000000..4e2151ac53b6 --- /dev/null +++ b/airbyte-integrations/connectors/destination-firebolt/destination_firebolt/writer.py @@ -0,0 +1,235 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + +import json +from collections import defaultdict +from datetime import datetime +from time import time +from uuid import uuid4 + +import pyarrow as pa +import pyarrow.parquet as pq +from airbyte_cdk import AirbyteLogger +from firebolt.db import Connection +from pyarrow import fs + + +class FireboltWriter: + """ + Base class for shared writer logic. + """ + + flush_interval = 1000 + + def __init__(self, connection: Connection) -> None: + """ + :param connection: Firebolt SDK connection class with established connection + to the databse. + """ + self.connection = connection + self._buffer = defaultdict(list) + self._values = 0 + + def delete_table(self, name: str) -> None: + """ + Delete the resulting table. + Primarily used in Overwrite strategy to clean up previous data. + + :param name: table name to delete. + """ + cursor = self.connection.cursor() + cursor.execute(f"DROP TABLE IF EXISTS _airbyte_raw_{name}") + + def create_raw_table(self, name: str): + """ + Create the resulting _airbyte_raw table. + + :param name: table name to create. 
+ """ + query = f""" + CREATE FACT TABLE IF NOT EXISTS _airbyte_raw_{name} ( + _airbyte_ab_id TEXT, + _airbyte_emitted_at TIMESTAMP, + _airbyte_data TEXT + ) + PRIMARY INDEX _airbyte_ab_id + """ + cursor = self.connection.cursor() + cursor.execute(query) + + def queue_write_data(self, stream_name: str, id: str, time: datetime, record: str) -> None: + """ + Queue up data in a buffer in memory before writing to the database. + When flush_interval is reached data is persisted. + + :param stream_name: name of the stream for which the data corresponds. + :param id: unique identifier of this data row. + :param time: time of writing. + :param record: string representation of the json data payload. + """ + self._buffer[stream_name].append((id, time, record)) + self._values += 1 + if self._values == self.flush_interval: + self._flush() + + def _flush(self): + """ + Stub for the intermediate data flush that's triggered during the + buffering operation. + """ + raise NotImplementedError() + + def flush(self): + """ + Stub for the data flush at the end of writing operation. + """ + raise NotImplementedError() + + +class FireboltS3Writer(FireboltWriter): + """ + Data writer using the S3 strategy. Data is buffered in memory + before being flushed to S3 in .parquet format. At the end of + the operation data is written to Firebolt database from S3, allowing + greater ingestion speed. + """ + + flush_interval = 100000 + + def __init__(self, connection: Connection, s3_bucket: str, access_key: str, secret_key: str, s3_region: str) -> None: + """ + :param connection: Firebolt SDK connection class with established connection + to the database. + :param s3_bucket: Intermediate bucket to store the data files before writing them to Firebolt. + Has to be created and accessible. + :param access_key: AWS Access Key ID that has read/write/delete permissions on the files in the bucket. + :param secret_key: Corresponding AWS Secret Key. + :param s3_region: S3 region.
Best to keep this the same as Firebolt database region. Default us-east-1. + """ + super().__init__(connection) + self.key_id = access_key + self.secret_key = secret_key + self.s3_bucket = s3_bucket + self._updated_tables = set() + self.unique_dir = f"{int(time())}_{uuid4()}" + self.fs = fs.S3FileSystem(access_key=access_key, secret_key=secret_key, region=s3_region) + + def _flush(self) -> None: + """ + Intermediate data flush that's triggered during the + buffering operation. Uploads data stored in memory to the S3. + """ + for table, data in self._buffer.items(): + key_list, ts_list, payload = zip(*data) + upload_data = [pa.array(key_list), pa.array(ts_list), pa.array(payload)] + pa_table = pa.table(upload_data, names=["_airbyte_ab_id", "_airbyte_emitted_at", "_airbyte_data"]) + pq.write_to_dataset(table=pa_table, root_path=f"{self.s3_bucket}/airbyte_output/{self.unique_dir}/{table}", filesystem=self.fs) + # Update tables + self._updated_tables.update(self._buffer.keys()) + self._buffer.clear() + self._values = 0 + + def flush(self) -> None: + """ + Flush any leftover data after ingestion and write from S3 to Firebolt. + Intermediate data on S3 and External Table will be deleted after write is complete. + """ + self._flush() + for table in self._updated_tables: + self.create_raw_table(table) + self.create_external_table(table) + self.ingest_data(table) + self.cleanup(table) + + def create_external_table(self, name: str) -> None: + """ + Create Firebolt External Table to interface with the files on S3. + + :param name: Stream name from which the table name is derived. + """ + query = f""" + CREATE EXTERNAL TABLE IF NOT EXISTS ex_airbyte_raw_{name} ( + _airbyte_ab_id TEXT, + _airbyte_emitted_at TIMESTAMP, + _airbyte_data TEXT + ) + URL = ? + CREDENTIALS = ( AWS_KEY_ID = ? AWS_SECRET_KEY = ? 
) + OBJECT_PATTERN = '*.parquet' + TYPE = (PARQUET); + """ + cursor = self.connection.cursor() + cursor.execute(query, parameters=(f"s3://{self.s3_bucket}/airbyte_output/{self.unique_dir}/{name}", self.key_id, self.secret_key)) + + def ingest_data(self, name: str) -> None: + """ + Write data from External Table to the _airbyte_raw table effectively + persisting data in Firebolt. + + :param name: Stream name from which the table name is derived. + """ + query = f"INSERT INTO _airbyte_raw_{name} SELECT * FROM ex_airbyte_raw_{name}" + cursor = self.connection.cursor() + cursor.execute(query) + + def cleanup(self, name: str) -> None: + """ + Clean intermediary External tables and wipe the S3 folder. + + :param name: Stream name from which the table name is derived. + """ + cursor = self.connection.cursor() + cursor.execute(f"DROP TABLE IF EXISTS ex_airbyte_raw_{name}") + self.fs.delete_dir_contents(f"{self.s3_bucket}/airbyte_output/{self.unique_dir}/{name}") + + +class FireboltSQLWriter(FireboltWriter): + """ + Data writer using the SQL writing strategy. Data is buffered in memory + and flushed using INSERT INTO SQL statement. This is a less effective strategy + better suited for testing and small data sets. + """ + + flush_interval = 1000 + + def __init__(self, connection: Connection) -> None: + """ + :param connection: Firebolt SDK connection class with established connection + to the database. + """ + super().__init__(connection) + + def _flush(self) -> None: + """ + Intermediate data flush that's triggered during the + buffering operation. Writes data stored in memory via SQL commands. + """ + cursor = self.connection.cursor() + # id, written_at, data + for table, data in self._buffer.items(): + cursor.executemany(f"INSERT INTO _airbyte_raw_{table} VALUES (?, ?, ?)", parameters_seq=data) + self._buffer.clear() + self._values = 0 + + def flush(self) -> None: + """ + Final data flush after all data has been written to memory.
+ """ + self._flush() + + +def create_firebolt_wirter(connection: Connection, config: json, logger: AirbyteLogger) -> FireboltWriter: + if config["loading_method"]["method"] == "S3": + logger.info("Using the S3 writing strategy") + writer = FireboltS3Writer( + connection, + config["loading_method"]["s3_bucket"], + config["loading_method"]["aws_key_id"], + config["loading_method"]["aws_key_secret"], + config["loading_method"]["s3_region"], + ) + else: + logger.info("Using the SQL writing strategy") + writer = FireboltSQLWriter(connection) + return writer diff --git a/airbyte-integrations/connectors/destination-firebolt/integration_tests/configured_catalog.json b/airbyte-integrations/connectors/destination-firebolt/integration_tests/configured_catalog.json new file mode 100644 index 000000000000..bdfdcaad3aea --- /dev/null +++ b/airbyte-integrations/connectors/destination-firebolt/integration_tests/configured_catalog.json @@ -0,0 +1,38 @@ +{ + "streams": [ + { + "stream": { + "name": "airbyte_acceptance_table", + "supported_sync_modes": ["full_refresh"], + "source_defined_cursor": false, + "json_schema": { + "type": "object", + "properties": { + "column1": { + "type": "string" + }, + "column2": { + "type": "number" + }, + "column3": { + "type": "string", + "format": "datetime", + "airbyte_type": "timestamp_without_timezone" + }, + "column4": { + "type": "number" + }, + "column5": { + "type": "array", + "items": { + "type": "integer" + } + } + } + } + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + } + ] +} diff --git a/airbyte-integrations/connectors/destination-firebolt/integration_tests/integration_test.py b/airbyte-integrations/connectors/destination-firebolt/integration_tests/integration_test.py new file mode 100644 index 000000000000..9c4856855410 --- /dev/null +++ b/airbyte-integrations/connectors/destination-firebolt/integration_tests/integration_test.py @@ -0,0 +1,147 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+# + +import random +import string +from datetime import datetime +from json import dumps, load +from typing import Dict +from unittest.mock import MagicMock + +from airbyte_cdk.models import AirbyteMessage, AirbyteRecordMessage, Status, Type +from airbyte_cdk.models.airbyte_protocol import ( + AirbyteStream, + ConfiguredAirbyteCatalog, + ConfiguredAirbyteStream, + DestinationSyncMode, + SyncMode, +) +from destination_firebolt.destination import DestinationFirebolt, establish_connection +from firebolt.common.exception import FireboltError +from pytest import fixture, mark, raises + + +@fixture(scope="module") +def config() -> Dict[str, str]: + with open( + "secrets/config.json", + ) as f: + yield load(f) + + +@fixture(scope="module") +def test_table_name() -> str: + letters = string.ascii_lowercase + rnd_string = "".join(random.choice(letters) for i in range(10)) + return f"airbyte_integration_{rnd_string}" + + +@fixture +def cleanup(config: Dict[str, str], test_table_name: str): + yield + with establish_connection(config, MagicMock()) as connection: + with connection.cursor() as cursor: + cursor.execute(f"DROP TABLE IF EXISTS _airbyte_raw_{test_table_name}") + cursor.execute(f"DROP TABLE IF EXISTS ex_airbyte_raw_{test_table_name}") + + +@fixture +def table_schema() -> str: + schema = { + "type": "object", + "properties": { + "column1": {"type": ["null", "string"]}, + }, + } + return schema + + +@fixture +def configured_catalogue(test_table_name: str, table_schema: str) -> ConfiguredAirbyteCatalog: + append_stream = ConfiguredAirbyteStream( + stream=AirbyteStream(name=test_table_name, json_schema=table_schema), + sync_mode=SyncMode.incremental, + destination_sync_mode=DestinationSyncMode.append, + ) + return ConfiguredAirbyteCatalog(streams=[append_stream]) + + +@fixture(scope="module") +def invalid_config() -> Dict[str, str]: + with open( + "integration_tests/invalid_config.json", + ) as f: + yield load(f) + + +@fixture(scope="module") +def invalid_config_s3() -> 
Dict[str, str]: + with open( + "integration_tests/invalid_config_s3.json", + ) as f: + yield load(f) + + +@fixture +def airbyte_message1(test_table_name: str): + return AirbyteMessage( + type=Type.RECORD, + record=AirbyteRecordMessage( + stream=test_table_name, + data={"key1": "value1", "key2": 2}, + emitted_at=int(datetime.now().timestamp()) * 1000, + ), + ) + + +@fixture +def airbyte_message2(test_table_name: str): + return AirbyteMessage( + type=Type.RECORD, + record=AirbyteRecordMessage( + stream=test_table_name, + data={"key1": "value2", "key2": 3}, + emitted_at=int(datetime.now().timestamp()) * 1000, + ), + ) + + +@mark.parametrize("config", ["invalid_config", "invalid_config_s3"]) +def test_check_fails(config, request): + destination = DestinationFirebolt() + status = destination.check(logger=MagicMock(), config=config) + assert status.status == Status.FAILED + + +def test_check_succeeds(config, request): + destination = DestinationFirebolt() + status = destination.check(logger=MagicMock(), config=config) + assert status.status == Status.SUCCEEDED + + +def test_write( + config: Dict[str, str], + configured_catalogue: ConfiguredAirbyteCatalog, + airbyte_message1: AirbyteMessage, + airbyte_message2: AirbyteMessage, + test_table_name: str, + cleanup, + request, +): + destination = DestinationFirebolt() + generator = destination.write(config, configured_catalogue, [airbyte_message1, airbyte_message2]) + result = list(generator) + assert len(result) == 0 + with establish_connection(config, MagicMock()) as connection: + with connection.cursor() as cursor: + cursor.execute( + f"SELECT _airbyte_ab_id, _airbyte_emitted_at, _airbyte_data FROM _airbyte_raw_{test_table_name} ORDER BY _airbyte_data" + ) + result = cursor.fetchall() + # Make sure no temporary tables present + with raises(FireboltError): + cursor.execute(f"SELECT TOP 0 * FROM ex_airbyte_raw_{test_table_name}") + assert len(result) == 2 + assert result[0][2] == dumps(airbyte_message1.record.data) + assert 
result[1][2] == dumps(airbyte_message2.record.data) diff --git a/airbyte-integrations/connectors/destination-firebolt/integration_tests/invalid_config.json b/airbyte-integrations/connectors/destination-firebolt/integration_tests/invalid_config.json new file mode 100644 index 000000000000..f8251d5271fb --- /dev/null +++ b/airbyte-integrations/connectors/destination-firebolt/integration_tests/invalid_config.json @@ -0,0 +1,9 @@ +{ + "username": "xxx", + "password": "xxx", + "database": "non_existing_database_name", + "engine": "database_name_Analytics", + "loading_method": { + "method": "SQL" + } +} diff --git a/airbyte-integrations/connectors/destination-firebolt/integration_tests/invalid_config_s3.json b/airbyte-integrations/connectors/destination-firebolt/integration_tests/invalid_config_s3.json new file mode 100644 index 000000000000..2ab29e87dfe5 --- /dev/null +++ b/airbyte-integrations/connectors/destination-firebolt/integration_tests/invalid_config_s3.json @@ -0,0 +1,13 @@ +{ + "username": "xxx", + "password": "xxx", + "database": "non_existing_database_name", + "engine": "database_name_Analytics", + "loading_method": { + "method": "S3", + "s3_bucket": "sample_bucket", + "s3_region": "us-east-1", + "aws_key_id": "yyy", + "aws_key_secret": "yyy" + } +} diff --git a/airbyte-integrations/connectors/destination-firebolt/integration_tests/messages.jsonl b/airbyte-integrations/connectors/destination-firebolt/integration_tests/messages.jsonl new file mode 100644 index 000000000000..ab871c15bb02 --- /dev/null +++ b/airbyte-integrations/connectors/destination-firebolt/integration_tests/messages.jsonl @@ -0,0 +1,2 @@ +{"type": "RECORD", "record": {"stream": "airbyte_acceptance_table", "data": {"column1": "my_value", "column2": 221, "column3": "2021-01-01T20:10:22", "column4": 1.214, "column5": [1,2,3]}, "emitted_at": 1626172757000}} +{"type": "RECORD", "record": {"stream": "airbyte_acceptance_table", "data": {"column1": "my_value2", "column2": 222, "column3": 
"2021-01-02T22:10:22", "column5": [1,2,null]}, "emitted_at": 1626172757000}} diff --git a/airbyte-integrations/connectors/destination-firebolt/main.py b/airbyte-integrations/connectors/destination-firebolt/main.py new file mode 100644 index 000000000000..38037d81efb9 --- /dev/null +++ b/airbyte-integrations/connectors/destination-firebolt/main.py @@ -0,0 +1,11 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +import sys + +from destination_firebolt import DestinationFirebolt + +if __name__ == "__main__": + DestinationFirebolt().run(sys.argv[1:]) diff --git a/airbyte-integrations/connectors/destination-firebolt/requirements.txt b/airbyte-integrations/connectors/destination-firebolt/requirements.txt new file mode 100644 index 000000000000..d6e1198b1ab1 --- /dev/null +++ b/airbyte-integrations/connectors/destination-firebolt/requirements.txt @@ -0,0 +1 @@ +-e . diff --git a/airbyte-integrations/connectors/destination-firebolt/setup.py b/airbyte-integrations/connectors/destination-firebolt/setup.py new file mode 100644 index 000000000000..5f5cf855461d --- /dev/null +++ b/airbyte-integrations/connectors/destination-firebolt/setup.py @@ -0,0 +1,23 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+# + + +from setuptools import find_packages, setup + +MAIN_REQUIREMENTS = ["airbyte-cdk", "firebolt-sdk>=0.8.0", "pyarrow"] + +TEST_REQUIREMENTS = ["pytest~=6.1"] + +setup( + name="destination_firebolt", + description="Destination implementation for Firebolt.", + author="Airbyte", + author_email="contact@airbyte.io", + packages=find_packages(), + install_requires=MAIN_REQUIREMENTS, + package_data={"": ["*.json"]}, + extras_require={ + "tests": TEST_REQUIREMENTS, + }, +) diff --git a/airbyte-integrations/connectors/destination-firebolt/unit_tests/test_firebolt_destination.py b/airbyte-integrations/connectors/destination-firebolt/unit_tests/test_firebolt_destination.py new file mode 100644 index 000000000000..8525c6114a02 --- /dev/null +++ b/airbyte-integrations/connectors/destination-firebolt/unit_tests/test_firebolt_destination.py @@ -0,0 +1,239 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + +from datetime import datetime +from typing import Any, Dict +from unittest.mock import MagicMock, call, patch + +from airbyte_cdk.models import ( + AirbyteMessage, + AirbyteRecordMessage, + AirbyteStream, + ConfiguredAirbyteCatalog, + ConfiguredAirbyteStream, + DestinationSyncMode, + Status, + SyncMode, + Type, +) +from destination_firebolt.destination import DestinationFirebolt, establish_connection, parse_config +from pytest import fixture + + +@fixture(params=["my_engine", "my_engine.api.firebolt.io"]) +def config(request: Any) -> Dict[str, str]: + args = { + "database": "my_database", + "username": "my_username", + "password": "my_password", + "engine": request.param, + "loading_method": { + "method": "SQL", + }, + } + return args + + +@fixture +def config_external_table() -> Dict[str, str]: + args = { + "database": "my_database", + "username": "my_username", + "password": "my_password", + "engine": "my_engine", + "loading_method": { + "method": "S3", + "s3_bucket": "my_bucket", + "s3_region": "us-east-1", + "aws_key_id": "aws_key", + 
"aws_key_secret": "aws_secret", + }, + } + return args + + +@fixture +def config_no_engine() -> Dict[str, str]: + args = { + "database": "my_database", + "username": "my_username", + "password": "my_password", + } + return args + + +@fixture +def logger() -> MagicMock: + return MagicMock() + + +@fixture +def configured_stream1() -> ConfiguredAirbyteStream: + return ConfiguredAirbyteStream( + stream=AirbyteStream( + name="table1", + json_schema={ + "type": "object", + "properties": {"col1": {"type": "string"}, "col2": {"type": "integer"}}, + }, + ), + sync_mode=SyncMode.incremental, + destination_sync_mode=DestinationSyncMode.append, + ) + + +@fixture +def configured_stream2() -> ConfiguredAirbyteStream: + return ConfiguredAirbyteStream( + stream=AirbyteStream( + name="table2", + json_schema={ + "type": "object", + "properties": {"col1": {"type": "string"}, "col2": {"type": "integer"}}, + }, + ), + sync_mode=SyncMode.incremental, + destination_sync_mode=DestinationSyncMode.append, + ) + + +@fixture +def airbyte_message1() -> AirbyteMessage: + return AirbyteMessage( + type=Type.RECORD, + record=AirbyteRecordMessage( + stream="table1", + data={"key1": "value1", "key2": 2}, + emitted_at=int(datetime.now().timestamp()) * 1000, + ), + ) + + +@fixture +def airbyte_message2() -> AirbyteMessage: + return AirbyteMessage( + type=Type.RECORD, + record=AirbyteRecordMessage( + stream="table2", + data={"key1": "value2", "key2": 3}, + emitted_at=int(datetime.now().timestamp()) * 1000, + ), + ) + + +@fixture +def airbyte_state_message() -> AirbyteMessage: + return AirbyteMessage(type=Type.STATE) + + +def test_parse_config(config: Dict[str, str]): + config["engine"] = "override_engine" + result = parse_config(config) + assert result["database"] == "my_database" + assert result["engine_name"] == "override_engine" + assert result["auth"].username == "my_username" + assert result["auth"].password == "my_password" + config["engine"] = "override_engine.api.firebolt.io" + result = 
parse_config(config) + assert result["engine_url"] == "override_engine.api.firebolt.io" + + +@patch("destination_firebolt.destination.connect", MagicMock()) +def test_connection(config: Dict[str, str], config_no_engine: Dict[str, str], logger: MagicMock) -> None: + establish_connection(config, logger) + logger.reset_mock() + establish_connection(config_no_engine, logger) + assert any(["default engine" in msg.args[0] for msg in logger.info.mock_calls]), "No message on using default engine" + # Check no log object + establish_connection(config) + + +@patch("destination_firebolt.writer.FireboltS3Writer") +@patch("destination_firebolt.destination.connect") +def test_check( + mock_connection: MagicMock, mock_writer: MagicMock, config: Dict[str, str], config_external_table: Dict[str, str], logger: MagicMock +): + destination = DestinationFirebolt() + status = destination.check(logger, config) + assert status.status == Status.SUCCEEDED + mock_writer.assert_not_called() + status = destination.check(logger, config_external_table) + assert status.status == Status.SUCCEEDED + mock_writer.assert_called_once() + mock_connection().__enter__().cursor().__enter__().execute.side_effect = Exception("my exception") + status = destination.check(logger, config) + assert status.status == Status.FAILED + + +@patch("destination_firebolt.writer.FireboltSQLWriter") +@patch("destination_firebolt.destination.establish_connection") +def test_sql_write_append( + mock_connection: MagicMock, + mock_writer: MagicMock, + config: Dict[str, str], + configured_stream1: ConfiguredAirbyteStream, + configured_stream2: ConfiguredAirbyteStream, + airbyte_message1: AirbyteMessage, + airbyte_message2: AirbyteMessage, + airbyte_state_message: AirbyteMessage, +) -> None: + catalog = ConfiguredAirbyteCatalog(streams=[configured_stream1, configured_stream2]) + + destination = DestinationFirebolt() + result = destination.write(config, catalog, [airbyte_message1, airbyte_state_message, airbyte_message2]) + + 
assert list(result) == [airbyte_state_message] + mock_writer.return_value.delete_table.assert_not_called() + mock_writer.return_value.create_raw_table.mock_calls = [call(mock_connection, "table1"), call(mock_connection, "table2")] + assert len(mock_writer.return_value.queue_write_data.mock_calls) == 2 + mock_writer.return_value.flush.assert_called_once() + + +@patch("destination_firebolt.writer.FireboltS3Writer") +@patch("destination_firebolt.writer.FireboltSQLWriter") +@patch("destination_firebolt.destination.establish_connection") +def test_sql_write_overwrite( + mock_connection: MagicMock, + mock_writer: MagicMock, + mock_s3_writer: MagicMock, + config: Dict[str, str], + configured_stream1: ConfiguredAirbyteStream, + configured_stream2: ConfiguredAirbyteStream, + airbyte_message1: AirbyteMessage, + airbyte_message2: AirbyteMessage, + airbyte_state_message: AirbyteMessage, +): + # Overwrite triggers a delete + configured_stream1.destination_sync_mode = DestinationSyncMode.overwrite + catalog = ConfiguredAirbyteCatalog(streams=[configured_stream1, configured_stream2]) + + destination = DestinationFirebolt() + result = destination.write(config, catalog, [airbyte_message1, airbyte_state_message, airbyte_message2]) + + mock_s3_writer.assert_not_called() + assert list(result) == [airbyte_state_message] + mock_writer.return_value.delete_table.assert_called_once_with("table1") + mock_writer.return_value.create_raw_table.mock_calls = [call(mock_connection, "table1"), call(mock_connection, "table2")] + + +@patch("destination_firebolt.writer.FireboltS3Writer") +@patch("destination_firebolt.writer.FireboltSQLWriter") +@patch("destination_firebolt.destination.establish_connection", MagicMock()) +def test_s3_write( + mock_sql_writer: MagicMock, + mock_s3_writer: MagicMock, + config_external_table: Dict[str, str], + configured_stream1: ConfiguredAirbyteStream, + configured_stream2: ConfiguredAirbyteStream, + airbyte_message1: AirbyteMessage, + airbyte_message2: AirbyteMessage, 
+ airbyte_state_message: AirbyteMessage, +): + catalog = ConfiguredAirbyteCatalog(streams=[configured_stream1, configured_stream2]) + + destination = DestinationFirebolt() + result = destination.write(config_external_table, catalog, [airbyte_message1, airbyte_state_message, airbyte_message2]) + assert list(result) == [airbyte_state_message] + mock_sql_writer.assert_not_called() + mock_s3_writer.assert_called_once() diff --git a/airbyte-integrations/connectors/destination-firebolt/unit_tests/test_writer.py b/airbyte-integrations/connectors/destination-firebolt/unit_tests/test_writer.py new file mode 100644 index 000000000000..6c42bab1c0fa --- /dev/null +++ b/airbyte-integrations/connectors/destination-firebolt/unit_tests/test_writer.py @@ -0,0 +1,156 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + +from typing import Any, Union +from unittest.mock import ANY, MagicMock, call, patch + +from destination_firebolt.writer import FireboltS3Writer, FireboltSQLWriter +from pytest import fixture, mark + + +@fixture +def connection() -> MagicMock: + return MagicMock() + + +@fixture +def sql_writer(connection: MagicMock) -> FireboltSQLWriter: + return FireboltSQLWriter(connection) + + +@fixture +@patch("destination_firebolt.writer.time", MagicMock(return_value=111)) +@patch("destination_firebolt.writer.uuid4", MagicMock(return_value="dummy-uuid")) +def s3_writer(connection: MagicMock) -> FireboltS3Writer: + # Make sure S3FileSystem mock is reset each time + with patch("destination_firebolt.writer.fs.S3FileSystem", MagicMock()): + return FireboltS3Writer(connection, "dummy_bucket", "access_key", "secret_key", "us-east-1") + + +def test_sql_default(sql_writer: FireboltSQLWriter) -> None: + assert len(sql_writer._buffer) == 0 + assert sql_writer.flush_interval == 1000 + + +@mark.parametrize("writer", ["sql_writer", "s3_writer"]) +def test_sql_create(connection: MagicMock, writer: Union[FireboltSQLWriter, FireboltS3Writer], request: Any) -> None: + writer = 
request.getfixturevalue(writer) + expected_query = """ + CREATE FACT TABLE IF NOT EXISTS _airbyte_raw_dummy ( + _airbyte_ab_id TEXT, + _airbyte_emitted_at TIMESTAMP, + _airbyte_data TEXT + ) + PRIMARY INDEX _airbyte_ab_id + """ + writer.create_raw_table("dummy") + connection.cursor.return_value.execute.assert_called_once_with(expected_query) + + +def test_data_buffering(sql_writer: FireboltSQLWriter) -> None: + sql_writer.queue_write_data("dummy", "id1", 20200101, '{"key": "value"}') + sql_writer._buffer["dummy"][0] == ("id1", 20200101, '{"key": "value"}') + assert len(sql_writer._buffer["dummy"]) == 1 + assert len(sql_writer._buffer.keys()) == 1 + sql_writer.queue_write_data("dummy", "id2", 20200102, '{"key2": "value2"}') + sql_writer._buffer["dummy"][0] == ("id2", 20200102, '{"key2": "value2"}') + assert len(sql_writer._buffer["dummy"]) == 2 + assert len(sql_writer._buffer.keys()) == 1 + sql_writer.queue_write_data("dummy2", "id3", 20200103, '{"key3": "value3"}') + sql_writer._buffer["dummy"][0] == ("id3", 20200103, '{"key3": "value3"}') + assert len(sql_writer._buffer["dummy"]) == 2 + assert len(sql_writer._buffer["dummy2"]) == 1 + assert len(sql_writer._buffer.keys()) == 2 + + +def test_data_auto_flush_one_table(connection: MagicMock, sql_writer: FireboltSQLWriter) -> None: + sql_writer.flush_interval = 2 + sql_writer.queue_write_data("dummy", "id1", 20200101, '{"key": "value"}') + connection.cursor.return_value.executemany.assert_not_called() + assert sql_writer._values == 1 + sql_writer.queue_write_data("dummy", "id1", 20200101, '{"key": "value"}') + connection.cursor.return_value.executemany.assert_called_once() + assert len(sql_writer._buffer.keys()) == 0 + assert sql_writer._values == 0 + sql_writer.queue_write_data("dummy", "id1", 20200101, '{"key": "value"}') + assert len(sql_writer._buffer.keys()) == 1 + + +def test_data_auto_flush_multi_tables(connection: MagicMock, sql_writer: FireboltSQLWriter) -> None: + sql_writer.flush_interval = 2 + 
sql_writer.queue_write_data("dummy", "id1", 20200101, '{"key": "value"}') + connection.cursor.return_value.executemany.assert_not_called() + assert sql_writer._values == 1 + sql_writer.queue_write_data("dummy2", "id1", 20200101, '{"key": "value"}') + assert len(connection.cursor.return_value.executemany.mock_calls) == 2 + assert len(sql_writer._buffer.keys()) == 0 + assert sql_writer._values == 0 + + +def test_s3_default(s3_writer: FireboltS3Writer) -> None: + assert s3_writer.flush_interval == 100000 + assert s3_writer._values == 0 + assert len(s3_writer._buffer.keys()) == 0 + + +def test_s3_delete_tables(connection: MagicMock, s3_writer: FireboltS3Writer) -> None: + expected_sql = "DROP TABLE IF EXISTS _airbyte_raw_dummy" + s3_writer.delete_table("dummy") + connection.cursor.return_value.execute.assert_called_once_with(expected_sql) + + +@patch("pyarrow.parquet.write_to_dataset") +def test_s3_data_auto_flush_one_table(mock_write: MagicMock, s3_writer: FireboltS3Writer) -> None: + s3_writer.flush_interval = 2 + s3_writer.queue_write_data("dummy", "id1", 20200101, '{"key": "value"}') + mock_write.assert_not_called() + assert s3_writer._values == 1 + s3_writer.queue_write_data("dummy", "id1", 20200101, '{"key": "value"}') + mock_write.assert_called_once_with(table=ANY, root_path="dummy_bucket/airbyte_output/111_dummy-uuid/dummy", filesystem=s3_writer.fs) + assert len(s3_writer._buffer.keys()) == 0 + assert s3_writer._values == 0 + assert s3_writer._updated_tables == set(["dummy"]) + mock_write.reset_mock() + s3_writer.queue_write_data("dummy", "id1", 20200101, '{"key": "value"}') + mock_write.assert_not_called() + assert len(s3_writer._buffer.keys()) == 1 + assert s3_writer._updated_tables == set(["dummy"]) + + +@patch("pyarrow.parquet.write_to_dataset") +def test_s3_data_auto_flush_multi_tables(mock_write: MagicMock, s3_writer: FireboltS3Writer) -> None: + s3_writer.flush_interval = 2 + s3_writer.queue_write_data("dummy", "id1", 20200101, '{"key": "value"}') + 
mock_write.assert_not_called() + assert s3_writer._values == 1 + s3_writer.queue_write_data("dummy2", "id1", 20200101, '{"key": "value"}') + assert mock_write.mock_calls == [ + call(table=ANY, root_path="dummy_bucket/airbyte_output/111_dummy-uuid/dummy", filesystem=s3_writer.fs), + call(table=ANY, root_path="dummy_bucket/airbyte_output/111_dummy-uuid/dummy2", filesystem=s3_writer.fs), + ] + assert len(s3_writer._buffer.keys()) == 0 + assert s3_writer._values == 0 + assert s3_writer._updated_tables == set(["dummy", "dummy2"]) + + +def test_s3_final_flush(connection: MagicMock, s3_writer: FireboltS3Writer) -> None: + s3_writer._updated_tables = set(["dummy", "dummy2"]) + s3_writer.flush() + assert len(connection.cursor.return_value.execute.mock_calls) == 8 + expected_url1 = "s3://dummy_bucket/airbyte_output/111_dummy-uuid/dummy" + expected_url2 = "s3://dummy_bucket/airbyte_output/111_dummy-uuid/dummy2" + connection.cursor.return_value.execute.assert_any_call(ANY, parameters=(expected_url1, "access_key", "secret_key")) + connection.cursor.return_value.execute.assert_any_call(ANY, parameters=(expected_url2, "access_key", "secret_key")) + expected_query1 = "INSERT INTO _airbyte_raw_dummy SELECT * FROM ex_airbyte_raw_dummy" + expected_query2 = "INSERT INTO _airbyte_raw_dummy2 SELECT * FROM ex_airbyte_raw_dummy2" + connection.cursor.return_value.execute.assert_any_call(expected_query1) + connection.cursor.return_value.execute.assert_any_call(expected_query2) + + +def test_s3_cleanup(connection: MagicMock, s3_writer: FireboltS3Writer) -> None: + expected_sql = "DROP TABLE IF EXISTS ex_airbyte_raw_my_table" + bucket_path = "dummy_bucket/airbyte_output/111_dummy-uuid/my_table" + s3_writer.cleanup("my_table") + connection.cursor.return_value.execute.assert_called_once_with(expected_sql) + s3_writer.fs.delete_dir_contents.assert_called_once_with(bucket_path) diff --git a/airbyte-integrations/connectors/destination-gcs/Dockerfile 
b/airbyte-integrations/connectors/destination-gcs/Dockerfile index 559f42a32dd1..1d2b3725e12a 100644 --- a/airbyte-integrations/connectors/destination-gcs/Dockerfile +++ b/airbyte-integrations/connectors/destination-gcs/Dockerfile @@ -16,5 +16,5 @@ ENV APPLICATION destination-gcs COPY --from=build /airbyte /airbyte -LABEL io.airbyte.version=0.2.6 +LABEL io.airbyte.version=0.2.9 LABEL io.airbyte.name=airbyte/destination-gcs diff --git a/airbyte-integrations/connectors/destination-gcs/build.gradle b/airbyte-integrations/connectors/destination-gcs/build.gradle index 902577aa9222..fd6003415fc3 100644 --- a/airbyte-integrations/connectors/destination-gcs/build.gradle +++ b/airbyte-integrations/connectors/destination-gcs/build.gradle @@ -26,9 +26,9 @@ dependencies { implementation 'com.github.alexmojaki:s3-stream-upload:2.2.2' // parquet - implementation group: 'org.apache.hadoop', name: 'hadoop-common', version: '3.3.0' - implementation group: 'org.apache.hadoop', name: 'hadoop-aws', version: '3.3.0' - implementation group: 'org.apache.hadoop', name: 'hadoop-mapreduce-client-core', version: '3.3.0' + implementation group: 'org.apache.hadoop', name: 'hadoop-common', version: '3.3.3' + implementation group: 'org.apache.hadoop', name: 'hadoop-aws', version: '3.3.3' + implementation group: 'org.apache.hadoop', name: 'hadoop-mapreduce-client-core', version: '3.3.3' implementation group: 'org.apache.parquet', name: 'parquet-avro', version: '1.12.0' implementation group: 'com.github.airbytehq', name: 'json-avro-converter', version: '1.0.1' diff --git a/airbyte-integrations/connectors/destination-gcs/src/main/java/io/airbyte/integrations/destination/gcs/GcsDestinationConfig.java b/airbyte-integrations/connectors/destination-gcs/src/main/java/io/airbyte/integrations/destination/gcs/GcsDestinationConfig.java index ba070797edd9..16ddb90ce146 100644 --- a/airbyte-integrations/connectors/destination-gcs/src/main/java/io/airbyte/integrations/destination/gcs/GcsDestinationConfig.java 
+++ b/airbyte-integrations/connectors/destination-gcs/src/main/java/io/airbyte/integrations/destination/gcs/GcsDestinationConfig.java @@ -39,7 +39,6 @@ public GcsDestinationConfig(final String bucketName, bucketRegion, S3DestinationConstants.DEFAULT_PATH_FORMAT, credentialConfig.getS3CredentialConfig().orElseThrow(), - S3DestinationConstants.DEFAULT_PART_SIZE_MB, formatConfig, null); diff --git a/airbyte-integrations/connectors/destination-gcs/src/main/java/io/airbyte/integrations/destination/gcs/avro/GcsAvroWriter.java b/airbyte-integrations/connectors/destination-gcs/src/main/java/io/airbyte/integrations/destination/gcs/avro/GcsAvroWriter.java index 4771910da809..592e1a74ce35 100644 --- a/airbyte-integrations/connectors/destination-gcs/src/main/java/io/airbyte/integrations/destination/gcs/avro/GcsAvroWriter.java +++ b/airbyte-integrations/connectors/destination-gcs/src/main/java/io/airbyte/integrations/destination/gcs/avro/GcsAvroWriter.java @@ -4,6 +4,8 @@ package io.airbyte.integrations.destination.gcs.avro; +import static io.airbyte.integrations.destination.s3.util.StreamTransferManagerFactory.DEFAULT_PART_SIZE_MB; + import alex.mojaki.s3upload.MultiPartOutputStream; import alex.mojaki.s3upload.StreamTransferManager; import com.amazonaws.services.s3.AmazonS3; @@ -76,7 +78,7 @@ public GcsAvroWriter(final GcsDestinationConfig config, this.avroRecordFactory = new AvroRecordFactory(schema, converter); this.uploadManager = StreamTransferManagerFactory .create(config.getBucketName(), objectKey, s3Client) - .setPartSize(config.getFormatConfig().getPartSize()) + .setPartSize((long) DEFAULT_PART_SIZE_MB) .get(); // We only need one output stream as we only have one input stream. This is reasonably performant. 
this.outputStream = uploadManager.getMultiPartOutputStreams().get(0); diff --git a/airbyte-integrations/connectors/destination-gcs/src/main/java/io/airbyte/integrations/destination/gcs/csv/GcsCsvWriter.java b/airbyte-integrations/connectors/destination-gcs/src/main/java/io/airbyte/integrations/destination/gcs/csv/GcsCsvWriter.java index e8104e1fc2f8..1a50b4636485 100644 --- a/airbyte-integrations/connectors/destination-gcs/src/main/java/io/airbyte/integrations/destination/gcs/csv/GcsCsvWriter.java +++ b/airbyte-integrations/connectors/destination-gcs/src/main/java/io/airbyte/integrations/destination/gcs/csv/GcsCsvWriter.java @@ -4,6 +4,8 @@ package io.airbyte.integrations.destination.gcs.csv; +import static io.airbyte.integrations.destination.s3.util.StreamTransferManagerFactory.DEFAULT_PART_SIZE_MB; + import alex.mojaki.s3upload.MultiPartOutputStream; import alex.mojaki.s3upload.StreamTransferManager; import com.amazonaws.services.s3.AmazonS3; @@ -58,7 +60,7 @@ public GcsCsvWriter(final GcsDestinationConfig config, this.uploadManager = StreamTransferManagerFactory .create(config.getBucketName(), objectKey, s3Client) - .setPartSize(config.getFormatConfig().getPartSize()) + .setPartSize((long) DEFAULT_PART_SIZE_MB) .get(); // We only need one output stream as we only have one input stream. This is reasonably performant. 
this.outputStream = uploadManager.getMultiPartOutputStreams().get(0); diff --git a/airbyte-integrations/connectors/destination-gcs/src/main/java/io/airbyte/integrations/destination/gcs/jsonl/GcsJsonlWriter.java b/airbyte-integrations/connectors/destination-gcs/src/main/java/io/airbyte/integrations/destination/gcs/jsonl/GcsJsonlWriter.java index 7590c39d8ce1..5a930f267309 100644 --- a/airbyte-integrations/connectors/destination-gcs/src/main/java/io/airbyte/integrations/destination/gcs/jsonl/GcsJsonlWriter.java +++ b/airbyte-integrations/connectors/destination-gcs/src/main/java/io/airbyte/integrations/destination/gcs/jsonl/GcsJsonlWriter.java @@ -54,7 +54,6 @@ public GcsJsonlWriter(final GcsDestinationConfig config, this.uploadManager = StreamTransferManagerFactory .create(config.getBucketName(), objectKey, s3Client) - .setPartSize(config.getFormatConfig().getPartSize()) .get(); // We only need one output stream as we only have one input stream. This is reasonably performant. diff --git a/airbyte-integrations/connectors/destination-gcs/src/main/resources/spec.json b/airbyte-integrations/connectors/destination-gcs/src/main/resources/spec.json index b0d566f0eda9..13e68ca7fffc 100644 --- a/airbyte-integrations/connectors/destination-gcs/src/main/resources/spec.json +++ b/airbyte-integrations/connectors/destination-gcs/src/main/resources/spec.json @@ -10,7 +10,6 @@ "title": "GCS Destination Spec", "type": "object", "required": ["gcs_bucket_name", "gcs_bucket_path", "credential", "format"], - "additionalProperties": false, "properties": { "gcs_bucket_name": { "title": "GCS Bucket Name", @@ -226,13 +225,6 @@ } } ] - }, - "part_size_mb": { - "title": "Block Size (MB) for GCS multipart upload (Optional)", - "description": "This is the size of a \"Part\" being buffered in memory. It limits the memory usage when writing. Larger values will allow to upload a bigger files and improve the speed, but consumes9 more memory. 
Allowed values: min=5MB, max=525MB Default: 5MB.", - "type": "integer", - "default": 5, - "examples": [5] } } }, @@ -252,13 +244,6 @@ "default": "No flattening", "enum": ["No flattening", "Root level flattening"] }, - "part_size_mb": { - "title": "Block Size (MB) for GCS multipart upload (Optional)", - "description": "This is the size of a \"Part\" being buffered in memory. It limits the memory usage when writing. Larger values will allow to upload a bigger files and improve the speed, but consumes9 more memory. Allowed values: min=5MB, max=525MB Default: 5MB.", - "type": "integer", - "default": 5, - "examples": [5] - }, "compression": { "title": "Compression", "type": "object", @@ -299,13 +284,6 @@ "enum": ["JSONL"], "default": "JSONL" }, - "part_size_mb": { - "title": "Block Size (MB) for GCS multipart upload (Optional)", - "description": "This is the size of a \"Part\" being buffered in memory. It limits the memory usage when writing. Larger values will allow to upload a bigger files and improve the speed, but consumes9 more memory. 
Allowed values: min=5MB, max=525MB Default: 5MB.", - "type": "integer", - "default": 5, - "examples": [5] - }, "compression": { "title": "Compression", "type": "object", diff --git a/airbyte-integrations/connectors/destination-gcs/src/test-integration/java/io/airbyte/integrations/destination/gcs/GcsAvroDestinationAcceptanceTest.java b/airbyte-integrations/connectors/destination-gcs/src/test-integration/java/io/airbyte/integrations/destination/gcs/GcsAvroDestinationAcceptanceTest.java index 447e93292fd2..a22f84f43f66 100644 --- a/airbyte-integrations/connectors/destination-gcs/src/test-integration/java/io/airbyte/integrations/destination/gcs/GcsAvroDestinationAcceptanceTest.java +++ b/airbyte-integrations/connectors/destination-gcs/src/test-integration/java/io/airbyte/integrations/destination/gcs/GcsAvroDestinationAcceptanceTest.java @@ -14,15 +14,19 @@ import io.airbyte.integrations.destination.s3.avro.JsonFieldNameUpdater; import io.airbyte.integrations.destination.s3.util.AvroRecordHelper; import io.airbyte.integrations.standardtest.destination.comparator.TestDataComparator; +import java.util.HashMap; import java.util.LinkedList; import java.util.List; +import java.util.Map; +import java.util.Set; +import org.apache.avro.Schema.Type; import org.apache.avro.file.DataFileReader; import org.apache.avro.file.SeekableByteArrayInput; import org.apache.avro.generic.GenericData; import org.apache.avro.generic.GenericData.Record; import org.apache.avro.generic.GenericDatumReader; -public class GcsAvroDestinationAcceptanceTest extends GcsDestinationAcceptanceTest { +public class GcsAvroDestinationAcceptanceTest extends GcsAvroParquetDestinationAcceptanceTest { protected GcsAvroDestinationAcceptanceTest() { super(S3Format.AVRO); @@ -71,4 +75,25 @@ protected List retrieveRecords(final TestDestinationEnv testEnv, return jsonRecords; } + @Override + protected Map> retrieveDataTypesFromPersistedFiles(final String streamName, final String namespace) throws Exception { + + final 
List objectSummaries = getAllSyncedObjects(streamName, namespace); + Map> resultDataTypes = new HashMap<>(); + + for (final S3ObjectSummary objectSummary : objectSummaries) { + final S3Object object = s3Client.getObject(objectSummary.getBucketName(), objectSummary.getKey()); + try (final DataFileReader dataFileReader = new DataFileReader<>( + new SeekableByteArrayInput(object.getObjectContent().readAllBytes()), + new GenericDatumReader<>())) { + while (dataFileReader.hasNext()) { + final GenericData.Record record = dataFileReader.next(); + Map> actualDataTypes = getTypes(record); + resultDataTypes.putAll(actualDataTypes); + } + } + } + return resultDataTypes; + } + } diff --git a/airbyte-integrations/connectors/destination-gcs/src/test-integration/java/io/airbyte/integrations/destination/gcs/GcsAvroParquetDestinationAcceptanceTest.java b/airbyte-integrations/connectors/destination-gcs/src/test-integration/java/io/airbyte/integrations/destination/gcs/GcsAvroParquetDestinationAcceptanceTest.java new file mode 100644 index 000000000000..110b847ce0dd --- /dev/null +++ b/airbyte-integrations/connectors/destination-gcs/src/test-integration/java/io/airbyte/integrations/destination/gcs/GcsAvroParquetDestinationAcceptanceTest.java @@ -0,0 +1,146 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+ */ + +package io.airbyte.integrations.destination.gcs; + +import static org.junit.jupiter.api.Assertions.assertEquals; + +import com.fasterxml.jackson.databind.JsonNode; +import io.airbyte.commons.json.Jsons; +import io.airbyte.commons.resources.MoreResources; +import io.airbyte.integrations.destination.s3.S3Format; +import io.airbyte.integrations.destination.s3.avro.JsonSchemaType; +import io.airbyte.integrations.standardtest.destination.NumberDataTypeTestArgumentProvider; +import io.airbyte.protocol.models.AirbyteCatalog; +import io.airbyte.protocol.models.AirbyteMessage; +import io.airbyte.protocol.models.AirbyteStream; +import io.airbyte.protocol.models.CatalogHelpers; +import io.airbyte.protocol.models.ConfiguredAirbyteCatalog; +import java.io.IOException; +import java.util.Arrays; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; +import java.util.Set; +import java.util.function.Function; +import java.util.stream.Collectors; +import java.util.stream.StreamSupport; +import org.apache.avro.Schema; +import org.apache.avro.Schema.Field; +import org.apache.avro.Schema.Type; +import org.apache.avro.generic.GenericData.Record; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.ArgumentsSource; + +public abstract class GcsAvroParquetDestinationAcceptanceTest extends GcsDestinationAcceptanceTest { + + protected GcsAvroParquetDestinationAcceptanceTest(S3Format s3Format) { + super(s3Format); + } + + @ParameterizedTest + @ArgumentsSource(NumberDataTypeTestArgumentProvider.class) + public void testNumberDataType(String catalogFileName, String messagesFileName) throws Exception { + final AirbyteCatalog catalog = readCatalogFromFile(catalogFileName); + final List messages = readMessagesFromFile(messagesFileName); + + final JsonNode config = getConfig(); + final String defaultSchema = getDefaultSchema(config); + final ConfiguredAirbyteCatalog configuredCatalog = 
CatalogHelpers.toDefaultConfiguredCatalog(catalog); + runSyncAndVerifyStateOutput(config, messages, configuredCatalog, false); + + for (final AirbyteStream stream : catalog.getStreams()) { + final String streamName = stream.getName(); + final String schema = stream.getNamespace() != null ? stream.getNamespace() : defaultSchema; + + Map> actualSchemaTypes = retrieveDataTypesFromPersistedFiles(streamName, schema); + Map> expectedSchemaTypes = retrieveExpectedDataTypes(stream); + + assertEquals(expectedSchemaTypes, actualSchemaTypes); + } + } + + private Map> retrieveExpectedDataTypes(AirbyteStream stream) { + Iterable iterableNames = () -> stream.getJsonSchema().get("properties").fieldNames(); + Map nameToNode = StreamSupport.stream(iterableNames.spliterator(), false) + .collect(Collectors.toMap( + Function.identity(), + name -> getJsonNode(stream, name))); + + return nameToNode + .entrySet() + .stream() + .collect(Collectors.toMap( + Entry::getKey, + entry -> getExpectedSchemaType(entry.getValue()))); + } + + private JsonNode getJsonNode(AirbyteStream stream, String name) { + JsonNode properties = stream.getJsonSchema().get("properties"); + if (properties.size() == 1) { + return properties.get("data"); + } + return properties.get(name).get("items"); + } + + private Set getExpectedSchemaType(JsonNode fieldDefinition) { + final JsonNode typeProperty = fieldDefinition.get("type"); + final JsonNode airbyteTypeProperty = fieldDefinition.get("airbyte_type"); + final String airbyteTypePropertyText = airbyteTypeProperty == null ? 
null : airbyteTypeProperty.asText(); + return Arrays.stream(JsonSchemaType.values()) + .filter( + value -> value.getJsonSchemaType().equals(typeProperty.asText()) && compareAirbyteTypes(airbyteTypePropertyText, value)) + .map(JsonSchemaType::getAvroType) + .collect(Collectors.toSet()); + } + + private boolean compareAirbyteTypes(String airbyteTypePropertyText, JsonSchemaType value) { + if (airbyteTypePropertyText == null) { + return value.getJsonSchemaAirbyteType() == null; + } + return airbyteTypePropertyText.equals(value.getJsonSchemaAirbyteType()); + } + + private AirbyteCatalog readCatalogFromFile(final String catalogFilename) throws IOException { + return Jsons.deserialize(MoreResources.readResource(catalogFilename), AirbyteCatalog.class); + } + + private List readMessagesFromFile(final String messagesFilename) throws IOException { + return MoreResources.readResource(messagesFilename).lines() + .map(record -> Jsons.deserialize(record, AirbyteMessage.class)).collect(Collectors.toList()); + } + + protected abstract Map> retrieveDataTypesFromPersistedFiles(final String streamName, final String namespace) throws Exception; + + protected Map> getTypes(Record record) { + + List fieldList = record + .getSchema() + .getFields() + .stream() + .filter(field -> !field.name().startsWith("_airbyte")) + .toList(); + + if (fieldList.size() == 1) { + return fieldList + .stream() + .collect( + Collectors.toMap( + Field::name, + field -> field.schema().getTypes().stream().map(Schema::getType).filter(type -> !type.equals(Type.NULL)) + .collect(Collectors.toSet()))); + } else { + return fieldList + .stream() + .collect( + Collectors.toMap( + Field::name, + field -> field.schema().getTypes() + .stream().filter(type -> !type.getType().equals(Type.NULL)) + .flatMap(type -> type.getElementType().getTypes().stream()).map(Schema::getType).filter(type -> !type.equals(Type.NULL)) + .collect(Collectors.toSet()))); + } + } + +} diff --git 
a/airbyte-integrations/connectors/destination-gcs/src/test-integration/java/io/airbyte/integrations/destination/gcs/GcsParquetDestinationAcceptanceTest.java b/airbyte-integrations/connectors/destination-gcs/src/test-integration/java/io/airbyte/integrations/destination/gcs/GcsParquetDestinationAcceptanceTest.java index fbbda1270e96..4ba7afe1b5d4 100644 --- a/airbyte-integrations/connectors/destination-gcs/src/test-integration/java/io/airbyte/integrations/destination/gcs/GcsParquetDestinationAcceptanceTest.java +++ b/airbyte-integrations/connectors/destination-gcs/src/test-integration/java/io/airbyte/integrations/destination/gcs/GcsParquetDestinationAcceptanceTest.java @@ -13,20 +13,25 @@ import io.airbyte.integrations.destination.s3.S3Format; import io.airbyte.integrations.destination.s3.avro.AvroConstants; import io.airbyte.integrations.destination.s3.avro.JsonFieldNameUpdater; +import io.airbyte.integrations.destination.s3.parquet.S3ParquetWriter; import io.airbyte.integrations.destination.s3.util.AvroRecordHelper; import io.airbyte.integrations.standardtest.destination.comparator.TestDataComparator; import java.io.IOException; import java.net.URI; import java.net.URISyntaxException; +import java.util.HashMap; import java.util.LinkedList; import java.util.List; import java.util.Map; +import java.util.Set; +import org.apache.avro.Schema.Type; import org.apache.avro.generic.GenericData; +import org.apache.avro.generic.GenericData.Record; import org.apache.hadoop.conf.Configuration; import org.apache.parquet.avro.AvroReadSupport; import org.apache.parquet.hadoop.ParquetReader; -public class GcsParquetDestinationAcceptanceTest extends GcsDestinationAcceptanceTest { +public class GcsParquetDestinationAcceptanceTest extends GcsAvroParquetDestinationAcceptanceTest { protected GcsParquetDestinationAcceptanceTest() { super(S3Format.PARQUET); @@ -78,4 +83,30 @@ protected List retrieveRecords(final TestDestinationEnv testEnv, return jsonRecords; } + @Override + protected 
Map> retrieveDataTypesFromPersistedFiles(final String streamName, final String namespace) throws Exception { + + final List objectSummaries = getAllSyncedObjects(streamName, namespace); + final Map> resultDataTypes = new HashMap<>(); + + for (final S3ObjectSummary objectSummary : objectSummaries) { + final S3Object object = s3Client.getObject(objectSummary.getBucketName(), objectSummary.getKey()); + final URI uri = new URI(String.format("s3a://%s/%s", object.getBucketName(), object.getKey())); + final var path = new org.apache.hadoop.fs.Path(uri); + final Configuration hadoopConfig = S3ParquetWriter.getHadoopConfig(config); + + try (final ParquetReader parquetReader = ParquetReader.builder(new AvroReadSupport<>(), path) + .withConf(hadoopConfig) + .build()) { + GenericData.Record record; + while ((record = parquetReader.read()) != null) { + Map> actualDataTypes = getTypes(record); + resultDataTypes.putAll(actualDataTypes); + } + } + } + + return resultDataTypes; + } + } diff --git a/airbyte-integrations/connectors/destination-gcs/src/test/java/io/airbyte/integrations/destination/gcs/avro/GcsAvroFormatConfigTest.java b/airbyte-integrations/connectors/destination-gcs/src/test/java/io/airbyte/integrations/destination/gcs/avro/GcsAvroFormatConfigTest.java index 5fc6a590b378..c2f0aa5c1791 100644 --- a/airbyte-integrations/connectors/destination-gcs/src/test/java/io/airbyte/integrations/destination/gcs/avro/GcsAvroFormatConfigTest.java +++ b/airbyte-integrations/connectors/destination-gcs/src/test/java/io/airbyte/integrations/destination/gcs/avro/GcsAvroFormatConfigTest.java @@ -5,6 +5,7 @@ package io.airbyte.integrations.destination.gcs.avro; import static com.amazonaws.services.s3.internal.Constants.MB; +import static io.airbyte.integrations.destination.s3.util.StreamTransferManagerFactory.DEFAULT_PART_SIZE_MB; import static org.junit.jupiter.api.Assertions.assertEquals; import alex.mojaki.s3upload.StreamTransferManager; @@ -13,7 +14,6 @@ import 
io.airbyte.commons.json.Jsons; import io.airbyte.integrations.destination.gcs.GcsDestinationConfig; import io.airbyte.integrations.destination.gcs.util.ConfigTestUtils; -import io.airbyte.integrations.destination.s3.S3DestinationConstants; import io.airbyte.integrations.destination.s3.S3FormatConfig; import io.airbyte.integrations.destination.s3.avro.S3AvroFormatConfig; import io.airbyte.integrations.destination.s3.util.StreamTransferManagerFactory; @@ -104,8 +104,7 @@ public void testParseCodecConfigInvalid() { public void testHandlePartSizeConfig() throws IllegalAccessException { final JsonNode config = ConfigTestUtils.getBaseConfig(Jsons.deserialize("{\n" - + " \"format_type\": \"AVRO\",\n" - + " \"part_size_mb\": 6\n" + + " \"format_type\": \"AVRO\"\n" + "}")); final GcsDestinationConfig gcsDestinationConfig = GcsDestinationConfig @@ -114,15 +113,13 @@ public void testHandlePartSizeConfig() throws IllegalAccessException { final S3FormatConfig formatConfig = gcsDestinationConfig.getFormatConfig(); assertEquals("AVRO", formatConfig.getFormat().name()); - assertEquals(6, formatConfig.getPartSize()); // Assert that is set properly in config final StreamTransferManager streamTransferManager = StreamTransferManagerFactory .create(gcsDestinationConfig.getBucketName(), "objectKey", null) - .setPartSize(gcsDestinationConfig.getFormatConfig().getPartSize()) .get(); final Integer partSizeBytes = (Integer) FieldUtils.readField(streamTransferManager, "partSize", true); - assertEquals(MB * 6, partSizeBytes); + assertEquals(MB * DEFAULT_PART_SIZE_MB, partSizeBytes); } @Test @@ -138,11 +135,10 @@ public void testHandleAbsenceOfPartSizeConfig() throws IllegalAccessException { final StreamTransferManager streamTransferManager = StreamTransferManagerFactory .create(gcsDestinationConfig.getBucketName(), "objectKey", null) - .setPartSize(gcsDestinationConfig.getFormatConfig().getPartSize()) .get(); final Integer partSizeBytes = (Integer) FieldUtils.readField(streamTransferManager, 
"partSize", true); - assertEquals(MB * S3DestinationConstants.DEFAULT_PART_SIZE_MB, partSizeBytes); + assertEquals(MB * DEFAULT_PART_SIZE_MB, partSizeBytes); } } diff --git a/airbyte-integrations/connectors/destination-gcs/src/test/java/io/airbyte/integrations/destination/gcs/csv/GcsCsvFormatConfigTest.java b/airbyte-integrations/connectors/destination-gcs/src/test/java/io/airbyte/integrations/destination/gcs/csv/GcsCsvFormatConfigTest.java index 6df74ec8ca2c..56b948967fe1 100644 --- a/airbyte-integrations/connectors/destination-gcs/src/test/java/io/airbyte/integrations/destination/gcs/csv/GcsCsvFormatConfigTest.java +++ b/airbyte-integrations/connectors/destination-gcs/src/test/java/io/airbyte/integrations/destination/gcs/csv/GcsCsvFormatConfigTest.java @@ -5,6 +5,7 @@ package io.airbyte.integrations.destination.gcs.csv; import static com.amazonaws.services.s3.internal.Constants.MB; +import static io.airbyte.integrations.destination.s3.util.StreamTransferManagerFactory.DEFAULT_PART_SIZE_MB; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertTrue; @@ -13,7 +14,6 @@ import io.airbyte.commons.json.Jsons; import io.airbyte.integrations.destination.gcs.GcsDestinationConfig; import io.airbyte.integrations.destination.gcs.util.ConfigTestUtils; -import io.airbyte.integrations.destination.s3.S3DestinationConstants; import io.airbyte.integrations.destination.s3.S3FormatConfig; import io.airbyte.integrations.destination.s3.csv.S3CsvFormatConfig.Flattening; import io.airbyte.integrations.destination.s3.util.StreamTransferManagerFactory; @@ -41,8 +41,7 @@ public void testHandlePartSizeConfig() throws IllegalAccessException { final JsonNode config = ConfigTestUtils.getBaseConfig(Jsons.deserialize("{\n" + " \"format_type\": \"CSV\",\n" - + " \"flattening\": \"Root level flattening\",\n" - + " \"part_size_mb\": 6\n" + + " \"flattening\": \"Root level flattening\"\n" + "}")); final GcsDestinationConfig 
gcsDestinationConfig = GcsDestinationConfig.getGcsDestinationConfig(config); @@ -50,15 +49,13 @@ public void testHandlePartSizeConfig() throws IllegalAccessException { final S3FormatConfig formatConfig = gcsDestinationConfig.getFormatConfig(); assertEquals("CSV", formatConfig.getFormat().name()); - assertEquals(6, formatConfig.getPartSize()); // Assert that is set properly in config final StreamTransferManager streamTransferManager = StreamTransferManagerFactory .create(gcsDestinationConfig.getBucketName(), "objectKey", null) - .setPartSize(gcsDestinationConfig.getFormatConfig().getPartSize()) .get(); final Integer partSizeBytes = (Integer) FieldUtils.readField(streamTransferManager, "partSize", true); - assertEquals(MB * 6, partSizeBytes); + assertEquals(MB * DEFAULT_PART_SIZE_MB, partSizeBytes); } @Test @@ -74,11 +71,10 @@ public void testHandleAbsenceOfPartSizeConfig() throws IllegalAccessException { final StreamTransferManager streamTransferManager = StreamTransferManagerFactory .create(gcsDestinationConfig.getBucketName(), "objectKey", null) - .setPartSize(gcsDestinationConfig.getFormatConfig().getPartSize()) .get(); final Integer partSizeBytes = (Integer) FieldUtils.readField(streamTransferManager, "partSize", true); - assertEquals(MB * S3DestinationConstants.DEFAULT_PART_SIZE_MB, partSizeBytes); + assertEquals(MB * DEFAULT_PART_SIZE_MB, partSizeBytes); } } diff --git a/airbyte-integrations/connectors/destination-gcs/src/test/java/io/airbyte/integrations/destination/gcs/jsonl/GcsJsonlFormatConfigTest.java b/airbyte-integrations/connectors/destination-gcs/src/test/java/io/airbyte/integrations/destination/gcs/jsonl/GcsJsonlFormatConfigTest.java index aa89beeb318a..8b8ddbb08a24 100644 --- a/airbyte-integrations/connectors/destination-gcs/src/test/java/io/airbyte/integrations/destination/gcs/jsonl/GcsJsonlFormatConfigTest.java +++ 
b/airbyte-integrations/connectors/destination-gcs/src/test/java/io/airbyte/integrations/destination/gcs/jsonl/GcsJsonlFormatConfigTest.java @@ -5,6 +5,7 @@ package io.airbyte.integrations.destination.gcs.jsonl; import static com.amazonaws.services.s3.internal.Constants.MB; +import static io.airbyte.integrations.destination.s3.util.StreamTransferManagerFactory.DEFAULT_PART_SIZE_MB; import static org.junit.jupiter.api.Assertions.assertEquals; import alex.mojaki.s3upload.StreamTransferManager; @@ -12,7 +13,6 @@ import io.airbyte.commons.json.Jsons; import io.airbyte.integrations.destination.gcs.GcsDestinationConfig; import io.airbyte.integrations.destination.gcs.util.ConfigTestUtils; -import io.airbyte.integrations.destination.s3.S3DestinationConstants; import io.airbyte.integrations.destination.s3.S3FormatConfig; import io.airbyte.integrations.destination.s3.util.StreamTransferManagerFactory; import org.apache.commons.lang3.reflect.FieldUtils; @@ -26,8 +26,7 @@ public class GcsJsonlFormatConfigTest { public void testHandlePartSizeConfig() throws IllegalAccessException { final JsonNode config = ConfigTestUtils.getBaseConfig(Jsons.deserialize("{\n" - + " \"format_type\": \"JSONL\",\n" - + " \"part_size_mb\": 6\n" + + " \"format_type\": \"JSONL\"\n" + "}")); final GcsDestinationConfig gcsDestinationConfig = GcsDestinationConfig @@ -36,16 +35,14 @@ public void testHandlePartSizeConfig() throws IllegalAccessException { final S3FormatConfig formatConfig = gcsDestinationConfig.getFormatConfig(); assertEquals("JSONL", formatConfig.getFormat().name()); - assertEquals(6, formatConfig.getPartSize()); // Assert that is set properly in config final StreamTransferManager streamTransferManager = StreamTransferManagerFactory .create(gcsDestinationConfig.getBucketName(), "objectKey", null) - .setPartSize(gcsDestinationConfig.getFormatConfig().getPartSize()) .get(); final Integer partSizeBytes = (Integer) FieldUtils.readField(streamTransferManager, "partSize", true); - assertEquals(MB 
* 6, partSizeBytes); + assertEquals(MB * DEFAULT_PART_SIZE_MB, partSizeBytes); } @Test @@ -61,11 +58,10 @@ public void testHandleAbsenceOfPartSizeConfig() throws IllegalAccessException { final StreamTransferManager streamTransferManager = StreamTransferManagerFactory .create(gcsDestinationConfig.getBucketName(), "objectKey", null) - .setPartSize(gcsDestinationConfig.getFormatConfig().getPartSize()) .get(); final Integer partSizeBytes = (Integer) FieldUtils.readField(streamTransferManager, "partSize", true); - assertEquals(MB * S3DestinationConstants.DEFAULT_PART_SIZE_MB, partSizeBytes); + assertEquals(MB * DEFAULT_PART_SIZE_MB, partSizeBytes); } } diff --git a/airbyte-integrations/connectors/destination-jdbc/Dockerfile b/airbyte-integrations/connectors/destination-jdbc/Dockerfile index aa9e1177a2b1..a35e7fb7b3f2 100644 --- a/airbyte-integrations/connectors/destination-jdbc/Dockerfile +++ b/airbyte-integrations/connectors/destination-jdbc/Dockerfile @@ -16,5 +16,5 @@ ENV APPLICATION destination-jdbc COPY --from=build /airbyte /airbyte -LABEL io.airbyte.version=0.3.13 +LABEL io.airbyte.version=0.3.14 LABEL io.airbyte.name=airbyte/destination-jdbc diff --git a/airbyte-integrations/connectors/destination-jdbc/build.gradle b/airbyte-integrations/connectors/destination-jdbc/build.gradle index f4f2150be85b..b73f59604c0e 100644 --- a/airbyte-integrations/connectors/destination-jdbc/build.gradle +++ b/airbyte-integrations/connectors/destination-jdbc/build.gradle @@ -23,11 +23,11 @@ dependencies { // https://github.com/aesy/datasize implementation "io.aesy:datasize:1.0.0" - testImplementation libs.testcontainers.postgresql + testImplementation libs.connectors.testcontainers.postgresql testImplementation "org.mockito:mockito-inline:4.1.0" integrationTestJavaImplementation project(':airbyte-integrations:bases:standard-destination-test') - integrationTestJavaImplementation libs.testcontainers.postgresql + integrationTestJavaImplementation 
libs.connectors.testcontainers.postgresql implementation files(project(':airbyte-integrations:bases:base-java').airbyteDocker.outputs) integrationTestJavaImplementation files(project(':airbyte-integrations:bases:base-normalization').airbyteDocker.outputs) diff --git a/airbyte-integrations/connectors/destination-jdbc/src/main/java/io/airbyte/integrations/destination/jdbc/JdbcBufferedConsumerFactory.java b/airbyte-integrations/connectors/destination-jdbc/src/main/java/io/airbyte/integrations/destination/jdbc/JdbcBufferedConsumerFactory.java index 3cbda6b7be68..416f33553e54 100644 --- a/airbyte-integrations/connectors/destination-jdbc/src/main/java/io/airbyte/integrations/destination/jdbc/JdbcBufferedConsumerFactory.java +++ b/airbyte-integrations/connectors/destination-jdbc/src/main/java/io/airbyte/integrations/destination/jdbc/JdbcBufferedConsumerFactory.java @@ -5,7 +5,6 @@ package io.airbyte.integrations.destination.jdbc; import static io.airbyte.integrations.destination.jdbc.constants.GlobalDataSizeConstants.DEFAULT_MAX_BATCH_SIZE_BYTES; -import static java.util.stream.Collectors.toSet; import com.fasterxml.jackson.databind.JsonNode; import com.google.common.base.Preconditions; @@ -104,7 +103,7 @@ private static Function toWriteConfig( /** * Defer to the {@link AirbyteStream}'s namespace. If this is not set, use the destination's default * schema. This namespace is source-provided, and can be potentially empty. - * + *

* The logic here matches the logic in the catalog_process.py for Normalization. Any modifications * need to be reflected there and vice versa. */ @@ -159,7 +158,7 @@ private static OnCloseFunction onCloseFunction(final JdbcDatabase database, // copy data if (!hasFailed) { final List queryList = new ArrayList<>(); - sqlOperations.onDestinationCloseOperations(database, writeConfigs.stream().map(WriteConfig::getOutputSchemaName).collect(toSet())); + sqlOperations.onDestinationCloseOperations(database, writeConfigs); LOGGER.info("Finalizing tables in destination started for {} streams", writeConfigs.size()); for (final WriteConfig writeConfig : writeConfigs) { final String schemaName = writeConfig.getOutputSchemaName(); @@ -193,7 +192,9 @@ private static OnCloseFunction onCloseFunction(final JdbcDatabase database, sqlOperations.dropTableIfExists(database, schemaName, tmpTableName); } LOGGER.info("Cleaning tmp tables in destination completed."); - }; + } + + ; } private static AirbyteStreamNameNamespacePair toNameNamespacePair(final WriteConfig config) { diff --git a/airbyte-integrations/connectors/destination-jdbc/src/main/java/io/airbyte/integrations/destination/jdbc/SqlOperations.java b/airbyte-integrations/connectors/destination-jdbc/src/main/java/io/airbyte/integrations/destination/jdbc/SqlOperations.java index 77be0d088239..07104b5b9f66 100644 --- a/airbyte-integrations/connectors/destination-jdbc/src/main/java/io/airbyte/integrations/destination/jdbc/SqlOperations.java +++ b/airbyte-integrations/connectors/destination-jdbc/src/main/java/io/airbyte/integrations/destination/jdbc/SqlOperations.java @@ -8,7 +8,6 @@ import io.airbyte.db.jdbc.JdbcDatabase; import io.airbyte.protocol.models.AirbyteRecordMessage; import java.util.List; -import java.util.Set; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -130,10 +129,10 @@ default boolean isSchemaExists(final JdbcDatabase database, final String schemaN * Redshift destination: * * @param database - Database 
that the connector is interacting with - * @param schemaNames - schemas will be discovered + * @param writeConfigs - schemas and tables (streams) will be discovered * @see io.airbyte.integrations.destination.redshift.RedshiftSqlOperations#onDestinationCloseOperations */ - default void onDestinationCloseOperations(JdbcDatabase database, Set schemaNames) { + default void onDestinationCloseOperations(final JdbcDatabase database, final List writeConfigs) { // do nothing LOGGER.info("No onDestinationCloseOperations required for this destination."); } diff --git a/airbyte-integrations/connectors/destination-jdbc/src/main/java/io/airbyte/integrations/destination/jdbc/copy/CopyConsumerFactory.java b/airbyte-integrations/connectors/destination-jdbc/src/main/java/io/airbyte/integrations/destination/jdbc/copy/CopyConsumerFactory.java index 12bdfa66ae49..6d8783ec12bf 100644 --- a/airbyte-integrations/connectors/destination-jdbc/src/main/java/io/airbyte/integrations/destination/jdbc/copy/CopyConsumerFactory.java +++ b/airbyte-integrations/connectors/destination-jdbc/src/main/java/io/airbyte/integrations/destination/jdbc/copy/CopyConsumerFactory.java @@ -5,7 +5,6 @@ package io.airbyte.integrations.destination.jdbc.copy; import static io.airbyte.integrations.destination.jdbc.constants.GlobalDataSizeConstants.DEFAULT_MAX_BATCH_SIZE_BYTES; -import static java.util.stream.Collectors.toSet; import io.airbyte.db.factory.DataSourceFactory; import io.airbyte.db.jdbc.JdbcDatabase; @@ -162,8 +161,6 @@ private static void closeAsOneTransaction(final Map resolveIdentifier(final String identifier) { @Override protected void setup(final TestDestinationEnv testEnv) throws Exception { - bastion.initAndStartBastion(); + bastion.initAndStartBastion(network); startAndInitJdbcContainer(); } private void startAndInitJdbcContainer() throws Exception { final DockerImageName mcsImage = DockerImageName.parse("fengdi/columnstore:1.5.2").asCompatibleSubstituteFor("mariadb"); db = new 
MariaDBContainer<>(mcsImage) - .withNetwork(bastion.getNetWork()); + .withNetwork(network); db.start(); final String createUser = String.format("CREATE USER '%s'@'%%' IDENTIFIED BY '%s';", db.getUsername(), db.getPassword()); diff --git a/airbyte-integrations/connectors/destination-meilisearch/Dockerfile b/airbyte-integrations/connectors/destination-meilisearch/Dockerfile index 0abb9ddc620f..b664503d7ba5 100644 --- a/airbyte-integrations/connectors/destination-meilisearch/Dockerfile +++ b/airbyte-integrations/connectors/destination-meilisearch/Dockerfile @@ -16,5 +16,5 @@ ENV APPLICATION destination-meilisearch COPY --from=build /airbyte /airbyte -LABEL io.airbyte.version=0.2.12 +LABEL io.airbyte.version=0.2.13 LABEL io.airbyte.name=airbyte/destination-meilisearch diff --git a/airbyte-integrations/connectors/destination-meilisearch/build.gradle b/airbyte-integrations/connectors/destination-meilisearch/build.gradle index f47229ce7eb7..9290baeddb9e 100644 --- a/airbyte-integrations/connectors/destination-meilisearch/build.gradle +++ b/airbyte-integrations/connectors/destination-meilisearch/build.gradle @@ -20,7 +20,7 @@ dependencies { integrationTestJavaImplementation project(':airbyte-integrations:bases:standard-destination-test') integrationTestJavaImplementation project(':airbyte-integrations:connectors:destination-meilisearch') - integrationTestJavaImplementation libs.testcontainers + integrationTestJavaImplementation libs.connectors.testcontainers implementation files(project(':airbyte-integrations:bases:base-java').airbyteDocker.outputs) } diff --git a/airbyte-integrations/connectors/destination-mongodb-strict-encrypt/build.gradle b/airbyte-integrations/connectors/destination-mongodb-strict-encrypt/build.gradle index 03d939cec48a..287ac8afe5af 100644 --- a/airbyte-integrations/connectors/destination-mongodb-strict-encrypt/build.gradle +++ b/airbyte-integrations/connectors/destination-mongodb-strict-encrypt/build.gradle @@ -18,7 +18,7 @@ dependencies { 
implementation project(':airbyte-integrations:connectors:destination-mongodb') implementation 'org.mongodb:mongodb-driver-sync:4.3.0' - testImplementation libs.testcontainers.mongodb + testImplementation libs.connectors.testcontainers.mongodb integrationTestJavaImplementation project(':airbyte-integrations:connectors:destination-mongodb-strict-encrypt') integrationTestJavaImplementation project(':airbyte-integrations:bases:standard-destination-test') diff --git a/airbyte-integrations/connectors/destination-mongodb/build.gradle b/airbyte-integrations/connectors/destination-mongodb/build.gradle index bab4fa8e6da9..4c328ddedf93 100644 --- a/airbyte-integrations/connectors/destination-mongodb/build.gradle +++ b/airbyte-integrations/connectors/destination-mongodb/build.gradle @@ -18,7 +18,7 @@ dependencies { implementation 'org.mongodb:mongodb-driver-sync:4.3.0' - testImplementation libs.testcontainers.mongodb + testImplementation libs.connectors.testcontainers.mongodb integrationTestJavaImplementation project(':airbyte-integrations:connectors:destination-mongodb') integrationTestJavaImplementation project(':airbyte-integrations:bases:standard-destination-test') diff --git a/airbyte-integrations/connectors/destination-mongodb/src/test-integration/java/io/airbyte/integrations/destination/mongodb/MongodbDestinationAcceptanceTest.java b/airbyte-integrations/connectors/destination-mongodb/src/test-integration/java/io/airbyte/integrations/destination/mongodb/MongodbDestinationAcceptanceTest.java index 2115990996e8..49dcaadfa742 100644 --- a/airbyte-integrations/connectors/destination-mongodb/src/test-integration/java/io/airbyte/integrations/destination/mongodb/MongodbDestinationAcceptanceTest.java +++ b/airbyte-integrations/connectors/destination-mongodb/src/test-integration/java/io/airbyte/integrations/destination/mongodb/MongodbDestinationAcceptanceTest.java @@ -111,26 +111,6 @@ protected void tearDown(final TestDestinationEnv testEnv) { container.close(); } - @Override - 
protected TestDataComparator getTestDataComparator() { - return new AdvancedTestDataComparator(); - } - - @Override - protected boolean supportBasicDataTypeTest() { - return true; - } - - @Override - protected boolean supportArrayDataTypeTest() { - return true; - } - - @Override - protected boolean supportObjectDataTypeTest() { - return true; - } - /* Helpers */ private JsonNode getAuthTypeConfig() { diff --git a/airbyte-integrations/connectors/destination-mssql-strict-encrypt/Dockerfile b/airbyte-integrations/connectors/destination-mssql-strict-encrypt/Dockerfile index f50b17ed7ab5..85280d2b9189 100644 --- a/airbyte-integrations/connectors/destination-mssql-strict-encrypt/Dockerfile +++ b/airbyte-integrations/connectors/destination-mssql-strict-encrypt/Dockerfile @@ -16,5 +16,5 @@ ENV APPLICATION destination-mssql-strict-encrypt COPY --from=build /airbyte /airbyte -LABEL io.airbyte.version=0.1.8 +LABEL io.airbyte.version=0.1.9 LABEL io.airbyte.name=airbyte/destination-mssql-strict-encrypt diff --git a/airbyte-integrations/connectors/destination-mssql-strict-encrypt/build.gradle b/airbyte-integrations/connectors/destination-mssql-strict-encrypt/build.gradle index a6752a92115f..3f12dcac7cc2 100644 --- a/airbyte-integrations/connectors/destination-mssql-strict-encrypt/build.gradle +++ b/airbyte-integrations/connectors/destination-mssql-strict-encrypt/build.gradle @@ -21,7 +21,7 @@ dependencies { implementation 'com.microsoft.sqlserver:mssql-jdbc:8.4.1.jre14' testImplementation 'org.apache.commons:commons-lang3:3.11' - testImplementation libs.testcontainers.mssqlserver + testImplementation libs.connectors.testcontainers.mssqlserver integrationTestJavaImplementation project(':airbyte-integrations:bases:standard-destination-test') integrationTestJavaImplementation project(':airbyte-integrations:connectors:destination-mssql-strict-encrypt') diff --git a/airbyte-integrations/connectors/destination-mssql/build.gradle 
b/airbyte-integrations/connectors/destination-mssql/build.gradle index cb36b26b9e5d..ff95ba711b84 100644 --- a/airbyte-integrations/connectors/destination-mssql/build.gradle +++ b/airbyte-integrations/connectors/destination-mssql/build.gradle @@ -19,7 +19,7 @@ dependencies { implementation 'com.microsoft.sqlserver:mssql-jdbc:8.4.1.jre14' testImplementation 'org.apache.commons:commons-lang3:3.11' - testImplementation libs.testcontainers.mssqlserver + testImplementation libs.connectors.testcontainers.mssqlserver integrationTestJavaImplementation project(':airbyte-integrations:bases:standard-destination-test') integrationTestJavaImplementation project(':airbyte-integrations:connectors:destination-mssql') diff --git a/airbyte-integrations/connectors/destination-mssql/src/main/java/io/airbyte/integrations/destination/mssql/MSSQLDestination.java b/airbyte-integrations/connectors/destination-mssql/src/main/java/io/airbyte/integrations/destination/mssql/MSSQLDestination.java index 35a57bc65cee..9b13bde030ca 100644 --- a/airbyte-integrations/connectors/destination-mssql/src/main/java/io/airbyte/integrations/destination/mssql/MSSQLDestination.java +++ b/airbyte-integrations/connectors/destination-mssql/src/main/java/io/airbyte/integrations/destination/mssql/MSSQLDestination.java @@ -76,7 +76,7 @@ public JsonNode toJdbcConfig(final JsonNode config) { .put("schema", schema); if (config.has(JDBC_URL_PARAMS_KEY)) { - //configBuilder.put("connection_properties", config.get(JDBC_URL_PARAMS_KEY)); + // configBuilder.put("connection_properties", config.get(JDBC_URL_PARAMS_KEY)); configBuilder.put(JDBC_URL_PARAMS_KEY, config.get(JDBC_URL_PARAMS_KEY)); } diff --git a/airbyte-integrations/connectors/destination-mssql/src/test-integration/java/io/airbyte/integrations/destination/mssql/SshMSSQLDestinationAcceptanceTest.java b/airbyte-integrations/connectors/destination-mssql/src/test-integration/java/io/airbyte/integrations/destination/mssql/SshMSSQLDestinationAcceptanceTest.java index 
b0f295d5a07a..d8082c6d4473 100644 --- a/airbyte-integrations/connectors/destination-mssql/src/test-integration/java/io/airbyte/integrations/destination/mssql/SshMSSQLDestinationAcceptanceTest.java +++ b/airbyte-integrations/connectors/destination-mssql/src/test-integration/java/io/airbyte/integrations/destination/mssql/SshMSSQLDestinationAcceptanceTest.java @@ -39,6 +39,7 @@ public abstract class SshMSSQLDestinationAcceptanceTest extends DestinationAccep private final String schemaName = RandomStringUtils.randomAlphabetic(8).toLowerCase(); private static final String database = "test"; private static MSSQLServerContainer db; + private static final Network network = Network.newNetwork(); private final SshBastionContainer bastion = new SshBastionContainer(); public abstract SshTunnel.TunnelMethod getTunnelMethod(); @@ -111,7 +112,7 @@ public ImmutableMap.Builder getMSSQLDbConfigBuilder(final JdbcDa return ImmutableMap.builder() .put("host", Objects.requireNonNull(db.getContainerInfo().getNetworkSettings() .getNetworks() - .get(((Network.NetworkImpl) bastion.getNetWork()).getName()) + .get(((Network.NetworkImpl) network).getName()) .getIpAddress())) .put("username", db.getUsername()) .put("password", db.getPassword()) @@ -173,13 +174,13 @@ protected void setup(final TestDestinationEnv testEnv) throws Exception { } private void startTestContainers() { - bastion.initAndStartBastion(); + bastion.initAndStartBastion(network); initAndStartJdbcContainer(); } private void initAndStartJdbcContainer() { db = new MSSQLServerContainer<>("mcr.microsoft.com/mssql/server:2019-GA-ubuntu-16.04") - .withNetwork(bastion.getNetWork()) + .withNetwork(network) .acceptLicense(); db.start(); } diff --git a/airbyte-integrations/connectors/destination-mssql/src/test/java/io/airbyte/integrations/destination/mssql/MSSQLDestinationTest.java b/airbyte-integrations/connectors/destination-mssql/src/test/java/io/airbyte/integrations/destination/mssql/MSSQLDestinationTest.java index 
ee85c8b2c2ea..27b7e4dd5542 100644 --- a/airbyte-integrations/connectors/destination-mssql/src/test/java/io/airbyte/integrations/destination/mssql/MSSQLDestinationTest.java +++ b/airbyte-integrations/connectors/destination-mssql/src/test/java/io/airbyte/integrations/destination/mssql/MSSQLDestinationTest.java @@ -191,24 +191,24 @@ void testExtraParams() { private JsonNode buildConfigNoJdbcParameters() { return Jsons.jsonNode(com.google.common.collect.ImmutableMap.of( - "ssl_method", "ssl_method", - "host", "localhost", - "port", "1773", - "database", "db", - "username", "username", - "password", "verysecure")); + "ssl_method", "ssl_method", + "host", "localhost", + "port", "1773", + "database", "db", + "username", "username", + "password", "verysecure")); } private JsonNode buildConfigWithExtraJdbcParameters(String extraParam) { return Jsons.jsonNode(com.google.common.collect.ImmutableMap.of( - "ssl_method", "ssl_method", - "host", "localhost", - "port", "1773", - "database", "db", - "username", "username", - "password", "verysecure", - "jdbc_url_params", extraParam)); + "ssl_method", "ssl_method", + "host", "localhost", + "port", "1773", + "database", "db", + "username", "username", + "password", "verysecure", + "jdbc_url_params", extraParam)); } } diff --git a/airbyte-integrations/connectors/destination-mysql-strict-encrypt/build.gradle b/airbyte-integrations/connectors/destination-mysql-strict-encrypt/build.gradle index 434a77a518da..2b28507b7a42 100644 --- a/airbyte-integrations/connectors/destination-mysql-strict-encrypt/build.gradle +++ b/airbyte-integrations/connectors/destination-mysql-strict-encrypt/build.gradle @@ -20,7 +20,7 @@ dependencies { integrationTestJavaImplementation project(':airbyte-integrations:bases:standard-destination-test') integrationTestJavaImplementation project(':airbyte-integrations:connectors:destination-mysql') - integrationTestJavaImplementation libs.testcontainers.mysql + integrationTestJavaImplementation 
libs.connectors.testcontainers.mysql implementation files(project(':airbyte-integrations:bases:base-java').airbyteDocker.outputs) integrationTestJavaImplementation files(project(':airbyte-integrations:bases:base-normalization').airbyteDocker.outputs) diff --git a/airbyte-integrations/connectors/destination-mysql-strict-encrypt/src/test-integration/java/io/airbyte/integrations/destination/mysql/MySQLStrictEncryptDestinationAcceptanceTest.java b/airbyte-integrations/connectors/destination-mysql-strict-encrypt/src/test-integration/java/io/airbyte/integrations/destination/mysql/MySQLStrictEncryptDestinationAcceptanceTest.java index 0351cdf7bf81..b4bc62785b3f 100644 --- a/airbyte-integrations/connectors/destination-mysql-strict-encrypt/src/test-integration/java/io/airbyte/integrations/destination/mysql/MySQLStrictEncryptDestinationAcceptanceTest.java +++ b/airbyte-integrations/connectors/destination-mysql-strict-encrypt/src/test-integration/java/io/airbyte/integrations/destination/mysql/MySQLStrictEncryptDestinationAcceptanceTest.java @@ -12,10 +12,11 @@ import io.airbyte.commons.json.Jsons; import io.airbyte.db.Database; import io.airbyte.db.factory.DSLContextFactory; -import io.airbyte.db.jdbc.JdbcUtils; +import io.airbyte.db.factory.DatabaseDriver; import io.airbyte.integrations.base.JavaBaseConstants; import io.airbyte.integrations.destination.ExtendedNameTransformer; -import io.airbyte.integrations.standardtest.destination.DestinationAcceptanceTest; +import io.airbyte.integrations.standardtest.destination.JdbcDestinationAcceptanceTest; +import io.airbyte.integrations.standardtest.destination.comparator.TestDataComparator; import io.airbyte.protocol.models.AirbyteCatalog; import io.airbyte.protocol.models.AirbyteMessage; import io.airbyte.protocol.models.AirbyteMessage.Type; @@ -25,7 +26,6 @@ import io.airbyte.protocol.models.ConfiguredAirbyteCatalog; import java.sql.SQLException; import java.time.Instant; -import java.util.ArrayList; import java.util.List; import 
java.util.stream.Collectors; import org.jooq.DSLContext; @@ -33,7 +33,7 @@ import org.junit.jupiter.api.Test; import org.testcontainers.containers.MySQLContainer; -public class MySQLStrictEncryptDestinationAcceptanceTest extends DestinationAcceptanceTest { +public class MySQLStrictEncryptDestinationAcceptanceTest extends JdbcDestinationAcceptanceTest { private MySQLContainer db; private final ExtendedNameTransformer namingResolver = new MySQLNameTransformer(); @@ -58,6 +58,26 @@ protected boolean supportsNormalization() { return true; } + @Override + protected TestDataComparator getTestDataComparator() { + return new MySqlTestDataComparator(); + } + + @Override + protected boolean supportBasicDataTypeTest() { + return true; + } + + @Override + protected boolean supportArrayDataTypeTest() { + return true; + } + + @Override + protected boolean supportObjectDataTypeTest() { + return true; + } + @Override protected JsonNode getConfig() { return Jsons.jsonNode(ImmutableMap.builder() @@ -96,28 +116,28 @@ protected List retrieveRecords(final TestDestinationEnv testEnv, throws Exception { return retrieveRecordsFromTable(namingResolver.getRawTableName(streamName), namespace) .stream() - .map(r -> Jsons.deserialize(r.get(JavaBaseConstants.COLUMN_NAME_DATA).asText())) + .map(r -> r.get(JavaBaseConstants.COLUMN_NAME_DATA)) .collect(Collectors.toList()); } private List retrieveRecordsFromTable(final String tableName, final String schemaName) throws SQLException { - final DSLContext dslContext = DSLContextFactory.create( + try (final DSLContext dslContext = DSLContextFactory.create( db.getUsername(), db.getPassword(), db.getDriverClassName(), - String.format("jdbc:mysql://%s:%s/%s?useSSL=true&requireSSL=true&verifyServerCertificate=false", + String.format(DatabaseDriver.MYSQL.getUrlFormatString(), db.getHost(), db.getFirstMappedPort(), db.getDatabaseName()), - SQLDialect.MYSQL); - return new Database(dslContext).query( - ctx -> ctx - .fetch(String.format("SELECT * FROM %s.%s 
ORDER BY %s ASC;", schemaName, tableName, - JavaBaseConstants.COLUMN_NAME_EMITTED_AT)) - .stream() - .map(r -> r.formatJSON(JdbcUtils.getDefaultJSONFormat())) - .map(Jsons::deserialize) - .collect(Collectors.toList())); + SQLDialect.MYSQL)) { + return new Database(dslContext).query( + ctx -> ctx + .fetch(String.format("SELECT * FROM %s.%s ORDER BY %s ASC;", schemaName, tableName, + JavaBaseConstants.COLUMN_NAME_EMITTED_AT)) + .stream() + .map(this::getJsonFromRecord) + .collect(Collectors.toList())); + } } @Override @@ -128,18 +148,6 @@ protected List retrieveNormalizedRecords(final TestDestinationEnv test return retrieveRecordsFromTable(tableName, schema); } - @Override - protected List resolveIdentifier(final String identifier) { - final List result = new ArrayList<>(); - final String resolved = namingResolver.getIdentifier(identifier); - result.add(identifier); - result.add(resolved); - if (!resolved.startsWith("\"")) { - result.add(resolved.toLowerCase()); - } - return result; - } - @Override protected void setup(final TestDestinationEnv testEnv) { db = new MySQLContainer<>("mysql:8.0"); @@ -163,10 +171,10 @@ private void grantCorrectPermissions() { private void executeQuery(final String query) { try (final DSLContext dslContext = DSLContextFactory.create( - db.getUsername(), - db.getPassword(), + "root", + "test", db.getDriverClassName(), - String.format("jdbc:mysql://%s:%s/%s?useSSL=true&requireSSL=true&verifyServerCertificate=false", + String.format(DatabaseDriver.MYSQL.getUrlFormatString(), db.getHost(), db.getFirstMappedPort(), db.getDatabaseName()), @@ -187,9 +195,10 @@ protected void tearDown(final TestDestinationEnv testEnv) { @Override @Test - public void testCustomDbtTransformations() { + public void testCustomDbtTransformations() throws Exception { // We need to create view for testing custom dbt transformations executeQuery("GRANT CREATE VIEW ON *.* TO " + db.getUsername() + "@'%';"); + super.testCustomDbtTransformations(); } @Test diff --git 
a/airbyte-integrations/connectors/destination-mysql-strict-encrypt/src/test-integration/java/io/airbyte/integrations/destination/mysql/MySqlTestDataComparator.java b/airbyte-integrations/connectors/destination-mysql-strict-encrypt/src/test-integration/java/io/airbyte/integrations/destination/mysql/MySqlTestDataComparator.java new file mode 100644 index 000000000000..e2526da095a0 --- /dev/null +++ b/airbyte-integrations/connectors/destination-mysql-strict-encrypt/src/test-integration/java/io/airbyte/integrations/destination/mysql/MySqlTestDataComparator.java @@ -0,0 +1,37 @@ +/* + * Copyright (c) 2021 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.integrations.destination.mysql; + +import io.airbyte.integrations.destination.ExtendedNameTransformer; +import io.airbyte.integrations.standardtest.destination.comparator.AdvancedTestDataComparator; +import java.util.ArrayList; +import java.util.List; + +public class MySqlTestDataComparator extends AdvancedTestDataComparator { + + private final ExtendedNameTransformer namingResolver = new MySQLNameTransformer(); + + @Override + protected List resolveIdentifier(final String identifier) { + final List result = new ArrayList<>(); + final String resolved = namingResolver.getIdentifier(identifier); + result.add(identifier); + result.add(resolved); + if (!resolved.startsWith("\"")) { + result.add(resolved.toLowerCase()); + } + return result; + } + + @Override + protected boolean compareBooleanValues(String firstBooleanValue, String secondBooleanValue) { + if (secondBooleanValue.equalsIgnoreCase("true") || secondBooleanValue.equalsIgnoreCase("false")) { + return super.compareBooleanValues(firstBooleanValue, secondBooleanValue); + } else { + return super.compareBooleanValues(firstBooleanValue, String.valueOf(secondBooleanValue.equals("1"))); + } + } + +} diff --git a/airbyte-integrations/connectors/destination-mysql/Dockerfile b/airbyte-integrations/connectors/destination-mysql/Dockerfile index 
bc324b2bff11..29fa71d00ceb 100644 --- a/airbyte-integrations/connectors/destination-mysql/Dockerfile +++ b/airbyte-integrations/connectors/destination-mysql/Dockerfile @@ -16,5 +16,5 @@ ENV APPLICATION destination-mysql COPY --from=build /airbyte /airbyte -LABEL io.airbyte.version=0.1.18 +LABEL io.airbyte.version=0.1.20 LABEL io.airbyte.name=airbyte/destination-mysql diff --git a/airbyte-integrations/connectors/destination-mysql/build.gradle b/airbyte-integrations/connectors/destination-mysql/build.gradle index d427a915ae64..9434a5c06812 100644 --- a/airbyte-integrations/connectors/destination-mysql/build.gradle +++ b/airbyte-integrations/connectors/destination-mysql/build.gradle @@ -19,7 +19,7 @@ dependencies { integrationTestJavaImplementation project(':airbyte-integrations:bases:standard-destination-test') integrationTestJavaImplementation project(':airbyte-integrations:connectors:destination-mysql') - integrationTestJavaImplementation 'org.testcontainers:mysql:1.15.3' + integrationTestJavaImplementation libs.connectors.testcontainers.mysql implementation files(project(':airbyte-integrations:bases:base-java').airbyteDocker.outputs) integrationTestJavaImplementation files(project(':airbyte-integrations:bases:base-normalization').airbyteDocker.outputs) diff --git a/airbyte-integrations/connectors/destination-oracle-strict-encrypt/build.gradle b/airbyte-integrations/connectors/destination-oracle-strict-encrypt/build.gradle index 3dbff25da67f..58f5ff7cf493 100644 --- a/airbyte-integrations/connectors/destination-oracle-strict-encrypt/build.gradle +++ b/airbyte-integrations/connectors/destination-oracle-strict-encrypt/build.gradle @@ -25,7 +25,7 @@ dependencies { testImplementation project(':airbyte-test-utils') testImplementation 'org.apache.commons:commons-lang3:3.11' - testImplementation libs.testcontainers.oracle.xe + testImplementation libs.connectors.destination.testcontainers.oracle.xe integrationTestJavaImplementation 
project(':airbyte-integrations:bases:standard-destination-test') integrationTestJavaImplementation project(':airbyte-integrations:connectors:destination-oracle') diff --git a/airbyte-integrations/connectors/destination-oracle/build.gradle b/airbyte-integrations/connectors/destination-oracle/build.gradle index aef61deeb49f..df29788e3aa8 100644 --- a/airbyte-integrations/connectors/destination-oracle/build.gradle +++ b/airbyte-integrations/connectors/destination-oracle/build.gradle @@ -22,7 +22,7 @@ dependencies { implementation "com.oracle.database.jdbc:ojdbc8-production:19.7.0.0" testImplementation 'org.apache.commons:commons-lang3:3.11' - testImplementation 'org.testcontainers:oracle-xe:1.16.0' + testImplementation libs.connectors.destination.testcontainers.oracle.xe integrationTestJavaImplementation project(':airbyte-integrations:bases:standard-destination-test') integrationTestJavaImplementation project(':airbyte-integrations:connectors:destination-oracle') diff --git a/airbyte-integrations/connectors/destination-oracle/src/test-integration/java/io/airbyte/integrations/destination/oracle/SshOracleDestinationAcceptanceTest.java b/airbyte-integrations/connectors/destination-oracle/src/test-integration/java/io/airbyte/integrations/destination/oracle/SshOracleDestinationAcceptanceTest.java index 64a4968f7a31..c3f94a580d6b 100644 --- a/airbyte-integrations/connectors/destination-oracle/src/test-integration/java/io/airbyte/integrations/destination/oracle/SshOracleDestinationAcceptanceTest.java +++ b/airbyte-integrations/connectors/destination-oracle/src/test-integration/java/io/airbyte/integrations/destination/oracle/SshOracleDestinationAcceptanceTest.java @@ -34,6 +34,8 @@ public abstract class SshOracleDestinationAcceptanceTest extends DestinationAcce private final SshBastionContainer sshBastionContainer = new SshBastionContainer(); + private static final Network network = Network.newNetwork(); + private OracleContainer db; public abstract SshTunnel.TunnelMethod 
getTunnelMethod(); @@ -53,7 +55,7 @@ public ImmutableMap.Builder getBasicOracleDbConfigBuilder(final return ImmutableMap.builder() .put("host", Objects.requireNonNull(db.getContainerInfo().getNetworkSettings() .getNetworks() - .get(((Network.NetworkImpl) sshBastionContainer.getNetWork()).getName()) + .get(((Network.NetworkImpl) network).getName()) .getIpAddress())) .put("username", db.getUsername()) .put("password", db.getPassword()) @@ -143,7 +145,7 @@ protected void setup(final TestDestinationEnv testEnv) throws Exception { } private void startTestContainers() { - sshBastionContainer.initAndStartBastion(); + sshBastionContainer.initAndStartBastion(network); initAndStartJdbcContainer(); } @@ -152,7 +154,7 @@ private void initAndStartJdbcContainer() { .withUsername("test") .withPassword("oracle") .usingSid() - .withNetwork(sshBastionContainer.getNetWork()); + .withNetwork(network); db.start(); } diff --git a/airbyte-integrations/connectors/destination-postgres-strict-encrypt/Dockerfile b/airbyte-integrations/connectors/destination-postgres-strict-encrypt/Dockerfile index 7254fbcd0b6d..81a06278f614 100644 --- a/airbyte-integrations/connectors/destination-postgres-strict-encrypt/Dockerfile +++ b/airbyte-integrations/connectors/destination-postgres-strict-encrypt/Dockerfile @@ -16,5 +16,5 @@ ENV APPLICATION destination-postgres-strict-encrypt COPY --from=build /airbyte /airbyte -LABEL io.airbyte.version=0.1.6 +LABEL io.airbyte.version=0.1.7 LABEL io.airbyte.name=airbyte/destination-postgres-strict-encrypt diff --git a/airbyte-integrations/connectors/destination-postgres-strict-encrypt/build.gradle b/airbyte-integrations/connectors/destination-postgres-strict-encrypt/build.gradle index 1a3d383396c7..1008bad5c207 100644 --- a/airbyte-integrations/connectors/destination-postgres-strict-encrypt/build.gradle +++ b/airbyte-integrations/connectors/destination-postgres-strict-encrypt/build.gradle @@ -18,7 +18,7 @@ dependencies { integrationTestJavaImplementation 
project(':airbyte-integrations:bases:standard-destination-test') - integrationTestJavaImplementation libs.testcontainers.postgresql + integrationTestJavaImplementation libs.connectors.testcontainers.postgresql implementation files(project(':airbyte-integrations:bases:base-java').airbyteDocker.outputs) integrationTestJavaImplementation files(project(':airbyte-integrations:bases:base-normalization').airbyteDocker.outputs) diff --git a/airbyte-integrations/connectors/destination-postgres/build.gradle b/airbyte-integrations/connectors/destination-postgres/build.gradle index 5cd1d9967275..06a8e9f2a69a 100644 --- a/airbyte-integrations/connectors/destination-postgres/build.gradle +++ b/airbyte-integrations/connectors/destination-postgres/build.gradle @@ -17,12 +17,12 @@ dependencies { testImplementation project(':airbyte-test-utils') - testImplementation libs.testcontainers.postgresql + testImplementation libs.connectors.testcontainers.postgresql integrationTestJavaImplementation project(':airbyte-integrations:bases:standard-destination-test') integrationTestJavaImplementation project(':airbyte-integrations:connectors:destination-postgres') - integrationTestJavaImplementation libs.testcontainers.postgresql + integrationTestJavaImplementation libs.connectors.testcontainers.postgresql implementation files(project(':airbyte-integrations:bases:base-java').airbyteDocker.outputs) integrationTestJavaImplementation files(project(':airbyte-integrations:bases:base-normalization').airbyteDocker.outputs) diff --git a/airbyte-integrations/connectors/destination-postgres/src/test-integration/java/io/airbyte/integrations/destination/postgres/SshPostgresDestinationAcceptanceTest.java b/airbyte-integrations/connectors/destination-postgres/src/test-integration/java/io/airbyte/integrations/destination/postgres/SshPostgresDestinationAcceptanceTest.java index 668f7bbc0303..53bfd6e31d7d 100644 --- 
a/airbyte-integrations/connectors/destination-postgres/src/test-integration/java/io/airbyte/integrations/destination/postgres/SshPostgresDestinationAcceptanceTest.java +++ b/airbyte-integrations/connectors/destination-postgres/src/test-integration/java/io/airbyte/integrations/destination/postgres/SshPostgresDestinationAcceptanceTest.java @@ -21,6 +21,7 @@ import java.util.stream.Collectors; import org.apache.commons.lang3.RandomStringUtils; import org.jooq.SQLDialect; +import org.testcontainers.containers.Network; import org.testcontainers.containers.PostgreSQLContainer; // todo (cgardens) - likely some of this could be further de-duplicated with @@ -34,6 +35,7 @@ public abstract class SshPostgresDestinationAcceptanceTest extends JdbcDestinati private final ExtendedNameTransformer namingResolver = new ExtendedNameTransformer(); private static final String schemaName = RandomStringUtils.randomAlphabetic(8).toLowerCase(); + private static final Network network = Network.newNetwork(); private static PostgreSQLContainer db; private final SshBastionContainer bastion = new SshBastionContainer(); @@ -154,13 +156,13 @@ protected void setup(final TestDestinationEnv testEnv) throws Exception { } private void startTestContainers() { - bastion.initAndStartBastion(); + bastion.initAndStartBastion(network); initAndStartJdbcContainer(); } private void initAndStartJdbcContainer() { db = new PostgreSQLContainer<>("postgres:13-alpine") - .withNetwork(bastion.getNetWork()); + .withNetwork(network); db.start(); } diff --git a/airbyte-integrations/connectors/destination-pubsub/Dockerfile b/airbyte-integrations/connectors/destination-pubsub/Dockerfile index 7ea54f54929d..4bd1e25450c1 100644 --- a/airbyte-integrations/connectors/destination-pubsub/Dockerfile +++ b/airbyte-integrations/connectors/destination-pubsub/Dockerfile @@ -16,5 +16,5 @@ ENV APPLICATION destination-pubsub COPY --from=build /airbyte /airbyte -LABEL io.airbyte.version=0.1.4 +LABEL io.airbyte.version=0.1.5 LABEL 
io.airbyte.name=airbyte/destination-pubsub diff --git a/airbyte-integrations/connectors/destination-pulsar/Dockerfile b/airbyte-integrations/connectors/destination-pulsar/Dockerfile index 809e5da68391..c96c68c6c3f2 100644 --- a/airbyte-integrations/connectors/destination-pulsar/Dockerfile +++ b/airbyte-integrations/connectors/destination-pulsar/Dockerfile @@ -16,5 +16,5 @@ ENV APPLICATION destination-pulsar COPY --from=build /airbyte /airbyte -LABEL io.airbyte.version=0.1.1 +LABEL io.airbyte.version=0.1.2 LABEL io.airbyte.name=airbyte/destination-pulsar diff --git a/airbyte-integrations/connectors/destination-pulsar/build.gradle b/airbyte-integrations/connectors/destination-pulsar/build.gradle index 5e13d3f05c83..4cd167744463 100644 --- a/airbyte-integrations/connectors/destination-pulsar/build.gradle +++ b/airbyte-integrations/connectors/destination-pulsar/build.gradle @@ -18,7 +18,7 @@ dependencies { implementation 'org.apache.pulsar:pulsar-client:2.8.1' - testImplementation libs.testcontainers.pulsar + testImplementation libs.connectors.testcontainers.pulsar integrationTestJavaImplementation project(':airbyte-integrations:bases:standard-destination-test') integrationTestJavaImplementation project(':airbyte-integrations:connectors:destination-pulsar') diff --git a/airbyte-integrations/connectors/destination-redis/Dockerfile b/airbyte-integrations/connectors/destination-redis/Dockerfile index 309a0d52e99f..c773173c33ab 100644 --- a/airbyte-integrations/connectors/destination-redis/Dockerfile +++ b/airbyte-integrations/connectors/destination-redis/Dockerfile @@ -16,5 +16,5 @@ ENV APPLICATION destination-redis COPY --from=build /airbyte /airbyte -LABEL io.airbyte.version=0.1.1 +LABEL io.airbyte.version=0.1.2 LABEL io.airbyte.name=airbyte/destination-redis diff --git a/airbyte-integrations/connectors/destination-redis/build.gradle b/airbyte-integrations/connectors/destination-redis/build.gradle index e9875db72f12..4f59448f5ed4 100644 --- 
a/airbyte-integrations/connectors/destination-redis/build.gradle +++ b/airbyte-integrations/connectors/destination-redis/build.gradle @@ -28,7 +28,7 @@ dependencies { // https://mvnrepository.com/artifact/org.assertj/assertj-core testImplementation "org.assertj:assertj-core:${assertVersion}" // https://mvnrepository.com/artifact/org.testcontainers/testcontainers - testImplementation libs.testcontainers + testImplementation libs.connectors.testcontainers integrationTestJavaImplementation project(':airbyte-integrations:bases:standard-destination-test') integrationTestJavaImplementation project(':airbyte-integrations:connectors:destination-redis') diff --git a/airbyte-integrations/connectors/destination-redshift/Dockerfile b/airbyte-integrations/connectors/destination-redshift/Dockerfile index a6bf71cbe1eb..2f3a8a19ddad 100644 --- a/airbyte-integrations/connectors/destination-redshift/Dockerfile +++ b/airbyte-integrations/connectors/destination-redshift/Dockerfile @@ -16,5 +16,5 @@ ENV APPLICATION destination-redshift COPY --from=build /airbyte /airbyte -LABEL io.airbyte.version=0.3.37 +LABEL io.airbyte.version=0.3.46 LABEL io.airbyte.name=airbyte/destination-redshift diff --git a/airbyte-integrations/connectors/destination-redshift/README.md b/airbyte-integrations/connectors/destination-redshift/README.md index a24d4a65117e..0c7c6b73cc47 100644 --- a/airbyte-integrations/connectors/destination-redshift/README.md +++ b/airbyte-integrations/connectors/destination-redshift/README.md @@ -17,4 +17,8 @@ redshift.connString= redshift.user= redshift.pass= ``` +## Actual secrets +The actual secrets for integration tests could be found in Google Secrets Manager. It could be found by next labels: +- SECRET_DESTINATION-REDSHIFT__CREDS - used for Standard tests. (__config.json__) +- SECRET_DESTINATION-REDSHIFT_STAGING__CREDS - used for S3 Staging tests. 
(__config_staging.json__) diff --git a/airbyte-integrations/connectors/destination-redshift/src/main/java/io/airbyte/integrations/destination/redshift/RedshiftDestination.java b/airbyte-integrations/connectors/destination-redshift/src/main/java/io/airbyte/integrations/destination/redshift/RedshiftDestination.java index d13742cb0531..569022d9e03e 100644 --- a/airbyte-integrations/connectors/destination-redshift/src/main/java/io/airbyte/integrations/destination/redshift/RedshiftDestination.java +++ b/airbyte-integrations/connectors/destination-redshift/src/main/java/io/airbyte/integrations/destination/redshift/RedshiftDestination.java @@ -4,6 +4,9 @@ package io.airbyte.integrations.destination.redshift; +import static io.airbyte.integrations.destination.redshift.util.RedshiftUtil.anyOfS3FieldsAreNullOrEmpty; +import static io.airbyte.integrations.destination.redshift.util.RedshiftUtil.findS3Options; + import com.fasterxml.jackson.databind.JsonNode; import io.airbyte.integrations.base.Destination; import io.airbyte.integrations.base.IntegrationRunner; @@ -24,6 +27,11 @@ public class RedshiftDestination extends SwitchingDestination { private static final Logger LOGGER = LoggerFactory.getLogger(RedshiftDestination.class); + private static final String METHOD = "method"; + + private static final Map destinationMap = Map.of( + DestinationType.STANDARD, new RedshiftInsertDestination(), + DestinationType.COPY_S3, new RedshiftStagingS3Destination()); enum DestinationType { STANDARD, @@ -31,36 +39,22 @@ enum DestinationType { } public RedshiftDestination() { - super(DestinationType.class, RedshiftDestination::getTypeFromConfig, getTypeToDestination()); + super(DestinationType.class, RedshiftDestination::getTypeFromConfig, destinationMap); } - public static DestinationType getTypeFromConfig(final JsonNode config) { + private static DestinationType getTypeFromConfig(final JsonNode config) { return determineUploadMode(config); } - public static Map getTypeToDestination() { - 
return Map.of( - DestinationType.STANDARD, new RedshiftInsertDestination(), - DestinationType.COPY_S3, new RedshiftStagingS3Destination()); - } - public static DestinationType determineUploadMode(final JsonNode config) { - final var bucketNode = config.get("s3_bucket_name"); - final var regionNode = config.get("s3_bucket_region"); - final var accessKeyIdNode = config.get("access_key_id"); - final var secretAccessKeyNode = config.get("secret_access_key"); - if (isNullOrEmpty(bucketNode) && isNullOrEmpty(regionNode) && isNullOrEmpty(accessKeyIdNode) - && isNullOrEmpty(secretAccessKeyNode)) { + final JsonNode jsonNode = findS3Options(config); + + if (anyOfS3FieldsAreNullOrEmpty(jsonNode)) { LOGGER.warn("The \"standard\" upload mode is not performant, and is not recommended for production. " + "Please use the Amazon S3 upload mode if you are syncing a large amount of data."); return DestinationType.STANDARD; } - - if (isNullOrEmpty(bucketNode) && isNullOrEmpty(regionNode) && isNullOrEmpty(accessKeyIdNode) - && isNullOrEmpty(secretAccessKeyNode)) { - throw new RuntimeException("Error: Partially missing S3 Configuration."); - } return DestinationType.COPY_S3; } @@ -71,8 +65,4 @@ public static void main(final String[] args) throws Exception { LOGGER.info("completed destination: {}", RedshiftDestination.class); } - private static boolean isNullOrEmpty(JsonNode jsonNode) { - return jsonNode == null || jsonNode.asText().equals(""); - } - } diff --git a/airbyte-integrations/connectors/destination-redshift/src/main/java/io/airbyte/integrations/destination/redshift/RedshiftStagingS3Destination.java b/airbyte-integrations/connectors/destination-redshift/src/main/java/io/airbyte/integrations/destination/redshift/RedshiftStagingS3Destination.java index 63a30f615a39..5dd6d2313adb 100644 --- a/airbyte-integrations/connectors/destination-redshift/src/main/java/io/airbyte/integrations/destination/redshift/RedshiftStagingS3Destination.java +++ 
b/airbyte-integrations/connectors/destination-redshift/src/main/java/io/airbyte/integrations/destination/redshift/RedshiftStagingS3Destination.java @@ -9,6 +9,7 @@ import static io.airbyte.integrations.destination.redshift.RedshiftInsertDestination.SSL_JDBC_PARAMETERS; import static io.airbyte.integrations.destination.redshift.RedshiftInsertDestination.USERNAME; import static io.airbyte.integrations.destination.redshift.RedshiftInsertDestination.getJdbcConfig; +import static io.airbyte.integrations.destination.redshift.util.RedshiftUtil.findS3Options; import static io.airbyte.integrations.destination.s3.S3DestinationConfig.getS3DestinationConfig; import com.fasterxml.jackson.databind.JsonNode; @@ -24,12 +25,17 @@ import io.airbyte.integrations.destination.record_buffer.FileBuffer; import io.airbyte.integrations.destination.redshift.operations.RedshiftS3StagingSqlOperations; import io.airbyte.integrations.destination.redshift.operations.RedshiftSqlOperations; +import io.airbyte.integrations.destination.s3.AesCbcEnvelopeEncryption; +import io.airbyte.integrations.destination.s3.AesCbcEnvelopeEncryption.KeyType; +import io.airbyte.integrations.destination.s3.EncryptionConfig; +import io.airbyte.integrations.destination.s3.NoEncryption; import io.airbyte.integrations.destination.s3.S3Destination; import io.airbyte.integrations.destination.s3.S3DestinationConfig; import io.airbyte.integrations.destination.s3.S3StorageOperations; import io.airbyte.integrations.destination.s3.csv.CsvSerializedBuffer; import io.airbyte.integrations.destination.staging.StagingConsumerFactory; import io.airbyte.protocol.models.AirbyteConnectionStatus; +import io.airbyte.protocol.models.AirbyteConnectionStatus.Status; import io.airbyte.protocol.models.AirbyteMessage; import io.airbyte.protocol.models.ConfiguredAirbyteCatalog; import java.util.Map; @@ -46,14 +52,26 @@ public RedshiftStagingS3Destination() { super(RedshiftInsertDestination.DRIVER_CLASS, new RedshiftSQLNameTransformer(), new 
RedshiftSqlOperations()); } + private boolean isEphemeralKeysAndPurgingStagingData(JsonNode config, EncryptionConfig encryptionConfig) { + return !isPurgeStagingData(config) && encryptionConfig instanceof AesCbcEnvelopeEncryption c && c.keyType() == KeyType.EPHEMERAL; + } + @Override public AirbyteConnectionStatus check(final JsonNode config) { - final S3DestinationConfig s3Config = getS3DestinationConfig(config); - S3Destination.attemptS3WriteAndDelete(new S3StorageOperations(new RedshiftSQLNameTransformer(), s3Config.getS3Client(), s3Config), s3Config, ""); + final S3DestinationConfig s3Config = getS3DestinationConfig(findS3Options(config)); + final EncryptionConfig encryptionConfig = config.has("uploading_method") ? + EncryptionConfig.fromJson(config.get("uploading_method").get("encryption")) : new NoEncryption(); + if (isEphemeralKeysAndPurgingStagingData(config, encryptionConfig)) { + return new AirbyteConnectionStatus() + .withStatus(Status.FAILED) + .withMessage( + "You cannot use ephemeral keys and disable purging your staging data. 
This would produce S3 objects that you cannot decrypt."); + } + S3Destination.attemptS3WriteAndDelete(new S3StorageOperations(new RedshiftSQLNameTransformer(), s3Config.getS3Client(), s3Config), s3Config, s3Config.getBucketPath()); final NamingConventionTransformer nameTransformer = getNamingResolver(); final RedshiftS3StagingSqlOperations redshiftS3StagingSqlOperations = - new RedshiftS3StagingSqlOperations(nameTransformer, s3Config.getS3Client(), s3Config); + new RedshiftS3StagingSqlOperations(nameTransformer, s3Config.getS3Client(), s3Config, encryptionConfig); final DataSource dataSource = getDataSource(config); try { final JdbcDatabase database = new DefaultJdbcDatabase(dataSource); @@ -106,16 +124,19 @@ public JsonNode toJdbcConfig(final JsonNode config) { public AirbyteMessageConsumer getConsumer(final JsonNode config, final ConfiguredAirbyteCatalog catalog, final Consumer outputRecordCollector) { - final S3DestinationConfig s3Config = getS3DestinationConfig(config); + final EncryptionConfig encryptionConfig = config.has("uploading_method") ? 
+ EncryptionConfig.fromJson(config.get("uploading_method").get("encryption")) : new NoEncryption(); + final JsonNode s3Options = findS3Options(config); + final S3DestinationConfig s3Config = getS3DestinationConfig(s3Options); return new StagingConsumerFactory().create( outputRecordCollector, getDatabase(getDataSource(config)), - new RedshiftS3StagingSqlOperations(getNamingResolver(), s3Config.getS3Client(), s3Config), + new RedshiftS3StagingSqlOperations(getNamingResolver(), s3Config.getS3Client(), s3Config, encryptionConfig), getNamingResolver(), CsvSerializedBuffer.createFunction(null, () -> new FileBuffer(CsvSerializedBuffer.CSV_GZ_SUFFIX)), config, catalog, - isPurgeStagingData(config)); + isPurgeStagingData(s3Options)); } private boolean isPurgeStagingData(final JsonNode config) { diff --git a/airbyte-integrations/connectors/destination-redshift/src/main/java/io/airbyte/integrations/destination/redshift/constants/RedshiftDestinationConstants.java b/airbyte-integrations/connectors/destination-redshift/src/main/java/io/airbyte/integrations/destination/redshift/constants/RedshiftDestinationConstants.java new file mode 100644 index 000000000000..15d473c29e3d --- /dev/null +++ b/airbyte-integrations/connectors/destination-redshift/src/main/java/io/airbyte/integrations/destination/redshift/constants/RedshiftDestinationConstants.java @@ -0,0 +1,16 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+ */ + +package io.airbyte.integrations.destination.redshift.constants; + +/** + * Constant holder for Redshift Destination + */ +public class RedshiftDestinationConstants { + + private RedshiftDestinationConstants() {} + + public static final String UPLOADING_METHOD = "uploading_method"; + +} diff --git a/airbyte-integrations/connectors/destination-redshift/src/main/java/io/airbyte/integrations/destination/redshift/operations/RedshiftS3StagingSqlOperations.java b/airbyte-integrations/connectors/destination-redshift/src/main/java/io/airbyte/integrations/destination/redshift/operations/RedshiftS3StagingSqlOperations.java index 6312810e8ea3..9db6f1dfcdbf 100644 --- a/airbyte-integrations/connectors/destination-redshift/src/main/java/io/airbyte/integrations/destination/redshift/operations/RedshiftS3StagingSqlOperations.java +++ b/airbyte-integrations/connectors/destination-redshift/src/main/java/io/airbyte/integrations/destination/redshift/operations/RedshiftS3StagingSqlOperations.java @@ -13,10 +13,15 @@ import io.airbyte.integrations.destination.record_buffer.SerializableBuffer; import io.airbyte.integrations.destination.redshift.manifest.Entry; import io.airbyte.integrations.destination.redshift.manifest.Manifest; +import io.airbyte.integrations.destination.s3.AesCbcEnvelopeEncryption; +import io.airbyte.integrations.destination.s3.AesCbcEnvelopeEncryptionBlobDecorator; +import io.airbyte.integrations.destination.s3.EncryptionConfig; import io.airbyte.integrations.destination.s3.S3DestinationConfig; import io.airbyte.integrations.destination.s3.S3StorageOperations; import io.airbyte.integrations.destination.s3.credential.S3AccessKeyCredentialConfig; import io.airbyte.integrations.destination.staging.StagingOperations; +import java.util.Base64; +import java.util.Base64.Encoder; import java.util.List; import java.util.Map; import java.util.Optional; @@ -26,18 +31,27 @@ public class RedshiftS3StagingSqlOperations extends RedshiftSqlOperations implements 
StagingOperations { + private static final Encoder BASE64_ENCODER = Base64.getEncoder(); private final NamingConventionTransformer nameTransformer; private final S3StorageOperations s3StorageOperations; private final S3DestinationConfig s3Config; private final ObjectMapper objectMapper; + private final byte[] keyEncryptingKey; public RedshiftS3StagingSqlOperations(NamingConventionTransformer nameTransformer, AmazonS3 s3Client, - S3DestinationConfig s3Config) { + S3DestinationConfig s3Config, + final EncryptionConfig encryptionConfig) { this.nameTransformer = nameTransformer; this.s3StorageOperations = new S3StorageOperations(nameTransformer, s3Client, s3Config); this.s3Config = s3Config; this.objectMapper = new ObjectMapper(); + if (encryptionConfig instanceof AesCbcEnvelopeEncryption e) { + this.s3StorageOperations.addBlobDecorator(new AesCbcEnvelopeEncryptionBlobDecorator(e.key())); + this.keyEncryptingKey = e.key(); + } else { + this.keyEncryptingKey = null; + } } @Override @@ -49,7 +63,10 @@ public String getStageName(String namespace, String streamName) { @Override public String getStagingPath(UUID connectionId, String namespace, String streamName, DateTime writeDatetime) { - return nameTransformer.applyDefaultCase(String.format("%s/%s_%02d_%02d_%02d_%s/", + final String bucketPath = s3Config.getBucketPath(); + final String prefix = bucketPath.isEmpty() ? "" : bucketPath + (bucketPath.endsWith("/") ? "" : "/"); + return nameTransformer.applyDefaultCase(String.format("%s%s/%s_%02d_%02d_%02d_%s/", + prefix, getStageName(namespace, streamName), writeDatetime.year().get(), writeDatetime.monthOfYear().get(), @@ -60,8 +77,10 @@ public String getStagingPath(UUID connectionId, String namespace, String streamN @Override public void createStageIfNotExists(JdbcDatabase database, String stageName) throws Exception { + final String bucketPath = s3Config.getBucketPath(); + final String prefix = bucketPath.isEmpty() ? "" : bucketPath + (bucketPath.endsWith("/") ? 
"" : "/"); AirbyteSentry.executeWithTracing("CreateStageIfNotExists", - () -> s3StorageOperations.createBucketObjectIfNotExists(stageName), + () -> s3StorageOperations.createBucketObjectIfNotExists(prefix + stageName), Map.of("stage", stageName)); } @@ -99,10 +118,18 @@ public void copyIntoTmpTableFromStage(JdbcDatabase database, private void executeCopy(final String manifestPath, JdbcDatabase db, String schemaName, String tmpTableName) { final S3AccessKeyCredentialConfig credentialConfig = (S3AccessKeyCredentialConfig) s3Config.getS3CredentialConfig(); + final String encryptionClause; + if (keyEncryptingKey == null) { + encryptionClause = ""; + } else { + encryptionClause = String.format(" encryption = (type = 'aws_cse' master_key = '%s')", BASE64_ENCODER.encodeToString(keyEncryptingKey)); + } + final var copyQuery = String.format( """ COPY %s.%s FROM '%s' CREDENTIALS 'aws_access_key_id=%s;aws_secret_access_key=%s' + %s CSV GZIP REGION '%s' TIMEFORMAT 'auto' STATUPDATE OFF @@ -112,6 +139,7 @@ private void executeCopy(final String manifestPath, JdbcDatabase db, String sche getFullS3Path(s3Config.getBucketName(), manifestPath), credentialConfig.getAccessKeyId(), credentialConfig.getSecretAccessKey(), + encryptionClause, s3Config.getBucketRegion()); Exceptions.toRuntime(() -> db.execute(copyQuery)); @@ -140,15 +168,19 @@ private static String getManifestPath(final String s3BucketName, final String s3 @Override public void cleanUpStage(JdbcDatabase database, String stageName, List stagedFiles) throws Exception { + final String bucketPath = s3Config.getBucketPath(); + final String prefix = bucketPath.isEmpty() ? "" : bucketPath + (bucketPath.endsWith("/") ? 
"" : "/"); AirbyteSentry.executeWithTracing("CleanStage", - () -> s3StorageOperations.cleanUpBucketObject(stageName, stagedFiles), + () -> s3StorageOperations.cleanUpBucketObject(prefix + stageName, stagedFiles), Map.of("stage", stageName)); } @Override public void dropStageIfExists(JdbcDatabase database, String stageName) throws Exception { + final String bucketPath = s3Config.getBucketPath(); + final String prefix = bucketPath.isEmpty() ? "" : bucketPath + (bucketPath.endsWith("/") ? "" : "/"); AirbyteSentry.executeWithTracing("DropStageIfExists", - () -> s3StorageOperations.dropBucketObject(stageName), + () -> s3StorageOperations.dropBucketObject(prefix + stageName), Map.of("stage", stageName)); } diff --git a/airbyte-integrations/connectors/destination-redshift/src/main/java/io/airbyte/integrations/destination/redshift/operations/RedshiftSqlOperations.java b/airbyte-integrations/connectors/destination-redshift/src/main/java/io/airbyte/integrations/destination/redshift/operations/RedshiftSqlOperations.java index b416d5f3b604..6014fb36440a 100644 --- a/airbyte-integrations/connectors/destination-redshift/src/main/java/io/airbyte/integrations/destination/redshift/operations/RedshiftSqlOperations.java +++ b/airbyte-integrations/connectors/destination-redshift/src/main/java/io/airbyte/integrations/destination/redshift/operations/RedshiftSqlOperations.java @@ -5,6 +5,7 @@ package io.airbyte.integrations.destination.redshift.operations; import static io.airbyte.db.jdbc.JdbcUtils.getDefaultSourceOperations; +import static java.lang.String.join; import com.fasterxml.jackson.databind.JsonNode; import io.airbyte.commons.json.Jsons; @@ -12,14 +13,17 @@ import io.airbyte.integrations.base.JavaBaseConstants; import io.airbyte.integrations.destination.jdbc.JdbcSqlOperations; import io.airbyte.integrations.destination.jdbc.SqlOperationsUtils; +import io.airbyte.integrations.destination.jdbc.WriteConfig; import io.airbyte.protocol.models.AirbyteRecordMessage; import 
java.nio.charset.StandardCharsets; import java.sql.SQLException; import java.util.ArrayList; +import java.util.Collection; import java.util.Collections; +import java.util.HashMap; import java.util.List; import java.util.Map; -import java.util.Set; +import java.util.stream.Collectors; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -29,21 +33,25 @@ public class RedshiftSqlOperations extends JdbcSqlOperations { public static final int REDSHIFT_VARCHAR_MAX_BYTE_SIZE = 65535; public static final int REDSHIFT_SUPER_MAX_BYTE_SIZE = 1000000; - private static final String SELECT_ALL_TABLES_WITH_NOT_SUPER_TYPE_SQL_STATEMENT = """ - select tablename, schemaname - from pg_table_def - where tablename in ( - select tablename as tablename - from pg_table_def - where schemaname = '%1$s' - and tablename like '%%airbyte_raw%%' - and "column" in ('%2$s', '%3$s', '%4$s') - group by tablename - having count(*) = 3) - and schemaname = '%1$s' - and type <> 'super' - and "column" = '_airbyte_data'; - """; + private static final String SELECT_ALL_TABLES_WITH_NOT_SUPER_TYPE_SQL_STATEMENT = + """ + select tablename, schemaname + from pg_table_def + where tablename in ( + select tablename as tablename + from pg_table_def + where schemaname = '%1$s' + and tablename in ('%5$s') + and tablename like '%%airbyte_raw%%' + and tablename not in (select table_name + from information_schema.views + where table_schema in ('%1$s')) + and "column" in ('%2$s', '%3$s', '%4$s') + group by tablename + having count(*) = 3) + and schemaname = '%1$s' + and type <> 'super' + and "column" = '_airbyte_data' """; private static final String ALTER_TMP_TABLES_WITH_NOT_SUPER_TYPE_TO_SUPER_TYPE = """ @@ -51,8 +59,8 @@ having count(*) = 3) ALTER TABLE %1$s ADD COLUMN %3$s_reserve TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP; UPDATE %1$s SET %2$s_super = JSON_PARSE(%2$s); UPDATE %1$s SET %3$s_reserve = %3$s; - ALTER TABLE %1$s DROP COLUMN %2$s; - ALTER TABLE %1$s DROP COLUMN %3$s; + ALTER TABLE %1$s 
DROP COLUMN %2$s CASCADE; + ALTER TABLE %1$s DROP COLUMN %3$s CASCADE; ALTER TABLE %1$s RENAME %2$s_super to %2$s; ALTER TABLE %1$s RENAME %3$s_reserve to %3$s; """; @@ -104,8 +112,8 @@ public boolean isValidData(final JsonNode data) { // check VARCHAR limits for VARCHAR fields within the SUPER object, if overall object is valid if (isValid) { - Map dataMap = Jsons.flatten(data); - for (Object value : dataMap.values()) { + final Map dataMap = Jsons.flatten(data); + for (final Object value : dataMap.values()) { if (value instanceof String stringValue) { final int stringDataSize = stringValue.getBytes(StandardCharsets.UTF_8).length; isValid = stringDataSize <= REDSHIFT_VARCHAR_MAX_BYTE_SIZE; @@ -123,29 +131,60 @@ public boolean isValidData(final JsonNode data) { * SUPER type. This would be done once. * * @param database - Database object for interacting with a JDBC connection. - * @param writeConfigSet - list of write configs. + * @param writeConfigs - list of write configs. */ @Override - public void onDestinationCloseOperations(final JdbcDatabase database, final Set writeConfigSet) { + public void onDestinationCloseOperations(final JdbcDatabase database, final List writeConfigs) { LOGGER.info("Executing operations for Redshift Destination DB engine..."); - List schemaAndTableWithNotSuperType = writeConfigSet + if (writeConfigs.isEmpty()) { + LOGGER.warn("Write config list is EMPTY."); + return; + } + final Map> schemaTableMap = getTheSchemaAndRelatedStreamsMap(writeConfigs); + final List schemaAndTableWithNotSuperType = schemaTableMap + .entrySet() .stream() - .flatMap(schemaName -> discoverNotSuperTables(database, schemaName).stream()) - .toList(); + // String.join() we use to concat tables from list, in query, as follows: SELECT * FROM some_table + // WHERE smt_column IN ('test1', 'test2', etc) + .map(e -> discoverNotSuperTables(database, e.getKey(), join("', '", e.getValue()))) + .flatMap(Collection::stream) + .collect(Collectors.toList()); + if 
(!schemaAndTableWithNotSuperType.isEmpty()) { updateVarcharDataColumnToSuperDataColumn(database, schemaAndTableWithNotSuperType); } LOGGER.info("Executing operations for Redshift Destination DB engine completed."); } + /** + * The method is responsible for building the map which consists from: Keys - Schema names, Values - + * List of related tables (Streams) + * + * @param writeConfigs - write configs from which schema-related tables map will be built + * @return map with Schemas as Keys and with Tables (Streams) as values + */ + private Map> getTheSchemaAndRelatedStreamsMap(final List writeConfigs) { + final Map> schemaTableMap = new HashMap<>(); + for (final WriteConfig writeConfig : writeConfigs) { + if (schemaTableMap.containsKey(writeConfig.getOutputSchemaName())) { + schemaTableMap.get(writeConfig.getOutputSchemaName()).add(writeConfig.getOutputTableName()); + } else { + schemaTableMap.put(writeConfig.getOutputSchemaName(), new ArrayList<>(Collections.singletonList(writeConfig.getOutputTableName()))); + } + } + return schemaTableMap; + } + /** * @param database - Database object for interacting with a JDBC connection. * @param schemaName - schema to update. + * @param tableName - tables to update. 
*/ - private List discoverNotSuperTables(final JdbcDatabase database, - final String schemaName) { - List schemaAndTableWithNotSuperType = new ArrayList<>(); + private List discoverNotSuperTables(final JdbcDatabase database, final String schemaName, final String tableName) { + + final List schemaAndTableWithNotSuperType = new ArrayList<>(); + try { LOGGER.info("Discovering NOT SUPER table types..."); database.execute(String.format("set search_path to %s", schemaName)); @@ -154,7 +193,8 @@ private List discoverNotSuperTables(final JdbcDatabase database, schemaName, JavaBaseConstants.COLUMN_NAME_DATA, JavaBaseConstants.COLUMN_NAME_EMITTED_AT, - JavaBaseConstants.COLUMN_NAME_AB_ID)), + JavaBaseConstants.COLUMN_NAME_AB_ID, + tableName)), getDefaultSourceOperations()::rowToJson); if (tablesNameWithoutSuperDatatype.isEmpty()) { return Collections.emptyList(); @@ -163,7 +203,7 @@ private List discoverNotSuperTables(final JdbcDatabase database, .forEach(e -> schemaAndTableWithNotSuperType.add(e.get("schemaname").textValue() + "." + e.get("tablename").textValue())); return schemaAndTableWithNotSuperType; } - } catch (SQLException e) { + } catch (final SQLException e) { LOGGER.error("Error during discoverNotSuperTables() appears: ", e); throw new RuntimeException(e); } @@ -177,7 +217,7 @@ private List discoverNotSuperTables(final JdbcDatabase database, */ private void updateVarcharDataColumnToSuperDataColumn(final JdbcDatabase database, final List schemaAndTableWithNotSuperType) { LOGGER.info("Updating VARCHAR data column to SUPER..."); - StringBuilder finalSqlStatement = new StringBuilder(); + final StringBuilder finalSqlStatement = new StringBuilder(); // To keep the previous data, we need to add next columns: _airbyte_data, _airbyte_emitted_at // We do such workflow because we can't directly CAST VARCHAR to SUPER column. 
_airbyte_emitted_at // column recreated to keep @@ -191,7 +231,7 @@ private void updateVarcharDataColumnToSuperDataColumn(final JdbcDatabase databas }); try { database.execute(finalSqlStatement.toString()); - } catch (SQLException e) { + } catch (final SQLException e) { LOGGER.error("Error during updateVarcharDataColumnToSuperDataColumn() appears: ", e); throw new RuntimeException(e); } diff --git a/airbyte-integrations/connectors/destination-redshift/src/main/java/io/airbyte/integrations/destination/redshift/util/RedshiftUtil.java b/airbyte-integrations/connectors/destination-redshift/src/main/java/io/airbyte/integrations/destination/redshift/util/RedshiftUtil.java new file mode 100644 index 000000000000..1e4186f67d01 --- /dev/null +++ b/airbyte-integrations/connectors/destination-redshift/src/main/java/io/airbyte/integrations/destination/redshift/util/RedshiftUtil.java @@ -0,0 +1,34 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.integrations.destination.redshift.util; + +import static io.airbyte.integrations.destination.redshift.constants.RedshiftDestinationConstants.UPLOADING_METHOD; + +import com.fasterxml.jackson.databind.JsonNode; + +/** + * Helper class for Destination Redshift connector. + */ +public class RedshiftUtil { + + private RedshiftUtil() {} + + // We check whether config located in root of node. (This check is done for Backward compatibility) + public static JsonNode findS3Options(final JsonNode config) { + return config.has(UPLOADING_METHOD) ? 
config.get(UPLOADING_METHOD) : config; + } + + public static boolean anyOfS3FieldsAreNullOrEmpty(final JsonNode jsonNode) { + return isNullOrEmpty(jsonNode.get("s3_bucket_name")) + && isNullOrEmpty(jsonNode.get("s3_bucket_region")) + && isNullOrEmpty(jsonNode.get("access_key_id")) + && isNullOrEmpty(jsonNode.get("secret_access_key")); + } + + private static boolean isNullOrEmpty(final JsonNode jsonNode) { + return null == jsonNode || "".equals(jsonNode.asText()); + } + +} diff --git a/airbyte-integrations/connectors/destination-redshift/src/main/resources/spec.json b/airbyte-integrations/connectors/destination-redshift/src/main/resources/spec.json index 243259955ddf..85ff89946bb7 100644 --- a/airbyte-integrations/connectors/destination-redshift/src/main/resources/spec.json +++ b/airbyte-integrations/connectors/destination-redshift/src/main/resources/spec.json @@ -48,75 +48,142 @@ "default": "public", "title": "Default Schema" }, - "s3_bucket_name": { - "title": "S3 Bucket Name (Optional)", - "type": "string", - "description": "The name of the staging S3 bucket to use if utilising a COPY strategy. COPY is recommended for production workloads for better speed and scalability. See AWS docs for more details.", - "examples": ["airbyte.staging"] - }, - "s3_bucket_path": { - "title": "S3 Bucket Path (Optional)", - "type": "string", - "description": "The directory under the S3 bucket where data will be written. If not provided, then defaults to the root directory. See path's name recommendations for more details.", - "examples": ["data_sync/test"] - }, - "s3_bucket_region": { - "title": "S3 Bucket Region (Optional)", - "type": "string", - "default": "", - "description": "The region of the S3 staging bucket to use if utilising a COPY strategy. 
See AWS docs for details.", - "enum": [ - "", - "us-east-1", - "us-east-2", - "us-west-1", - "us-west-2", - "af-south-1", - "ap-east-1", - "ap-south-1", - "ap-northeast-1", - "ap-northeast-2", - "ap-northeast-3", - "ap-southeast-1", - "ap-southeast-2", - "ca-central-1", - "cn-north-1", - "cn-northwest-1", - "eu-central-1", - "eu-north-1", - "eu-south-1", - "eu-west-1", - "eu-west-2", - "eu-west-3", - "sa-east-1", - "me-south-1" + "uploading_method": { + "title": "Uploading Method", + "type": "object", + "description": "The method how the data will be uploaded to the database.", + "oneOf": [ + { + "title": "Standard", + "required": ["method"], + "properties": { + "method": { + "type": "string", + "const": "Standard" + } + } + }, + { + "title": "S3 Staging", + "required": [ + "method", + "s3_bucket_name", + "s3_bucket_region", + "access_key_id", + "secret_access_key" + ], + "properties": { + "method": { + "type": "string", + "const": "S3 Staging" + }, + "s3_bucket_name": { + "title": "S3 Bucket Name", + "type": "string", + "description": "The name of the staging S3 bucket to use if utilising a COPY strategy. COPY is recommended for production workloads for better speed and scalability. See AWS docs for more details.", + "examples": ["airbyte.staging"] + }, + "s3_bucket_path": { + "title": "S3 Bucket Path (Optional)", + "type": "string", + "description": "The directory under the S3 bucket where data will be written. If not provided, then defaults to the root directory. See path's name recommendations for more details.", + "examples": ["data_sync/test"] + }, + "s3_bucket_region": { + "title": "S3 Bucket Region", + "type": "string", + "default": "", + "description": "The region of the S3 staging bucket to use if utilising a COPY strategy. 
See AWS docs for details.", + "enum": [ + "", + "us-east-1", + "us-east-2", + "us-west-1", + "us-west-2", + "af-south-1", + "ap-east-1", + "ap-south-1", + "ap-northeast-1", + "ap-northeast-2", + "ap-northeast-3", + "ap-southeast-1", + "ap-southeast-2", + "ca-central-1", + "cn-north-1", + "cn-northwest-1", + "eu-central-1", + "eu-north-1", + "eu-south-1", + "eu-west-1", + "eu-west-2", + "eu-west-3", + "sa-east-1", + "me-south-1" + ] + }, + "access_key_id": { + "type": "string", + "description": "This ID grants access to the above S3 staging bucket. Airbyte requires Read and Write permissions to the given bucket. See AWS docs on how to generate an access key ID and secret access key.", + "title": "S3 Key Id", + "airbyte_secret": true + }, + "secret_access_key": { + "type": "string", + "description": "The corresponding secret to the above access key id. See AWS docs on how to generate an access key ID and secret access key.", + "title": "S3 Access Key", + "airbyte_secret": true + }, + "purge_staging_data": { + "title": "Purge Staging Files and Tables (Optional)", + "type": "boolean", + "description": "Whether to delete the staging files from S3 after completing the sync. 
See docs for details.", + "default": true + }, + "encryption": { + "title": "Encryption", + "type": "object", + "description": "How to encrypt the staging data", + "default": { "encryption_type": "none" }, + "oneOf": [ + { + "title": "No encryption", + "description": "Staging data will be stored in plaintext.", + "type": "object", + "required": ["encryption_type"], + "properties": { + "encryption_type": { + "type": "string", + "const": "none", + "enum": ["none"], + "default": "none" + } + } + }, + { + "title": "AES-CBC envelope encryption", + "description": "Staging data will be encrypted using AES-CBC envelope encryption.", + "type": "object", + "required": ["encryption_type"], + "properties": { + "encryption_type": { + "type": "string", + "const": "aes_cbc_envelope", + "enum": ["aes_cbc_envelope"], + "default": "aes_cbc_envelope" + }, + "key_encrypting_key": { + "type": "string", + "title": "Key", + "description": "The key, base64-encoded. Must be either 128, 192, or 256 bits. Leave blank to have Airbyte generate an ephemeral key for each sync.", + "airbyte_secret": true + } + } + } + ] + } + } + } ] - }, - "access_key_id": { - "type": "string", - "description": "This ID grants access to the above S3 staging bucket. Airbyte requires Read and Write permissions to the given bucket. See AWS docs on how to generate an access key ID and secret access key.", - "title": "S3 Key Id (Optional)", - "airbyte_secret": true - }, - "secret_access_key": { - "type": "string", - "description": "The corresponding secret to the above access key id. See AWS docs on how to generate an access key ID and secret access key.", - "title": "S3 Access Key (Optional)", - "airbyte_secret": true - }, - "part_size": { - "type": "integer", - "minimum": 10, - "maximum": 100, - "examples": ["10"], - "description": "Increase this if syncing tables larger than 100GB. Only relevant for COPY. Files are streamed to S3 in parts. This determines the size of each part, in MBs. 
As S3 has a limit of 10,000 parts per file, part size affects the table size. This is 10MB by default, resulting in a default limit of 100GB tables. Note: a larger part size will result in larger memory requirements. A rule of thumb is to multiply the part size by 10 to get the memory requirement. Modify this with care. See docs for details.", - "title": "Stream Part Size (Optional)" - }, - "purge_staging_data": { - "title": "Purge Staging Files and Tables (Optional)", - "type": "boolean", - "description": "Whether to delete the staging files from S3 after completing the sync. See docs for details.", - "default": true } } } diff --git a/airbyte-integrations/connectors/destination-redshift/src/test-integration/java/io/airbyte/integrations/destination/redshift/RedshiftInsertDestinationAcceptanceTest.java b/airbyte-integrations/connectors/destination-redshift/src/test-integration/java/io/airbyte/integrations/destination/redshift/RedshiftInsertDestinationAcceptanceTest.java index 10bdf80c1fd4..4e2268540f53 100644 --- a/airbyte-integrations/connectors/destination-redshift/src/test-integration/java/io/airbyte/integrations/destination/redshift/RedshiftInsertDestinationAcceptanceTest.java +++ b/airbyte-integrations/connectors/destination-redshift/src/test-integration/java/io/airbyte/integrations/destination/redshift/RedshiftInsertDestinationAcceptanceTest.java @@ -8,10 +8,8 @@ import static org.junit.jupiter.api.Assertions.assertTrue; import com.fasterxml.jackson.databind.JsonNode; -import com.fasterxml.jackson.databind.node.ObjectNode; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; -import io.airbyte.commons.io.IOs; import io.airbyte.commons.json.Jsons; import io.airbyte.commons.string.Strings; import io.airbyte.db.Database; @@ -25,6 +23,8 @@ import io.airbyte.protocol.models.ConfiguredAirbyteCatalog; import io.airbyte.protocol.models.DestinationSyncMode; import io.airbyte.protocol.models.JsonSchemaType; +import java.io.IOException; 
+import java.nio.file.Files; import java.nio.file.Path; import java.sql.SQLException; import java.time.Instant; @@ -46,6 +46,7 @@ class RedshiftInsertDestinationAcceptanceTest extends RedshiftStagingS3Destinati private static final Instant NOW = Instant.now(); private static final String USERS_STREAM_NAME = "users_" + RandomStringUtils.randomAlphabetic(5); + private static final String BOOKS_STREAM_NAME = "books_" + RandomStringUtils.randomAlphabetic(5); private static final AirbyteMessage MESSAGE_USERS1 = new AirbyteMessage().withType(AirbyteMessage.Type.RECORD) .withRecord(new AirbyteRecordMessage().withStream(USERS_STREAM_NAME) @@ -63,17 +64,8 @@ class RedshiftInsertDestinationAcceptanceTest extends RedshiftStagingS3Destinati private static final AirbyteMessage MESSAGE_STATE = new AirbyteMessage().withType(AirbyteMessage.Type.STATE) .withState(new AirbyteStateMessage().withData(Jsons.jsonNode(ImmutableMap.builder().put("checkpoint", "now!").build()))); - public JsonNode getStaticConfig() { - return removeStagingConfigurationFromRedshift(Jsons.deserialize(IOs.readFile(Path.of("secrets/config.json")))); - } - - public static JsonNode removeStagingConfigurationFromRedshift(final JsonNode config) { - final var original = (ObjectNode) Jsons.clone(config); - original.remove("s3_bucket_name"); - original.remove("s3_bucket_region"); - original.remove("access_key_id"); - original.remove("secret_access_key"); - return original; + public JsonNode getStaticConfig() throws IOException { + return Jsons.deserialize(Files.readString(Path.of("secrets/config.json"))); } void setup() { @@ -86,14 +78,17 @@ void setup() { .withDestinationSyncMode(DestinationSyncMode.APPEND))); } + @Test void testIfSuperTmpTableWasCreatedAfterVarcharTmpTable() throws Exception { setup(); - Database database = getDatabase(); - String rawTableName = this.getNamingResolver().getRawTableName(USERS_STREAM_NAME); - createTmpTableWithVarchar(database, rawTableName); + final Database database = 
getDatabase(); + final String usersStream = getNamingResolver().getRawTableName(USERS_STREAM_NAME); + final String booksStream = getNamingResolver().getRawTableName(BOOKS_STREAM_NAME); + createTmpTableWithVarchar(database, usersStream); + createTmpTableWithVarchar(database, booksStream); - assertTrue(isTmpTableDataColumnInExpectedType(database, DATASET_ID, rawTableName, "character varying")); + assertTrue(isTmpTableDataColumnInExpectedType(database, DATASET_ID, usersStream, "character varying")); final Destination destination = new RedshiftDestination(); final AirbyteMessageConsumer consumer = destination.getConsumer(config, catalog, Destination::defaultOutputRecordCollector); @@ -103,7 +98,8 @@ void testIfSuperTmpTableWasCreatedAfterVarcharTmpTable() throws Exception { consumer.accept(MESSAGE_STATE); consumer.close(); - assertTrue(isTmpTableDataColumnInExpectedType(database, DATASET_ID, rawTableName, "super")); + assertTrue(isTmpTableDataColumnInExpectedType(database, DATASET_ID, usersStream, "super")); + assertTrue(isTmpTableDataColumnInExpectedType(database, DATASET_ID, booksStream, "character varying")); final List usersActual = retrieveRecords(testDestinationEnv, USERS_STREAM_NAME, DATASET_ID, config); final List expectedUsersJson = Lists.newArrayList( diff --git a/airbyte-integrations/connectors/destination-redshift/src/test-integration/java/io/airbyte/integrations/destination/redshift/RedshiftS3StagingInsertDestinationAcceptanceTest.java b/airbyte-integrations/connectors/destination-redshift/src/test-integration/java/io/airbyte/integrations/destination/redshift/RedshiftS3StagingInsertDestinationAcceptanceTest.java index 673b4a957401..2c35d769f2b7 100644 --- a/airbyte-integrations/connectors/destination-redshift/src/test-integration/java/io/airbyte/integrations/destination/redshift/RedshiftS3StagingInsertDestinationAcceptanceTest.java +++ 
b/airbyte-integrations/connectors/destination-redshift/src/test-integration/java/io/airbyte/integrations/destination/redshift/RedshiftS3StagingInsertDestinationAcceptanceTest.java @@ -40,6 +40,8 @@ public class RedshiftS3StagingInsertDestinationAcceptanceTest extends RedshiftSt private ConfiguredAirbyteCatalog catalog; private static final Instant NOW = Instant.now(); + + private static final String USERS_STREAM_NAME = "users_" + RandomStringUtils.randomAlphabetic(5); private static final String BOOKS_STREAM_NAME = "books_" + RandomStringUtils.randomAlphabetic(5); private static final AirbyteMessage MESSAGE_BOOKS1 = new AirbyteMessage().withType(AirbyteMessage.Type.RECORD) @@ -59,7 +61,7 @@ public class RedshiftS3StagingInsertDestinationAcceptanceTest extends RedshiftSt .withState(new AirbyteStateMessage().withData(Jsons.jsonNode(ImmutableMap.builder().put("checkpoint", "now!").build()))); public JsonNode getStaticConfig() { - return Jsons.deserialize(IOs.readFile(Path.of("secrets/config.json"))); + return Jsons.deserialize(IOs.readFile(Path.of("secrets/config_staging.json"))); } void setup() { @@ -75,11 +77,13 @@ void setup() { @Test void testIfSuperTmpTableWasCreatedAfterVarcharTmpTableDuringS3Staging() throws Exception { setup(); - Database database = getDatabase(); - String rawTableName = this.getNamingResolver().getRawTableName(BOOKS_STREAM_NAME); - createTmpTableWithVarchar(database, rawTableName); + final Database database = getDatabase(); + final String booksStream = getNamingResolver().getRawTableName(BOOKS_STREAM_NAME); + final String usersStream = getNamingResolver().getRawTableName(USERS_STREAM_NAME); + createTmpTableWithVarchar(database, usersStream); + createTmpTableWithVarchar(database, booksStream); - assertTrue(isTmpTableDataColumnInExpectedType(database, DATASET_ID, rawTableName, "character varying")); + assertTrue(isTmpTableDataColumnInExpectedType(database, DATASET_ID, booksStream, "character varying")); final Destination destination = new 
RedshiftDestination(); final AirbyteMessageConsumer consumer = destination.getConsumer(config, catalog, Destination::defaultOutputRecordCollector); @@ -89,7 +93,8 @@ void testIfSuperTmpTableWasCreatedAfterVarcharTmpTableDuringS3Staging() throws E consumer.accept(MESSAGE_STATE); consumer.close(); - assertTrue(isTmpTableDataColumnInExpectedType(database, DATASET_ID, rawTableName, "super")); + assertTrue(isTmpTableDataColumnInExpectedType(database, DATASET_ID, booksStream, "super")); + assertTrue(isTmpTableDataColumnInExpectedType(database, DATASET_ID, usersStream, "character varying")); final List booksActual = retrieveRecords(testDestinationEnv, BOOKS_STREAM_NAME, DATASET_ID, config); final List expectedUsersJson = Lists.newArrayList( diff --git a/airbyte-integrations/connectors/destination-redshift/src/test-integration/java/io/airbyte/integrations/destination/redshift/RedshiftStagingS3DestinationAcceptanceTest.java b/airbyte-integrations/connectors/destination-redshift/src/test-integration/java/io/airbyte/integrations/destination/redshift/RedshiftStagingS3DestinationAcceptanceTest.java index 5bda7cfec80c..cada9007db36 100644 --- a/airbyte-integrations/connectors/destination-redshift/src/test-integration/java/io/airbyte/integrations/destination/redshift/RedshiftStagingS3DestinationAcceptanceTest.java +++ b/airbyte-integrations/connectors/destination-redshift/src/test-integration/java/io/airbyte/integrations/destination/redshift/RedshiftStagingS3DestinationAcceptanceTest.java @@ -17,6 +17,7 @@ import io.airbyte.integrations.destination.redshift.operations.RedshiftSqlOperations; import io.airbyte.integrations.standardtest.destination.JdbcDestinationAcceptanceTest; import io.airbyte.integrations.standardtest.destination.comparator.TestDataComparator; +import java.io.IOException; import java.nio.file.Path; import java.sql.SQLException; import java.util.List; @@ -52,8 +53,8 @@ protected JsonNode getConfig() { return config; } - public JsonNode getStaticConfig() { - 
return Jsons.deserialize(IOs.readFile(Path.of("secrets/config.json"))); + public JsonNode getStaticConfig() throws IOException { + return Jsons.deserialize(IOs.readFile(Path.of("secrets/config_staging.json"))); } @Override diff --git a/airbyte-integrations/connectors/destination-redshift/src/test/java/io/airbyte/integrations/destination/redshift/RedshiftDestinationTest.java b/airbyte-integrations/connectors/destination-redshift/src/test/java/io/airbyte/integrations/destination/redshift/RedshiftDestinationTest.java index 58d1e53fc9f6..bfc1f2897ee8 100644 --- a/airbyte-integrations/connectors/destination-redshift/src/test/java/io/airbyte/integrations/destination/redshift/RedshiftDestinationTest.java +++ b/airbyte-integrations/connectors/destination-redshift/src/test/java/io/airbyte/integrations/destination/redshift/RedshiftDestinationTest.java @@ -18,22 +18,45 @@ public class RedshiftDestinationTest { private static final ObjectMapper mapper = MoreMappers.initMapper(); @Test - @DisplayName("When given S3 credentials should use COPY with SUPER Datatype") - public void useS3Staging() { - final var stubConfig = mapper.createObjectNode(); - stubConfig.put("s3_bucket_name", "fake-bucket"); - stubConfig.put("s3_bucket_region", "fake-region"); - stubConfig.put("access_key_id", "test"); - stubConfig.put("secret_access_key", "test key"); + @DisplayName("When not given S3 credentials should use INSERT") + public void useStandardInsert() { + final var standardInsertConfigStub = mapper.createObjectNode(); + standardInsertConfigStub.put("method", "Standard"); + final var uploadingMethodStub = mapper.createObjectNode(); + uploadingMethodStub.set("uploading_method", standardInsertConfigStub); + assertEquals(DestinationType.STANDARD, RedshiftDestination.determineUploadMode(uploadingMethodStub)); + } - assertEquals(DestinationType.COPY_S3, RedshiftDestination.determineUploadMode(stubConfig)); + @Test + @DisplayName("When given standard backward compatibility test") + public void 
useStandardInsertBackwardCompatibility() { + final var standardInsertConfigStub = mapper.createObjectNode(); + assertEquals(DestinationType.STANDARD, RedshiftDestination.determineUploadMode(standardInsertConfigStub)); } @Test - @DisplayName("When not given S3 credentials should use INSERT with SUPER Datatype") - public void useStandardInsert() { - final var stubConfig = mapper.createObjectNode(); - assertEquals(DestinationType.STANDARD, RedshiftDestination.determineUploadMode(stubConfig)); + @DisplayName("When given S3 credentials should use COPY") + public void useS3Staging() { + final var s3StagingStub = mapper.createObjectNode(); + final var uploadingMethodStub = mapper.createObjectNode(); + s3StagingStub.put("s3_bucket_name", "fake-bucket"); + s3StagingStub.put("s3_bucket_region", "fake-region"); + s3StagingStub.put("access_key_id", "test"); + s3StagingStub.put("secret_access_key", "test key"); + s3StagingStub.put("method", "S3 Staging"); + uploadingMethodStub.set("uploading_method", s3StagingStub); + assertEquals(DestinationType.COPY_S3, RedshiftDestination.determineUploadMode(uploadingMethodStub)); + } + + @Test + @DisplayName("When given S3 backward compatibility test") + public void useS3StagingBackwardCompatibility() { + final var s3StagingStub = mapper.createObjectNode(); + s3StagingStub.put("s3_bucket_name", "fake-bucket"); + s3StagingStub.put("s3_bucket_region", "fake-region"); + s3StagingStub.put("access_key_id", "test"); + s3StagingStub.put("secret_access_key", "test key"); + assertEquals(DestinationType.COPY_S3, RedshiftDestination.determineUploadMode(s3StagingStub)); } } diff --git a/airbyte-integrations/connectors/destination-redshift/src/test/java/io/airbyte/integrations/destination/redshift/copiers/RedshiftStreamCopierTest.java b/airbyte-integrations/connectors/destination-redshift/src/test/java/io/airbyte/integrations/destination/redshift/copiers/RedshiftStreamCopierTest.java index c6eca7829607..6681540b0425 100644 --- 
a/airbyte-integrations/connectors/destination-redshift/src/test/java/io/airbyte/integrations/destination/redshift/copiers/RedshiftStreamCopierTest.java +++ b/airbyte-integrations/connectors/destination-redshift/src/test/java/io/airbyte/integrations/destination/redshift/copiers/RedshiftStreamCopierTest.java @@ -39,7 +39,6 @@ class RedshiftStreamCopierTest { private static final Logger LOGGER = LoggerFactory.getLogger(RedshiftStreamCopierTest.class); - private static final int PART_SIZE = 5; private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper(); // The full path would be something like @@ -71,7 +70,6 @@ public void setup() { "fake-region") .withEndpoint("fake-endpoint") .withAccessKeyCredential("fake-access-key-id", "fake-secret-access-key") - .withPartSize(PART_SIZE) .get(); copier = new RedshiftStreamCopier( diff --git a/airbyte-integrations/connectors/destination-redshift/src/test/java/io/airbyte/integrations/destination/redshift/util/RedshiftUtilTest.java b/airbyte-integrations/connectors/destination-redshift/src/test/java/io/airbyte/integrations/destination/redshift/util/RedshiftUtilTest.java new file mode 100644 index 000000000000..f5e167cc7668 --- /dev/null +++ b/airbyte-integrations/connectors/destination-redshift/src/test/java/io/airbyte/integrations/destination/redshift/util/RedshiftUtilTest.java @@ -0,0 +1,67 @@ +package io.airbyte.integrations.destination.redshift.util; + +import static io.airbyte.integrations.destination.redshift.constants.RedshiftDestinationConstants.UPLOADING_METHOD; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +import com.fasterxml.jackson.databind.JsonNode; +import org.junit.jupiter.api.DisplayName; +import org.junit.jupiter.api.Test; + +public class RedshiftUtilTest { + + @Test + @DisplayName("Should 
return the config when the config has uploading method") + public void testFindS3OptionsWhenConfigHasUploadingMethod() { + JsonNode config = mock(JsonNode.class); + JsonNode uploadingMethod = mock(JsonNode.class); + when(config.has(UPLOADING_METHOD)).thenReturn(true); + when(config.get(UPLOADING_METHOD)).thenReturn(uploadingMethod); + + JsonNode result = RedshiftUtil.findS3Options(config); + + assertEquals(uploadingMethod, result); + } + + @Test + @DisplayName("Should return the config when the config does not have uploading method") + public void testFindS3OptionsWhenConfigDoesNotHaveUploadingMethod() { + JsonNode config = mock(JsonNode.class); + when(config.has(UPLOADING_METHOD)).thenReturn(false); + + JsonNode result = RedshiftUtil.findS3Options(config); + + assertEquals(config, result); + } + + @Test + @DisplayName("Should return true when all of the fields are null or empty") + public void testAnyOfS3FieldsAreNullOrEmptyWhenAllOfTheFieldsAreNullOrEmptyThenReturnTrue() { + JsonNode jsonNode = mock(JsonNode.class); + when(jsonNode.get("s3_bucket_name")).thenReturn(null); + when(jsonNode.get("s3_bucket_region")).thenReturn(null); + when(jsonNode.get("access_key_id")).thenReturn(null); + when(jsonNode.get("secret_access_key")).thenReturn(null); + + assertTrue(RedshiftUtil.anyOfS3FieldsAreNullOrEmpty(jsonNode)); + } + + @Test + @DisplayName("Should return false when all S3 required fields are not null or empty") + public void testAllS3RequiredAreNotNullOrEmptyThenReturnFalse() { + JsonNode jsonNode = mock(JsonNode.class); + when(jsonNode.get("s3_bucket_name")).thenReturn(mock(JsonNode.class)); + when(jsonNode.get("s3_bucket_name").asText()).thenReturn("test"); + when(jsonNode.get("s3_bucket_region")).thenReturn(mock(JsonNode.class)); + when(jsonNode.get("s3_bucket_region").asText()).thenReturn("test"); + when(jsonNode.get("access_key_id")).thenReturn(mock(JsonNode.class)); + when(jsonNode.get("access_key_id").asText()).thenReturn("test"); + 
when(jsonNode.get("secret_access_key")).thenReturn(mock(JsonNode.class)); + when(jsonNode.get("secret_access_key").asText()).thenReturn("test"); + + assertFalse(RedshiftUtil.anyOfS3FieldsAreNullOrEmpty(jsonNode)); + } +} \ No newline at end of file diff --git a/airbyte-integrations/connectors/destination-rockset/Dockerfile b/airbyte-integrations/connectors/destination-rockset/Dockerfile index 73477dc97bb6..136dbcd02b48 100644 --- a/airbyte-integrations/connectors/destination-rockset/Dockerfile +++ b/airbyte-integrations/connectors/destination-rockset/Dockerfile @@ -16,5 +16,5 @@ ENV APPLICATION destination-rockset COPY --from=build /airbyte /airbyte -LABEL io.airbyte.version=0.1.2 +LABEL io.airbyte.version=0.1.3 LABEL io.airbyte.name=airbyte/destination-rockset diff --git a/airbyte-integrations/connectors/destination-s3/Dockerfile b/airbyte-integrations/connectors/destination-s3/Dockerfile index d655f2f8800a..e934673af34e 100644 --- a/airbyte-integrations/connectors/destination-s3/Dockerfile +++ b/airbyte-integrations/connectors/destination-s3/Dockerfile @@ -16,5 +16,5 @@ ENV APPLICATION destination-s3 COPY --from=build /airbyte /airbyte -LABEL io.airbyte.version=0.3.6 +LABEL io.airbyte.version=0.3.9 LABEL io.airbyte.name=airbyte/destination-s3 diff --git a/airbyte-integrations/connectors/destination-s3/build.gradle b/airbyte-integrations/connectors/destination-s3/build.gradle index d43f0bcd1136..49da4da3cdf0 100644 --- a/airbyte-integrations/connectors/destination-s3/build.gradle +++ b/airbyte-integrations/connectors/destination-s3/build.gradle @@ -21,9 +21,9 @@ dependencies { implementation 'com.github.alexmojaki:s3-stream-upload:2.2.2' // parquet - implementation group: 'org.apache.hadoop', name: 'hadoop-common', version: '3.3.0' - implementation group: 'org.apache.hadoop', name: 'hadoop-aws', version: '3.3.0' - implementation group: 'org.apache.hadoop', name: 'hadoop-mapreduce-client-core', version: '3.3.0' + implementation group: 'org.apache.hadoop', name: 
'hadoop-common', version: '3.3.3' + implementation group: 'org.apache.hadoop', name: 'hadoop-aws', version: '3.3.3' + implementation group: 'org.apache.hadoop', name: 'hadoop-mapreduce-client-core', version: '3.3.3' implementation group: 'org.apache.parquet', name: 'parquet-avro', version: '1.12.0' implementation group: 'com.github.airbytehq', name: 'json-avro-converter', version: '1.0.1' diff --git a/airbyte-integrations/connectors/destination-s3/src/main/java/io/airbyte/integrations/destination/s3/S3DestinationConfig.java b/airbyte-integrations/connectors/destination-s3/src/main/java/io/airbyte/integrations/destination/s3/S3DestinationConfig.java index 7c8044706f5d..34bfed0eb745 100644 --- a/airbyte-integrations/connectors/destination-s3/src/main/java/io/airbyte/integrations/destination/s3/S3DestinationConfig.java +++ b/airbyte-integrations/connectors/destination-s3/src/main/java/io/airbyte/integrations/destination/s3/S3DestinationConfig.java @@ -20,8 +20,7 @@ /** * An S3 configuration. Typical usage sets at most one of {@code bucketPath} (necessary for more - * delicate data syncing to S3) and {@code partSize} (used by certain bulk-load database - * operations). 
+ * delicate data syncing to S3) */ public class S3DestinationConfig { @@ -33,8 +32,6 @@ public class S3DestinationConfig { private final String bucketRegion; private final String pathFormat; private final S3CredentialConfig credentialConfig; - @Deprecated - private final Integer partSize; private final S3FormatConfig formatConfig; private final Object lock = new Object(); @@ -46,7 +43,6 @@ public S3DestinationConfig(final String endpoint, final String bucketRegion, final String pathFormat, final S3CredentialConfig credentialConfig, - final Integer partSize, final S3FormatConfig formatConfig, final AmazonS3 s3Client) { this.endpoint = endpoint; @@ -56,7 +52,6 @@ public S3DestinationConfig(final String endpoint, this.pathFormat = pathFormat; this.credentialConfig = credentialConfig; this.formatConfig = formatConfig; - this.partSize = partSize; this.s3Client = s3Client; } @@ -68,7 +63,6 @@ public static Builder create(final S3DestinationConfig config) { return new Builder(config.getBucketName(), config.getBucketPath(), config.getBucketRegion()) .withEndpoint(config.getEndpoint()) .withCredentialConfig(config.getS3CredentialConfig()) - .withPartSize(config.getPartSize()) .withFormatConfig(config.getFormatConfig()); } @@ -90,10 +84,6 @@ public static S3DestinationConfig getS3DestinationConfig(final JsonNode config) builder = builder.withEndpoint(config.get("s3_endpoint").asText()); } - if (config.has("part_size")) { - builder = builder.withPartSize(config.get("part_size").asInt()); - } - final S3CredentialConfig credentialConfig; if (config.has("access_key_id")) { credentialConfig = new S3AccessKeyCredentialConfig(config.get("access_key_id").asText(), config.get("secret_access_key").asText()); @@ -135,10 +125,6 @@ public S3CredentialConfig getS3CredentialConfig() { return credentialConfig; } - public Integer getPartSize() { - return partSize; - } - public S3FormatConfig getFormatConfig() { return formatConfig; } @@ -217,7 +203,6 @@ public static class Builder { private 
String endpoint = ""; private String pathFormat = S3DestinationConstants.DEFAULT_PATH_FORMAT; - private int partSize = S3DestinationConstants.DEFAULT_PART_SIZE_MB; private String bucketName; private String bucketPath; @@ -257,11 +242,6 @@ public Builder withEndpoint(final String endpoint) { return this; } - public Builder withPartSize(final int partSize) { - this.partSize = partSize; - return this; - } - public Builder withFormatConfig(final S3FormatConfig formatConfig) { this.formatConfig = formatConfig; return this; @@ -290,7 +270,6 @@ public S3DestinationConfig get() { bucketRegion, pathFormat, credentialConfig, - partSize, formatConfig, s3Client); } diff --git a/airbyte-integrations/connectors/destination-s3/src/main/java/io/airbyte/integrations/destination/s3/S3DestinationConstants.java b/airbyte-integrations/connectors/destination-s3/src/main/java/io/airbyte/integrations/destination/s3/S3DestinationConstants.java index c3b9013acb53..89641d9357ad 100644 --- a/airbyte-integrations/connectors/destination-s3/src/main/java/io/airbyte/integrations/destination/s3/S3DestinationConstants.java +++ b/airbyte-integrations/connectors/destination-s3/src/main/java/io/airbyte/integrations/destination/s3/S3DestinationConstants.java @@ -11,11 +11,6 @@ public final class S3DestinationConstants { public static final String YYYY_MM_DD_FORMAT_STRING = "yyyy_MM_dd"; public static final S3NameTransformer NAME_TRANSFORMER = new S3NameTransformer(); - public static final String PART_SIZE_MB_ARG_NAME = "part_size_mb"; - // The smallest part size is 5MB. An S3 upload can be maximally formed of 10,000 parts. This gives - // us an upper limit of 10,000 * 10 / 1000 = 100 GB per table with a 10MB part size limit. - // WARNING: Too large a part size can cause potential OOM errors. 
- public static final int DEFAULT_PART_SIZE_MB = 10; public static final String DEFAULT_PATH_FORMAT = "${NAMESPACE}/${STREAM_NAME}/${YEAR}_${MONTH}_${DAY}_${EPOCH}_"; // gzip compression for CSV and JSONL diff --git a/airbyte-integrations/connectors/destination-s3/src/main/java/io/airbyte/integrations/destination/s3/S3FormatConfig.java b/airbyte-integrations/connectors/destination-s3/src/main/java/io/airbyte/integrations/destination/s3/S3FormatConfig.java index 88e1b124d16c..77856bdcec2a 100644 --- a/airbyte-integrations/connectors/destination-s3/src/main/java/io/airbyte/integrations/destination/s3/S3FormatConfig.java +++ b/airbyte-integrations/connectors/destination-s3/src/main/java/io/airbyte/integrations/destination/s3/S3FormatConfig.java @@ -10,8 +10,6 @@ public interface S3FormatConfig { S3Format getFormat(); - Long getPartSize(); - String getFileExtension(); static String withDefault(final JsonNode config, final String property, final String defaultValue) { diff --git a/airbyte-integrations/connectors/destination-s3/src/main/java/io/airbyte/integrations/destination/s3/S3StorageOperations.java b/airbyte-integrations/connectors/destination-s3/src/main/java/io/airbyte/integrations/destination/s3/S3StorageOperations.java index 4b62a4ed3a8f..59a9fa92a9ea 100644 --- a/airbyte-integrations/connectors/destination-s3/src/main/java/io/airbyte/integrations/destination/s3/S3StorageOperations.java +++ b/airbyte-integrations/connectors/destination-s3/src/main/java/io/airbyte/integrations/destination/s3/S3StorageOperations.java @@ -133,7 +133,7 @@ public String uploadRecordsToBucket(final SerializableBuffer recordsData, * @return the uploaded filename, which is different from the serialized buffer filename */ private String loadDataIntoBucket(final String objectPath, final SerializableBuffer recordsData) throws IOException { - final long partSize = s3Config.getFormatConfig() != null ? 
s3Config.getFormatConfig().getPartSize() : DEFAULT_PART_SIZE; + final long partSize = DEFAULT_PART_SIZE; final String bucket = s3Config.getBucketName(); final String fullObjectKey = objectPath + getPartId(objectPath) + getExtension(recordsData.getFilename()); diff --git a/airbyte-integrations/connectors/destination-s3/src/main/java/io/airbyte/integrations/destination/s3/avro/JsonSchemaType.java b/airbyte-integrations/connectors/destination-s3/src/main/java/io/airbyte/integrations/destination/s3/avro/JsonSchemaType.java index e5121cdf0421..0fa759a6acbf 100644 --- a/airbyte-integrations/connectors/destination-s3/src/main/java/io/airbyte/integrations/destination/s3/avro/JsonSchemaType.java +++ b/airbyte-integrations/connectors/destination-s3/src/main/java/io/airbyte/integrations/destination/s3/avro/JsonSchemaType.java @@ -4,6 +4,10 @@ package io.airbyte.integrations.destination.s3.avro; +import java.util.Arrays; +import java.util.List; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; import org.apache.avro.Schema; /** @@ -11,32 +15,60 @@ */ public enum JsonSchemaType { - STRING("string", true, Schema.Type.STRING), - NUMBER("number", true, Schema.Type.DOUBLE), - INTEGER("integer", true, Schema.Type.INT), - BOOLEAN("boolean", true, Schema.Type.BOOLEAN), - NULL("null", true, Schema.Type.NULL), - OBJECT("object", false, Schema.Type.RECORD), - ARRAY("array", false, Schema.Type.ARRAY), - COMBINED("combined", false, Schema.Type.UNION); + STRING("string", true, null, Schema.Type.STRING), + NUMBER_INT("number", true, "integer", Schema.Type.INT), + NUMBER_LONG("number", true, "big_integer", Schema.Type.LONG), + NUMBER_FLOAT("number", true, "float", Schema.Type.FLOAT), + NUMBER("number", true, null, Schema.Type.DOUBLE), + INTEGER("integer", true, null, Schema.Type.INT), + BOOLEAN("boolean", true, null, Schema.Type.BOOLEAN), + NULL("null", true, null, Schema.Type.NULL), + OBJECT("object", false, null, Schema.Type.RECORD), + ARRAY("array", false, null, 
 Schema.Type.ARRAY), + COMBINED("combined", false, null, Schema.Type.UNION); private final String jsonSchemaType; private final boolean isPrimitive; private final Schema.Type avroType; + private final String jsonSchemaAirbyteType; - JsonSchemaType(final String jsonSchemaType, final boolean isPrimitive, final Schema.Type avroType) { + JsonSchemaType(final String jsonSchemaType, final boolean isPrimitive, final String jsonSchemaAirbyteType, final Schema.Type avroType) { this.jsonSchemaType = jsonSchemaType; + this.jsonSchemaAirbyteType = jsonSchemaAirbyteType; this.isPrimitive = isPrimitive; this.avroType = avroType; } - public static JsonSchemaType fromJsonSchemaType(final String value) { - for (final JsonSchemaType type : values()) { - if (value.equals(type.jsonSchemaType)) { - return type; - } + public static JsonSchemaType fromJsonSchemaType(final String jsonSchemaType) { + return fromJsonSchemaType(jsonSchemaType, null); + } + + public static JsonSchemaType fromJsonSchemaType(final @Nonnull String jsonSchemaType, final @Nullable String jsonSchemaAirbyteType) { + List<JsonSchemaType> matchSchemaType = null; + // Match by Type + airbyteType + if (jsonSchemaAirbyteType != null) { + matchSchemaType = Arrays.stream(values()) + .filter(type -> jsonSchemaType.equals(type.jsonSchemaType)) + .filter(type -> jsonSchemaAirbyteType.equals(type.jsonSchemaAirbyteType)) + .toList(); + } + + // Match by Type if there are no results already + if (matchSchemaType == null || matchSchemaType.isEmpty()) { + matchSchemaType = + Arrays.stream(values()).filter(format -> jsonSchemaType.equals(format.jsonSchemaType) && format.jsonSchemaAirbyteType == null).toList(); + } + + if (matchSchemaType.isEmpty()) { + throw new IllegalArgumentException( + String.format("Unexpected jsonSchemaType - %s and jsonSchemaAirbyteType - %s", jsonSchemaType, jsonSchemaAirbyteType)); + } else if (matchSchemaType.size() > 1) { + throw new RuntimeException( + String.format("Match with more than one json type!
Matched types : %s, Inputs jsonSchemaType : %s, jsonSchemaAirbyteType : %s", + matchSchemaType, jsonSchemaType, jsonSchemaAirbyteType)); + } else { + return matchSchemaType.get(0); } - throw new IllegalArgumentException("Unexpected json schema type: " + value); } public String getJsonSchemaType() { @@ -56,4 +88,8 @@ public String toString() { return jsonSchemaType; } + public String getJsonSchemaAirbyteType() { + return jsonSchemaAirbyteType; + } + } diff --git a/airbyte-integrations/connectors/destination-s3/src/main/java/io/airbyte/integrations/destination/s3/avro/JsonToAvroSchemaConverter.java b/airbyte-integrations/connectors/destination-s3/src/main/java/io/airbyte/integrations/destination/s3/avro/JsonToAvroSchemaConverter.java index 73703c98ac11..920ddfbaa1a8 100644 --- a/airbyte-integrations/connectors/destination-s3/src/main/java/io/airbyte/integrations/destination/s3/avro/JsonToAvroSchemaConverter.java +++ b/airbyte-integrations/connectors/destination-s3/src/main/java/io/airbyte/integrations/destination/s3/avro/JsonToAvroSchemaConverter.java @@ -36,6 +36,8 @@ */ public class JsonToAvroSchemaConverter { + private static final String TYPE = "type"; + private static final String AIRBYTE_TYPE = "airbyte_type"; private static final Schema UUID_SCHEMA = LogicalTypes.uuid() .addToSchema(Schema.create(Schema.Type.STRING)); private static final Schema NULL_SCHEMA = Schema.create(Schema.Type.NULL); @@ -60,7 +62,9 @@ static List getTypes(final String fieldName, final JsonNode fiel return Collections.singletonList(JsonSchemaType.COMBINED); } - final JsonNode typeProperty = fieldDefinition.get("type"); + final JsonNode typeProperty = fieldDefinition.get(TYPE); + final JsonNode airbyteTypeProperty = fieldDefinition.get(AIRBYTE_TYPE); + final String airbyteType = airbyteTypeProperty == null ? null : airbyteTypeProperty.asText(); if (typeProperty == null || typeProperty.isNull()) { LOGGER.warn("Field \"{}\" has no type specification. 
It will default to string", fieldName); return Collections.singletonList(JsonSchemaType.STRING); @@ -73,7 +77,7 @@ static List getTypes(final String fieldName, final JsonNode fiel } if (typeProperty.isTextual()) { - return Collections.singletonList(JsonSchemaType.fromJsonSchemaType(typeProperty.asText())); + return Collections.singletonList(JsonSchemaType.fromJsonSchemaType(typeProperty.asText(), airbyteType)); } LOGGER.warn("Field \"{}\" has unexpected type {}. It will default to string.", fieldName, typeProperty); @@ -214,7 +218,7 @@ Schema parseSingleType(final String fieldName, final Schema fieldSchema; switch (fieldType) { - case NUMBER, INTEGER, BOOLEAN -> fieldSchema = Schema.create(fieldType.getAvroType()); + case INTEGER, NUMBER, NUMBER_INT, NUMBER_LONG, NUMBER_FLOAT, BOOLEAN -> fieldSchema = Schema.create(fieldType.getAvroType()); case STRING -> { if (fieldDefinition.has("format")) { final String format = fieldDefinition.get("format").asText(); diff --git a/airbyte-integrations/connectors/destination-s3/src/main/java/io/airbyte/integrations/destination/s3/avro/S3AvroFormatConfig.java b/airbyte-integrations/connectors/destination-s3/src/main/java/io/airbyte/integrations/destination/s3/avro/S3AvroFormatConfig.java index 3f8aae7ed646..2a086a32b717 100644 --- a/airbyte-integrations/connectors/destination-s3/src/main/java/io/airbyte/integrations/destination/s3/avro/S3AvroFormatConfig.java +++ b/airbyte-integrations/connectors/destination-s3/src/main/java/io/airbyte/integrations/destination/s3/avro/S3AvroFormatConfig.java @@ -4,10 +4,7 @@ package io.airbyte.integrations.destination.s3.avro; -import static io.airbyte.integrations.destination.s3.S3DestinationConstants.PART_SIZE_MB_ARG_NAME; - import com.fasterxml.jackson.databind.JsonNode; -import io.airbyte.integrations.destination.s3.S3DestinationConstants; import io.airbyte.integrations.destination.s3.S3Format; import io.airbyte.integrations.destination.s3.S3FormatConfig; import 
org.apache.avro.file.CodecFactory; @@ -17,18 +14,13 @@ public class S3AvroFormatConfig implements S3FormatConfig { public static final String DEFAULT_SUFFIX = ".avro"; private final CodecFactory codecFactory; - private final Long partSize; - public S3AvroFormatConfig(final CodecFactory codecFactory, final long partSize) { + public S3AvroFormatConfig(final CodecFactory codecFactory) { this.codecFactory = codecFactory; - this.partSize = partSize; } public S3AvroFormatConfig(final JsonNode formatConfig) { this.codecFactory = parseCodecConfig(formatConfig.get("compression_codec")); - this.partSize = formatConfig.get(PART_SIZE_MB_ARG_NAME) != null - ? formatConfig.get(PART_SIZE_MB_ARG_NAME).asLong() - : S3DestinationConstants.DEFAULT_PART_SIZE_MB; } public static CodecFactory parseCodecConfig(final JsonNode compressionCodecConfig) { @@ -96,10 +88,6 @@ public CodecFactory getCodecFactory() { return codecFactory; } - public Long getPartSize() { - return partSize; - } - @Override public String getFileExtension() { return DEFAULT_SUFFIX; diff --git a/airbyte-integrations/connectors/destination-s3/src/main/java/io/airbyte/integrations/destination/s3/avro/S3AvroWriter.java b/airbyte-integrations/connectors/destination-s3/src/main/java/io/airbyte/integrations/destination/s3/avro/S3AvroWriter.java index 8bd3676a1474..9eece89ed9f8 100644 --- a/airbyte-integrations/connectors/destination-s3/src/main/java/io/airbyte/integrations/destination/s3/avro/S3AvroWriter.java +++ b/airbyte-integrations/connectors/destination-s3/src/main/java/io/airbyte/integrations/destination/s3/avro/S3AvroWriter.java @@ -56,7 +56,6 @@ public S3AvroWriter(final S3DestinationConfig config, this.avroRecordFactory = new AvroRecordFactory(schema, converter); this.uploadManager = StreamTransferManagerFactory .create(config.getBucketName(), objectKey, s3Client) - .setPartSize(config.getFormatConfig().getPartSize()) .get(); // We only need one output stream as we only have one input stream. 
This is reasonably performant. this.outputStream = uploadManager.getMultiPartOutputStreams().get(0); diff --git a/airbyte-integrations/connectors/destination-s3/src/main/java/io/airbyte/integrations/destination/s3/csv/S3CsvFormatConfig.java b/airbyte-integrations/connectors/destination-s3/src/main/java/io/airbyte/integrations/destination/s3/csv/S3CsvFormatConfig.java index 4ca449379109..d6ff3c132146 100644 --- a/airbyte-integrations/connectors/destination-s3/src/main/java/io/airbyte/integrations/destination/s3/csv/S3CsvFormatConfig.java +++ b/airbyte-integrations/connectors/destination-s3/src/main/java/io/airbyte/integrations/destination/s3/csv/S3CsvFormatConfig.java @@ -6,11 +6,9 @@ import static io.airbyte.integrations.destination.s3.S3DestinationConstants.COMPRESSION_ARG_NAME; import static io.airbyte.integrations.destination.s3.S3DestinationConstants.DEFAULT_COMPRESSION_TYPE; -import static io.airbyte.integrations.destination.s3.S3DestinationConstants.PART_SIZE_MB_ARG_NAME; import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.databind.JsonNode; -import io.airbyte.integrations.destination.s3.S3DestinationConstants; import io.airbyte.integrations.destination.s3.S3Format; import io.airbyte.integrations.destination.s3.S3FormatConfig; import io.airbyte.integrations.destination.s3.util.CompressionType; @@ -50,24 +48,18 @@ public String getValue() { } private final Flattening flattening; - @Deprecated - private final Long partSize; private final CompressionType compressionType; public S3CsvFormatConfig(final JsonNode formatConfig) { this( Flattening.fromValue(formatConfig.has("flattening") ? formatConfig.get("flattening").asText() : Flattening.NO.value), - formatConfig.has(PART_SIZE_MB_ARG_NAME) - ? formatConfig.get(PART_SIZE_MB_ARG_NAME).asLong() - : S3DestinationConstants.DEFAULT_PART_SIZE_MB, formatConfig.has(COMPRESSION_ARG_NAME) ? 
CompressionTypeHelper.parseCompressionType(formatConfig.get(COMPRESSION_ARG_NAME)) : DEFAULT_COMPRESSION_TYPE); } - public S3CsvFormatConfig(final Flattening flattening, final Long partSize, final CompressionType compressionType) { + public S3CsvFormatConfig(final Flattening flattening, final CompressionType compressionType) { this.flattening = flattening; - this.partSize = partSize; this.compressionType = compressionType; } @@ -80,11 +72,6 @@ public Flattening getFlattening() { return flattening; } - @Override - public Long getPartSize() { - return partSize; - } - @Override public String getFileExtension() { return CSV_SUFFIX + compressionType.getFileExtension(); @@ -98,7 +85,6 @@ public CompressionType getCompressionType() { public String toString() { return "S3CsvFormatConfig{" + "flattening=" + flattening + - ", partSize=" + partSize + ", compression=" + compressionType.name() + '}'; } diff --git a/airbyte-integrations/connectors/destination-s3/src/main/java/io/airbyte/integrations/destination/s3/csv/S3CsvWriter.java b/airbyte-integrations/connectors/destination-s3/src/main/java/io/airbyte/integrations/destination/s3/csv/S3CsvWriter.java index 15ace28740b7..cce2da71e33f 100644 --- a/airbyte-integrations/connectors/destination-s3/src/main/java/io/airbyte/integrations/destination/s3/csv/S3CsvWriter.java +++ b/airbyte-integrations/connectors/destination-s3/src/main/java/io/airbyte/integrations/destination/s3/csv/S3CsvWriter.java @@ -61,7 +61,6 @@ private S3CsvWriter(final S3DestinationConfig config, this.uploadManager = StreamTransferManagerFactory .create(config.getBucketName(), objectKey, s3Client) - .setPartSize(config.getFormatConfig().getPartSize()) .get() .numUploadThreads(uploadThreads) .queueCapacity(queueCapacity); diff --git a/airbyte-integrations/connectors/destination-s3/src/main/java/io/airbyte/integrations/destination/s3/jsonl/S3JsonlFormatConfig.java 
b/airbyte-integrations/connectors/destination-s3/src/main/java/io/airbyte/integrations/destination/s3/jsonl/S3JsonlFormatConfig.java index 93c10dc677c2..3904da3d8de1 100644 --- a/airbyte-integrations/connectors/destination-s3/src/main/java/io/airbyte/integrations/destination/s3/jsonl/S3JsonlFormatConfig.java +++ b/airbyte-integrations/connectors/destination-s3/src/main/java/io/airbyte/integrations/destination/s3/jsonl/S3JsonlFormatConfig.java @@ -6,10 +6,8 @@ import static io.airbyte.integrations.destination.s3.S3DestinationConstants.COMPRESSION_ARG_NAME; import static io.airbyte.integrations.destination.s3.S3DestinationConstants.DEFAULT_COMPRESSION_TYPE; -import static io.airbyte.integrations.destination.s3.S3DestinationConstants.PART_SIZE_MB_ARG_NAME; import com.fasterxml.jackson.databind.JsonNode; -import io.airbyte.integrations.destination.s3.S3DestinationConstants; import io.airbyte.integrations.destination.s3.S3Format; import io.airbyte.integrations.destination.s3.S3FormatConfig; import io.airbyte.integrations.destination.s3.util.CompressionType; @@ -20,13 +18,9 @@ public class S3JsonlFormatConfig implements S3FormatConfig { public static final String JSONL_SUFFIX = ".jsonl"; - private final Long partSize; private final CompressionType compressionType; public S3JsonlFormatConfig(final JsonNode formatConfig) { - this.partSize = formatConfig.has(PART_SIZE_MB_ARG_NAME) - ? formatConfig.get(PART_SIZE_MB_ARG_NAME).asLong() - : S3DestinationConstants.DEFAULT_PART_SIZE_MB; this.compressionType = formatConfig.has(COMPRESSION_ARG_NAME) ? 
CompressionTypeHelper.parseCompressionType(formatConfig.get(COMPRESSION_ARG_NAME)) : DEFAULT_COMPRESSION_TYPE; @@ -37,10 +31,6 @@ public S3Format getFormat() { return S3Format.JSONL; } - public Long getPartSize() { - return partSize; - } - @Override public String getFileExtension() { return JSONL_SUFFIX + compressionType.getFileExtension(); @@ -53,7 +43,6 @@ public CompressionType getCompressionType() { @Override public String toString() { return "S3JsonlFormatConfig{" + - ", partSize=" + partSize + ", compression=" + compressionType.name() + '}'; } @@ -67,12 +56,12 @@ public boolean equals(final Object o) { return false; } final S3JsonlFormatConfig that = (S3JsonlFormatConfig) o; - return Objects.equals(partSize, that.partSize) && Objects.equals(compressionType, that.compressionType); + return Objects.equals(compressionType, that.compressionType); } @Override public int hashCode() { - return Objects.hash(partSize, compressionType); + return Objects.hash(compressionType); } } diff --git a/airbyte-integrations/connectors/destination-s3/src/main/java/io/airbyte/integrations/destination/s3/jsonl/S3JsonlWriter.java b/airbyte-integrations/connectors/destination-s3/src/main/java/io/airbyte/integrations/destination/s3/jsonl/S3JsonlWriter.java index 9d8e79a06e12..b415100a4e77 100644 --- a/airbyte-integrations/connectors/destination-s3/src/main/java/io/airbyte/integrations/destination/s3/jsonl/S3JsonlWriter.java +++ b/airbyte-integrations/connectors/destination-s3/src/main/java/io/airbyte/integrations/destination/s3/jsonl/S3JsonlWriter.java @@ -54,7 +54,6 @@ public S3JsonlWriter(final S3DestinationConfig config, this.uploadManager = StreamTransferManagerFactory .create(config.getBucketName(), objectKey, s3Client) - .setPartSize(config.getFormatConfig().getPartSize()) .get(); // We only need one output stream as we only have one input stream. This is reasonably performant. 
this.outputStream = uploadManager.getMultiPartOutputStreams().get(0); diff --git a/airbyte-integrations/connectors/destination-s3/src/main/java/io/airbyte/integrations/destination/s3/parquet/S3ParquetFormatConfig.java b/airbyte-integrations/connectors/destination-s3/src/main/java/io/airbyte/integrations/destination/s3/parquet/S3ParquetFormatConfig.java index 88e389e65d9b..77cf6656a54d 100644 --- a/airbyte-integrations/connectors/destination-s3/src/main/java/io/airbyte/integrations/destination/s3/parquet/S3ParquetFormatConfig.java +++ b/airbyte-integrations/connectors/destination-s3/src/main/java/io/airbyte/integrations/destination/s3/parquet/S3ParquetFormatConfig.java @@ -5,7 +5,6 @@ package io.airbyte.integrations.destination.s3.parquet; import com.fasterxml.jackson.databind.JsonNode; -import io.airbyte.integrations.destination.s3.S3DestinationConstants; import io.airbyte.integrations.destination.s3.S3Format; import io.airbyte.integrations.destination.s3.S3FormatConfig; import org.apache.parquet.hadoop.metadata.CompressionCodecName; @@ -42,12 +41,6 @@ public S3Format getFormat() { return S3Format.PARQUET; } - @Override - public Long getPartSize() { - // not applicable for Parquet format - return Integer.toUnsignedLong(S3DestinationConstants.DEFAULT_PART_SIZE_MB); - } - @Override public String getFileExtension() { return PARQUET_SUFFIX; diff --git a/airbyte-integrations/connectors/destination-s3/src/main/resources/spec.json b/airbyte-integrations/connectors/destination-s3/src/main/resources/spec.json index c0933c23c3b0..01c8f64a932a 100644 --- a/airbyte-integrations/connectors/destination-s3/src/main/resources/spec.json +++ b/airbyte-integrations/connectors/destination-s3/src/main/resources/spec.json @@ -14,7 +14,6 @@ "s3_bucket_region", "format" ], - "additionalProperties": false, "properties": { "access_key_id": { "type": "string", diff --git 
a/airbyte-integrations/connectors/destination-s3/src/test-integration/java/io/airbyte/integrations/destination/s3/S3AvroDestinationAcceptanceTest.java b/airbyte-integrations/connectors/destination-s3/src/test-integration/java/io/airbyte/integrations/destination/s3/S3AvroDestinationAcceptanceTest.java index 869e7b142a24..b8895f1f3e79 100644 --- a/airbyte-integrations/connectors/destination-s3/src/test-integration/java/io/airbyte/integrations/destination/s3/S3AvroDestinationAcceptanceTest.java +++ b/airbyte-integrations/connectors/destination-s3/src/test-integration/java/io/airbyte/integrations/destination/s3/S3AvroDestinationAcceptanceTest.java @@ -13,16 +13,19 @@ import io.airbyte.integrations.destination.s3.avro.JsonFieldNameUpdater; import io.airbyte.integrations.destination.s3.util.AvroRecordHelper; import io.airbyte.integrations.standardtest.destination.comparator.TestDataComparator; +import java.util.HashMap; import java.util.LinkedList; import java.util.List; import java.util.Map; +import java.util.Set; +import org.apache.avro.Schema.Type; import org.apache.avro.file.DataFileReader; import org.apache.avro.file.SeekableByteArrayInput; import org.apache.avro.generic.GenericData; import org.apache.avro.generic.GenericData.Record; import org.apache.avro.generic.GenericDatumReader; -public class S3AvroDestinationAcceptanceTest extends S3DestinationAcceptanceTest { +public class S3AvroDestinationAcceptanceTest extends S3AvroParquetDestinationAcceptanceTest { protected S3AvroDestinationAcceptanceTest() { super(S3Format.AVRO); @@ -73,4 +76,25 @@ protected TestDataComparator getTestDataComparator() { return new S3AvroParquetTestDataComparator(); } + @Override + protected Map> retrieveDataTypesFromPersistedFiles(final String streamName, final String namespace) throws Exception { + + final List objectSummaries = getAllSyncedObjects(streamName, namespace); + Map> resultDataTypes = new HashMap<>(); + + for (final S3ObjectSummary objectSummary : objectSummaries) { + final 
S3Object object = s3Client.getObject(objectSummary.getBucketName(), objectSummary.getKey()); + try (final DataFileReader dataFileReader = new DataFileReader<>( + new SeekableByteArrayInput(object.getObjectContent().readAllBytes()), + new GenericDatumReader<>())) { + while (dataFileReader.hasNext()) { + final GenericData.Record record = dataFileReader.next(); + Map> actualDataTypes = getTypes(record); + resultDataTypes.putAll(actualDataTypes); + } + } + } + return resultDataTypes; + } + } diff --git a/airbyte-integrations/connectors/destination-s3/src/test-integration/java/io/airbyte/integrations/destination/s3/S3AvroParquetDestinationAcceptanceTest.java b/airbyte-integrations/connectors/destination-s3/src/test-integration/java/io/airbyte/integrations/destination/s3/S3AvroParquetDestinationAcceptanceTest.java new file mode 100644 index 000000000000..96dd7b96db97 --- /dev/null +++ b/airbyte-integrations/connectors/destination-s3/src/test-integration/java/io/airbyte/integrations/destination/s3/S3AvroParquetDestinationAcceptanceTest.java @@ -0,0 +1,145 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+ */ + +package io.airbyte.integrations.destination.s3; + +import static org.junit.jupiter.api.Assertions.assertEquals; + +import com.fasterxml.jackson.databind.JsonNode; +import io.airbyte.commons.json.Jsons; +import io.airbyte.commons.resources.MoreResources; +import io.airbyte.integrations.destination.s3.avro.JsonSchemaType; +import io.airbyte.integrations.standardtest.destination.NumberDataTypeTestArgumentProvider; +import io.airbyte.protocol.models.AirbyteCatalog; +import io.airbyte.protocol.models.AirbyteMessage; +import io.airbyte.protocol.models.AirbyteStream; +import io.airbyte.protocol.models.CatalogHelpers; +import io.airbyte.protocol.models.ConfiguredAirbyteCatalog; +import java.io.IOException; +import java.util.Arrays; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; +import java.util.Set; +import java.util.function.Function; +import java.util.stream.Collectors; +import java.util.stream.StreamSupport; +import org.apache.avro.Schema; +import org.apache.avro.Schema.Field; +import org.apache.avro.Schema.Type; +import org.apache.avro.generic.GenericData.Record; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.ArgumentsSource; + +public abstract class S3AvroParquetDestinationAcceptanceTest extends S3DestinationAcceptanceTest { + + protected S3AvroParquetDestinationAcceptanceTest(S3Format s3Format) { + super(s3Format); + } + + @ParameterizedTest + @ArgumentsSource(NumberDataTypeTestArgumentProvider.class) + public void testNumberDataType(String catalogFileName, String messagesFileName) throws Exception { + final AirbyteCatalog catalog = readCatalogFromFile(catalogFileName); + final List messages = readMessagesFromFile(messagesFileName); + + final JsonNode config = getConfig(); + final String defaultSchema = getDefaultSchema(config); + final ConfiguredAirbyteCatalog configuredCatalog = CatalogHelpers.toDefaultConfiguredCatalog(catalog); + runSyncAndVerifyStateOutput(config, messages, 
configuredCatalog, false); + + for (final AirbyteStream stream : catalog.getStreams()) { + final String streamName = stream.getName(); + final String schema = stream.getNamespace() != null ? stream.getNamespace() : defaultSchema; + + Map> actualSchemaTypes = retrieveDataTypesFromPersistedFiles(streamName, schema); + Map> expectedSchemaTypes = retrieveExpectedDataTypes(stream); + + assertEquals(expectedSchemaTypes, actualSchemaTypes); + } + } + + private Map> retrieveExpectedDataTypes(AirbyteStream stream) { + Iterable iterableNames = () -> stream.getJsonSchema().get("properties").fieldNames(); + Map nameToNode = StreamSupport.stream(iterableNames.spliterator(), false) + .collect(Collectors.toMap( + Function.identity(), + name -> getJsonNode(stream, name))); + + return nameToNode + .entrySet() + .stream() + .collect(Collectors.toMap( + Entry::getKey, + entry -> getExpectedSchemaType(entry.getValue()))); + } + + private JsonNode getJsonNode(AirbyteStream stream, String name) { + JsonNode properties = stream.getJsonSchema().get("properties"); + if (properties.size() == 1) { + return properties.get("data"); + } + return properties.get(name).get("items"); + } + + private Set getExpectedSchemaType(JsonNode fieldDefinition) { + final JsonNode typeProperty = fieldDefinition.get("type"); + final JsonNode airbyteTypeProperty = fieldDefinition.get("airbyte_type"); + final String airbyteTypePropertyText = airbyteTypeProperty == null ? 
null : airbyteTypeProperty.asText(); + return Arrays.stream(JsonSchemaType.values()) + .filter( + value -> value.getJsonSchemaType().equals(typeProperty.asText()) && compareAirbyteTypes(airbyteTypePropertyText, value)) + .map(JsonSchemaType::getAvroType) + .collect(Collectors.toSet()); + } + + private boolean compareAirbyteTypes(String airbyteTypePropertyText, JsonSchemaType value) { + if (airbyteTypePropertyText == null) { + return value.getJsonSchemaAirbyteType() == null; + } + return airbyteTypePropertyText.equals(value.getJsonSchemaAirbyteType()); + } + + private AirbyteCatalog readCatalogFromFile(final String catalogFilename) throws IOException { + return Jsons.deserialize(MoreResources.readResource(catalogFilename), AirbyteCatalog.class); + } + + private List readMessagesFromFile(final String messagesFilename) throws IOException { + return MoreResources.readResource(messagesFilename).lines() + .map(record -> Jsons.deserialize(record, AirbyteMessage.class)).collect(Collectors.toList()); + } + + protected abstract Map> retrieveDataTypesFromPersistedFiles(final String streamName, final String namespace) throws Exception; + + protected Map> getTypes(Record record) { + + List fieldList = record + .getSchema() + .getFields() + .stream() + .filter(field -> !field.name().startsWith("_airbyte")) + .toList(); + + if (fieldList.size() == 1) { + return fieldList + .stream() + .collect( + Collectors.toMap( + Field::name, + field -> field.schema().getTypes().stream().map(Schema::getType).filter(type -> !type.equals(Type.NULL)) + .collect(Collectors.toSet()))); + } else { + return fieldList + .stream() + .collect( + Collectors.toMap( + Field::name, + field -> field.schema().getTypes() + .stream().filter(type -> !type.getType().equals(Type.NULL)) + .flatMap(type -> type.getElementType().getTypes().stream()).map(Schema::getType).filter(type -> !type.equals(Type.NULL)) + .collect(Collectors.toSet()))); + } + } + +} diff --git 
a/airbyte-integrations/connectors/destination-s3/src/test-integration/java/io/airbyte/integrations/destination/s3/S3ParquetDestinationAcceptanceTest.java b/airbyte-integrations/connectors/destination-s3/src/test-integration/java/io/airbyte/integrations/destination/s3/S3ParquetDestinationAcceptanceTest.java index fab39c0d3240..fe3924019d86 100644 --- a/airbyte-integrations/connectors/destination-s3/src/test-integration/java/io/airbyte/integrations/destination/s3/S3ParquetDestinationAcceptanceTest.java +++ b/airbyte-integrations/connectors/destination-s3/src/test-integration/java/io/airbyte/integrations/destination/s3/S3ParquetDestinationAcceptanceTest.java @@ -17,15 +17,19 @@ import java.io.IOException; import java.net.URI; import java.net.URISyntaxException; +import java.util.HashMap; import java.util.LinkedList; import java.util.List; import java.util.Map; +import java.util.Set; +import org.apache.avro.Schema.Type; import org.apache.avro.generic.GenericData; +import org.apache.avro.generic.GenericData.Record; import org.apache.hadoop.conf.Configuration; import org.apache.parquet.avro.AvroReadSupport; import org.apache.parquet.hadoop.ParquetReader; -public class S3ParquetDestinationAcceptanceTest extends S3DestinationAcceptanceTest { +public class S3ParquetDestinationAcceptanceTest extends S3AvroParquetDestinationAcceptanceTest { protected S3ParquetDestinationAcceptanceTest() { super(S3Format.PARQUET); @@ -77,4 +81,30 @@ protected TestDataComparator getTestDataComparator() { return new S3AvroParquetTestDataComparator(); } + @Override + protected Map> retrieveDataTypesFromPersistedFiles(final String streamName, final String namespace) throws Exception { + + final List objectSummaries = getAllSyncedObjects(streamName, namespace); + final Map> resultDataTypes = new HashMap<>(); + + for (final S3ObjectSummary objectSummary : objectSummaries) { + final S3Object object = s3Client.getObject(objectSummary.getBucketName(), objectSummary.getKey()); + final URI uri = new 
URI(String.format("s3a://%s/%s", object.getBucketName(), object.getKey())); + final var path = new org.apache.hadoop.fs.Path(uri); + final Configuration hadoopConfig = S3ParquetWriter.getHadoopConfig(config); + + try (final ParquetReader parquetReader = ParquetReader.builder(new AvroReadSupport<>(), path) + .withConf(hadoopConfig) + .build()) { + GenericData.Record record; + while ((record = parquetReader.read()) != null) { + Map> actualDataTypes = getTypes(record); + resultDataTypes.putAll(actualDataTypes); + } + } + } + + return resultDataTypes; + } + } diff --git a/airbyte-integrations/connectors/destination-s3/src/test/java/io/airbyte/integrations/destination/s3/S3DestinationConfigTest.java b/airbyte-integrations/connectors/destination-s3/src/test/java/io/airbyte/integrations/destination/s3/S3DestinationConfigTest.java index e81900c78683..c802b16db64b 100644 --- a/airbyte-integrations/connectors/destination-s3/src/test/java/io/airbyte/integrations/destination/s3/S3DestinationConfigTest.java +++ b/airbyte-integrations/connectors/destination-s3/src/test/java/io/airbyte/integrations/destination/s3/S3DestinationConfigTest.java @@ -13,7 +13,6 @@ class S3DestinationConfigTest { private static final S3DestinationConfig CONFIG = S3DestinationConfig.create("test-bucket", "test-path", "test-region") .withEndpoint("test-endpoint") - .withPartSize(19) .withPathFormat("${STREAM_NAME}/${NAMESPACE}") .withAccessKeyCredential("test-key", "test-secret") .get(); @@ -29,7 +28,6 @@ public void testCreateAndModify() { final String newBucketPath = "new-path"; final String newBucketRegion = "new-region"; final String newEndpoint = "new-endpoint"; - final int newPartSize = 29; final String newKey = "new-key"; final String newSecret = "new-secret"; @@ -39,7 +37,6 @@ public void testCreateAndModify() { .withBucketRegion(newBucketRegion) .withEndpoint(newEndpoint) .withAccessKeyCredential(newKey, newSecret) - .withPartSize(newPartSize) .get(); assertNotEquals(CONFIG, modifiedConfig); 
diff --git a/airbyte-integrations/connectors/destination-s3/src/test/java/io/airbyte/integrations/destination/s3/avro/JsonSchemaTypeTest.java b/airbyte-integrations/connectors/destination-s3/src/test/java/io/airbyte/integrations/destination/s3/avro/JsonSchemaTypeTest.java new file mode 100644 index 000000000000..be27d9802ae4 --- /dev/null +++ b/airbyte-integrations/connectors/destination-s3/src/test/java/io/airbyte/integrations/destination/s3/avro/JsonSchemaTypeTest.java @@ -0,0 +1,46 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.integrations.destination.s3.avro; + +import static org.junit.jupiter.api.Assertions.assertEquals; + +import java.util.stream.Stream; +import org.junit.jupiter.api.extension.ExtensionContext; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.ArgumentsProvider; +import org.junit.jupiter.params.provider.ArgumentsSource; + +public class JsonSchemaTypeTest { + + @ParameterizedTest + @ArgumentsSource(JsonSchemaTypeProvider.class) + public void testFromJsonSchemaType(String type, String airbyteType, JsonSchemaType expectedJsonSchemaType) { + assertEquals( + expectedJsonSchemaType, + JsonSchemaType.fromJsonSchemaType(type, airbyteType)); + } + + public static class JsonSchemaTypeProvider implements ArgumentsProvider { + + @Override + public Stream provideArguments(ExtensionContext context) throws Exception { + return Stream.of( + Arguments.of("number", "integer", JsonSchemaType.NUMBER_INT), + Arguments.of("number", "big_integer", JsonSchemaType.NUMBER_LONG), + Arguments.of("number", "float", JsonSchemaType.NUMBER_FLOAT), + Arguments.of("number", null, JsonSchemaType.NUMBER), + Arguments.of("string", null, JsonSchemaType.STRING), + Arguments.of("integer", null, JsonSchemaType.INTEGER), + Arguments.of("boolean", null, JsonSchemaType.BOOLEAN), + Arguments.of("null", null, JsonSchemaType.NULL), + 
Arguments.of("object", null, JsonSchemaType.OBJECT), + Arguments.of("array", null, JsonSchemaType.ARRAY), + Arguments.of("combined", null, JsonSchemaType.COMBINED)); + } + + } + +} diff --git a/airbyte-integrations/connectors/destination-s3/src/test/java/io/airbyte/integrations/destination/s3/avro/S3AvroFormatConfigTest.java b/airbyte-integrations/connectors/destination-s3/src/test/java/io/airbyte/integrations/destination/s3/avro/S3AvroFormatConfigTest.java index 8dccdac4391b..496eb5280f47 100644 --- a/airbyte-integrations/connectors/destination-s3/src/test/java/io/airbyte/integrations/destination/s3/avro/S3AvroFormatConfigTest.java +++ b/airbyte-integrations/connectors/destination-s3/src/test/java/io/airbyte/integrations/destination/s3/avro/S3AvroFormatConfigTest.java @@ -5,6 +5,7 @@ package io.airbyte.integrations.destination.s3.avro; import static com.amazonaws.services.s3.internal.Constants.MB; +import static io.airbyte.integrations.destination.s3.util.StreamTransferManagerFactory.DEFAULT_PART_SIZE_MB; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.fail; @@ -13,7 +14,6 @@ import com.google.common.collect.Lists; import io.airbyte.commons.json.Jsons; import io.airbyte.integrations.destination.s3.S3DestinationConfig; -import io.airbyte.integrations.destination.s3.S3DestinationConstants; import io.airbyte.integrations.destination.s3.S3FormatConfig; import io.airbyte.integrations.destination.s3.util.ConfigTestUtils; import io.airbyte.integrations.destination.s3.util.StreamTransferManagerFactory; @@ -106,8 +106,7 @@ public void testParseCodecConfigInvalid() { public void testHandlePartSizeConfig() throws IllegalAccessException { final JsonNode config = ConfigTestUtils.getBaseConfig(Jsons.deserialize("{\n" - + " \"format_type\": \"AVRO\",\n" - + " \"part_size_mb\": 6\n" + + " \"format_type\": \"AVRO\"\n" + "}")); final S3DestinationConfig s3DestinationConfig = S3DestinationConfig @@ -116,15 +115,13 @@ public 
void testHandlePartSizeConfig() throws IllegalAccessException { final S3FormatConfig formatConfig = s3DestinationConfig.getFormatConfig(); assertEquals("AVRO", formatConfig.getFormat().name()); - assertEquals(6, formatConfig.getPartSize()); // Assert that is set properly in config final StreamTransferManager streamTransferManager = StreamTransferManagerFactory .create(s3DestinationConfig.getBucketName(), "objectKey", null) - .setPartSize(s3DestinationConfig.getFormatConfig().getPartSize()) .get(); final Integer partSizeBytes = (Integer) FieldUtils.readField(streamTransferManager, "partSize", true); - assertEquals(MB * 6, partSizeBytes); + assertEquals(MB * DEFAULT_PART_SIZE_MB, partSizeBytes); } @Test @@ -140,11 +137,10 @@ public void testHandleAbsenceOfPartSizeConfig() throws IllegalAccessException { final StreamTransferManager streamTransferManager = StreamTransferManagerFactory .create(s3DestinationConfig.getBucketName(), "objectKey", null) - .setPartSize(s3DestinationConfig.getFormatConfig().getPartSize()) .get(); final Integer partSizeBytes = (Integer) FieldUtils.readField(streamTransferManager, "partSize", true); - assertEquals(MB * S3DestinationConstants.DEFAULT_PART_SIZE_MB, partSizeBytes); + assertEquals(MB * DEFAULT_PART_SIZE_MB, partSizeBytes); } } diff --git a/airbyte-integrations/connectors/destination-s3/src/test/java/io/airbyte/integrations/destination/s3/csv/S3CsvFormatConfigTest.java b/airbyte-integrations/connectors/destination-s3/src/test/java/io/airbyte/integrations/destination/s3/csv/S3CsvFormatConfigTest.java index 76df86a146f6..f087d4d01316 100644 --- a/airbyte-integrations/connectors/destination-s3/src/test/java/io/airbyte/integrations/destination/s3/csv/S3CsvFormatConfigTest.java +++ b/airbyte-integrations/connectors/destination-s3/src/test/java/io/airbyte/integrations/destination/s3/csv/S3CsvFormatConfigTest.java @@ -5,6 +5,7 @@ package io.airbyte.integrations.destination.s3.csv; import static 
com.amazonaws.services.s3.internal.Constants.MB; +import static io.airbyte.integrations.destination.s3.util.StreamTransferManagerFactory.DEFAULT_PART_SIZE_MB; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertTrue; @@ -42,8 +43,7 @@ public void testHandlePartSizeConfig() throws IllegalAccessException { final JsonNode config = ConfigTestUtils.getBaseConfig(Jsons.deserialize("{\n" + " \"format_type\": \"CSV\",\n" - + " \"flattening\": \"Root level flattening\",\n" - + " \"part_size_mb\": 6\n" + + " \"flattening\": \"Root level flattening\"\n" + "}")); final S3DestinationConfig s3DestinationConfig = S3DestinationConfig @@ -52,15 +52,13 @@ public void testHandlePartSizeConfig() throws IllegalAccessException { final S3FormatConfig formatConfig = s3DestinationConfig.getFormatConfig(); assertEquals("CSV", formatConfig.getFormat().name()); - assertEquals(6, formatConfig.getPartSize()); // Assert that is set properly in config final StreamTransferManager streamTransferManager = StreamTransferManagerFactory .create(s3DestinationConfig.getBucketName(), "objectKey", null) - .setPartSize(s3DestinationConfig.getFormatConfig().getPartSize()) .get(); final Integer partSizeBytes = (Integer) FieldUtils.readField(streamTransferManager, "partSize", true); - assertEquals(MB * 6, partSizeBytes); + assertEquals(MB * DEFAULT_PART_SIZE_MB, partSizeBytes); } @Test @@ -77,11 +75,10 @@ public void testHandleAbsenceOfPartSizeConfig() throws IllegalAccessException { final StreamTransferManager streamTransferManager = StreamTransferManagerFactory .create(s3DestinationConfig.getBucketName(), "objectKey", null) - .setPartSize(s3DestinationConfig.getFormatConfig().getPartSize()) .get(); final Integer partSizeBytes = (Integer) FieldUtils.readField(streamTransferManager, "partSize", true); - assertEquals(MB * S3DestinationConstants.DEFAULT_PART_SIZE_MB, partSizeBytes); + assertEquals(MB * DEFAULT_PART_SIZE_MB, partSizeBytes); } @Test 
diff --git a/airbyte-integrations/connectors/destination-s3/src/test/java/io/airbyte/integrations/destination/s3/csv/S3CsvWriterTest.java b/airbyte-integrations/connectors/destination-s3/src/test/java/io/airbyte/integrations/destination/s3/csv/S3CsvWriterTest.java index 56e3a59b8db3..42a6ee1ebade 100644 --- a/airbyte-integrations/connectors/destination-s3/src/test/java/io/airbyte/integrations/destination/s3/csv/S3CsvWriterTest.java +++ b/airbyte-integrations/connectors/destination-s3/src/test/java/io/airbyte/integrations/destination/s3/csv/S3CsvWriterTest.java @@ -53,8 +53,7 @@ class S3CsvWriterTest { .withNamespace("fake-namespace")); private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper(); - private static final int PART_SIZE = 7; - private static final S3CsvFormatConfig CSV_FORMAT_CONFIG = new S3CsvFormatConfig(Flattening.NO, (long) PART_SIZE, CompressionType.NO_COMPRESSION); + private static final S3CsvFormatConfig CSV_FORMAT_CONFIG = new S3CsvFormatConfig(Flattening.NO, CompressionType.NO_COMPRESSION); private static final S3DestinationConfig CONFIG = S3DestinationConfig.create( "fake-bucket", @@ -62,7 +61,6 @@ class S3CsvWriterTest { "fake-region") .withEndpoint("fake-endpoint") .withAccessKeyCredential("fake-access-key-id", "fake-secret-access-key") - .withPartSize(PART_SIZE) .withFormatConfig(CSV_FORMAT_CONFIG) .get(); @@ -162,7 +160,6 @@ public void createsExactlyOneUpload() throws IOException { final StreamTransferManager manager = streamTransferManagerMockedConstruction.constructed().get(0); final StreamTransferManagerArguments args = streamTransferManagerConstructorArguments.get(0); - verify(manager).partSize(PART_SIZE); verify(manager).numUploadThreads(UPLOAD_THREADS); verify(manager).queueCapacity(QUEUE_CAPACITY); assertEquals("fake-bucket", args.bucket); @@ -255,7 +252,6 @@ public void writesContentsCorrectly_when_stagingDatabaseConfig() throws IOExcept "fake-region") .withEndpoint("fake-endpoint") 
.withAccessKeyCredential("fake-access-key-id", "fake-secret-access-key") - .withPartSize(PART_SIZE) .withFormatConfig(CSV_FORMAT_CONFIG) .get(); final S3CsvWriter writer = new Builder( diff --git a/airbyte-integrations/connectors/destination-s3/src/test/java/io/airbyte/integrations/destination/s3/jsonl/S3JsonlFormatConfigTest.java b/airbyte-integrations/connectors/destination-s3/src/test/java/io/airbyte/integrations/destination/s3/jsonl/S3JsonlFormatConfigTest.java index 9e092b114d72..3a9c97199097 100644 --- a/airbyte-integrations/connectors/destination-s3/src/test/java/io/airbyte/integrations/destination/s3/jsonl/S3JsonlFormatConfigTest.java +++ b/airbyte-integrations/connectors/destination-s3/src/test/java/io/airbyte/integrations/destination/s3/jsonl/S3JsonlFormatConfigTest.java @@ -5,13 +5,13 @@ package io.airbyte.integrations.destination.s3.jsonl; import static com.amazonaws.services.s3.internal.Constants.MB; +import static io.airbyte.integrations.destination.s3.util.StreamTransferManagerFactory.DEFAULT_PART_SIZE_MB; import static org.junit.jupiter.api.Assertions.assertEquals; import alex.mojaki.s3upload.StreamTransferManager; import com.fasterxml.jackson.databind.JsonNode; import io.airbyte.commons.json.Jsons; import io.airbyte.integrations.destination.s3.S3DestinationConfig; -import io.airbyte.integrations.destination.s3.S3DestinationConstants; import io.airbyte.integrations.destination.s3.S3FormatConfig; import io.airbyte.integrations.destination.s3.util.ConfigTestUtils; import io.airbyte.integrations.destination.s3.util.StreamTransferManagerFactory; @@ -26,8 +26,7 @@ public class S3JsonlFormatConfigTest { public void testHandlePartSizeConfig() throws IllegalAccessException { final JsonNode config = ConfigTestUtils.getBaseConfig(Jsons.deserialize("{\n" - + " \"format_type\": \"JSONL\",\n" - + " \"part_size_mb\": 6\n" + + " \"format_type\": \"JSONL\"\n" + "}")); final S3DestinationConfig s3DestinationConfig = S3DestinationConfig @@ -36,16 +35,14 @@ public 
void testHandlePartSizeConfig() throws IllegalAccessException { final S3FormatConfig formatConfig = s3DestinationConfig.getFormatConfig(); assertEquals("JSONL", formatConfig.getFormat().name()); - assertEquals(6, formatConfig.getPartSize()); // Assert that is set properly in config final StreamTransferManager streamTransferManager = StreamTransferManagerFactory .create(s3DestinationConfig.getBucketName(), "objectKey", null) - .setPartSize(s3DestinationConfig.getFormatConfig().getPartSize()) .get(); final Integer partSizeBytes = (Integer) FieldUtils.readField(streamTransferManager, "partSize", true); - assertEquals(MB * 6, partSizeBytes); + assertEquals(MB * DEFAULT_PART_SIZE_MB, partSizeBytes); } @Test @@ -61,11 +58,10 @@ public void testHandleAbsenceOfPartSizeConfig() throws IllegalAccessException { final StreamTransferManager streamTransferManager = StreamTransferManagerFactory .create(s3DestinationConfig.getBucketName(), "objectKey", null) - .setPartSize(s3DestinationConfig.getFormatConfig().getPartSize()) .get(); final Integer partSizeBytes = (Integer) FieldUtils.readField(streamTransferManager, "partSize", true); - assertEquals(MB * S3DestinationConstants.DEFAULT_PART_SIZE_MB, partSizeBytes); + assertEquals(MB * DEFAULT_PART_SIZE_MB, partSizeBytes); } } diff --git a/airbyte-integrations/connectors/destination-s3/src/test/resources/parquet/json_schema_converter/type_conversion_test_cases.json b/airbyte-integrations/connectors/destination-s3/src/test/resources/parquet/json_schema_converter/type_conversion_test_cases.json index fabee9775aa8..2262b4ff76e6 100644 --- a/airbyte-integrations/connectors/destination-s3/src/test/resources/parquet/json_schema_converter/type_conversion_test_cases.json +++ b/airbyte-integrations/connectors/destination-s3/src/test/resources/parquet/json_schema_converter/type_conversion_test_cases.json @@ -9,16 +9,26 @@ { "fieldName": "integer_field", "jsonFieldSchema": { - "type": "integer" + "type": "number", + "airbyte_type": "integer" }, 
"avroFieldType": ["null", "int"] }, { - "fieldName": "number_field", + "fieldName": "big_integer_field", "jsonFieldSchema": { - "type": "number" + "type": "number", + "airbyte_type": "big_integer" }, - "avroFieldType": ["null", "double"] + "avroFieldType": ["null", "long"] + }, + { + "fieldName": "float_field", + "jsonFieldSchema": { + "type": "number", + "airbyte_type": "float" + }, + "avroFieldType": ["null", "float"] }, { "fieldName": "null_field", @@ -60,6 +70,10 @@ }, { "type": "number" + }, + { + "type": "number", + "airbyte_type": "big_integer" } ] }, @@ -67,7 +81,7 @@ "null", { "type": "array", - "items": ["null", "string", "double"] + "items": ["null", "string", "double", "long"] } ] }, @@ -79,6 +93,10 @@ "id": { "type": "integer" }, + "long_id": { + "type": "number", + "airbyte_type": "big_integer" + }, "node_id": { "type": ["null", "string"] } @@ -95,6 +113,11 @@ "type": ["null", "int"], "default": null }, + { + "name": "long_id", + "type": ["null", "long"], + "default": null + }, { "name": "node_id", "type": ["null", "string"], @@ -146,23 +169,35 @@ { "fieldName": "any_of_field", "jsonFieldSchema": { - "anyOf": [{ "type": "string" }, { "type": "integer" }] + "anyOf": [ + { "type": "string" }, + { "type": "integer" }, + { "type": "number" } + ] }, - "avroFieldType": ["null", "string", "int"] + "avroFieldType": ["null", "string", "int", "double"] }, { "fieldName": "all_of_field", "jsonFieldSchema": { - "allOf": [{ "type": "string" }, { "type": "integer" }] + "allOf": [ + { "type": "string" }, + { "type": "integer" }, + { "type": "number", "airbyte_type": "float" } + ] }, - "avroFieldType": ["null", "string", "int"] + "avroFieldType": ["null", "string", "int", "float"] }, { "fieldName": "one_of_field", "jsonFieldSchema": { - "oneOf": [{ "type": "string" }, { "type": "integer" }] + "oneOf": [ + { "type": "string" }, + { "type": "integer" }, + { "type": "number", "airbyte_type": "big_integer" } + ] }, - "avroFieldType": ["null", "string", "int"] + 
"avroFieldType": ["null", "string", "int", "long"] }, { "fieldName": "logical_type_date_time", diff --git a/airbyte-integrations/connectors/destination-scylla/Dockerfile b/airbyte-integrations/connectors/destination-scylla/Dockerfile index 822b6c15c8f3..f7e349e0c1b1 100644 --- a/airbyte-integrations/connectors/destination-scylla/Dockerfile +++ b/airbyte-integrations/connectors/destination-scylla/Dockerfile @@ -16,5 +16,5 @@ ENV APPLICATION destination-scylla COPY --from=build /airbyte /airbyte -LABEL io.airbyte.version=0.1.1 +LABEL io.airbyte.version=0.1.2 LABEL io.airbyte.name=airbyte/destination-scylla diff --git a/airbyte-integrations/connectors/destination-scylla/build.gradle b/airbyte-integrations/connectors/destination-scylla/build.gradle index a36868b9d2c9..9fcc858fe811 100644 --- a/airbyte-integrations/connectors/destination-scylla/build.gradle +++ b/airbyte-integrations/connectors/destination-scylla/build.gradle @@ -23,7 +23,7 @@ dependencies { // https://mvnrepository.com/artifact/org.assertj/assertj-core testImplementation "org.assertj:assertj-core:${assertVersion}" // https://mvnrepository.com/artifact/org.testcontainers/testcontainers - testImplementation libs.testcontainers + testImplementation libs.connectors.testcontainers.scylla diff --git a/airbyte-integrations/connectors/destination-snowflake/Dockerfile b/airbyte-integrations/connectors/destination-snowflake/Dockerfile index dc1c4a0ba295..bb18fdb8459c 100644 --- a/airbyte-integrations/connectors/destination-snowflake/Dockerfile +++ b/airbyte-integrations/connectors/destination-snowflake/Dockerfile @@ -20,5 +20,5 @@ RUN tar xf ${APPLICATION}.tar --strip-components=1 ENV ENABLE_SENTRY true -LABEL io.airbyte.version=0.4.28 +LABEL io.airbyte.version=0.4.30 LABEL io.airbyte.name=airbyte/destination-snowflake diff --git a/airbyte-integrations/connectors/destination-snowflake/src/main/java/io/airbyte/integrations/destination/snowflake/SnowflakeGcsStagingSqlOperations.java 
b/airbyte-integrations/connectors/destination-snowflake/src/main/java/io/airbyte/integrations/destination/snowflake/SnowflakeGcsStagingSqlOperations.java index b8e0fe522ec4..4df433f285cf 100644 --- a/airbyte-integrations/connectors/destination-snowflake/src/main/java/io/airbyte/integrations/destination/snowflake/SnowflakeGcsStagingSqlOperations.java +++ b/airbyte-integrations/connectors/destination-snowflake/src/main/java/io/airbyte/integrations/destination/snowflake/SnowflakeGcsStagingSqlOperations.java @@ -28,6 +28,7 @@ import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.HashSet; +import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Set; @@ -190,7 +191,14 @@ public void dropStageIfExists(JdbcDatabase database, String stageName) throws Ex private void dropBucketObject() { if (!fullObjectKeys.isEmpty()) { - fullObjectKeys.forEach(this::removeBlob); + Iterator iterator = fullObjectKeys.iterator(); + while (iterator.hasNext()) { + String element = iterator.next(); + if (element != null) { + removeBlob(element); + iterator.remove(); + } + } } } diff --git a/airbyte-integrations/connectors/destination-snowflake/src/main/resources/spec.json b/airbyte-integrations/connectors/destination-snowflake/src/main/resources/spec.json index f02f6172349e..9603ae207ace 100644 --- a/airbyte-integrations/connectors/destination-snowflake/src/main/resources/spec.json +++ b/airbyte-integrations/connectors/destination-snowflake/src/main/resources/spec.json @@ -132,7 +132,6 @@ "oneOf": [ { "title": "Select another option", - "additionalProperties": false, "description": "Select another option", "required": ["method"], "properties": { @@ -147,7 +146,6 @@ }, { "title": "[Recommended] Internal Staging", - "additionalProperties": false, "description": "Writes large batches of records to a file, uploads the file to Snowflake, then uses

COPY INTO table
to upload the file. Recommended for large production workloads for better speed and scalability.", "required": ["method"], "properties": { @@ -162,7 +160,6 @@ }, { "title": "AWS S3 Staging", - "additionalProperties": false, "description": "Writes large batches of records to a file, uploads the file to S3, then uses
COPY INTO table
to upload the file. Recommended for large production workloads for better speed and scalability.", "required": [ "method", @@ -233,27 +230,19 @@ "airbyte_secret": true, "order": 4 }, - "part_size": { - "type": "integer", - "default": 5, - "examples": [5], - "description": "Optional. Increase this if syncing tables larger than 100GB. Only relevant for COPY. Files are streamed to S3 in parts. This determines the size of each part, in MBs. As S3 has a limit of 10,000 parts per file, part size affects the table size. This is 10MB by default, resulting in a default limit of 100GB tables. Note, a larger part size will result in larger memory requirements. A rule of thumb is to multiply the part size by 10 to get the memory requirement. Modify this with care.", - "title": "Stream Part Size", - "order": 5 - }, "purge_staging_data": { "title": "Purge Staging Files and Tables", "type": "boolean", "description": "Whether to delete the staging files from S3 after completing the sync. See the docs for details. Only relevant for COPY. Defaults to true.", "default": true, - "order": 6 + "order": 5 }, "encryption": { "title": "Encryption", "type": "object", "description": "How to encrypt the staging data", "default": { "encryption_type": "none" }, - "order": 7, + "order": 6, "oneOf": [ { "title": "No encryption", @@ -295,7 +284,6 @@ }, { "title": "GCS Staging", - "additionalProperties": false, "description": "Writes large batches of records to a file, uploads the file to GCS, then uses
COPY INTO table
to upload the file. Recommended for large production workloads for better speed and scalability.", "required": [ "method", @@ -338,7 +326,6 @@ }, { "title": "Azure Blob Storage Staging", - "additionalProperties": false, "description": "Writes large batches of records to a file, uploads the file to Azure Blob Storage, then uses
COPY INTO table
to upload the file. Recommended for large production workloads for better speed and scalability.", "required": [ "method", diff --git a/airbyte-integrations/connectors/destination-snowflake/src/test/java/io/airbyte/integrations/destination/snowflake/SnowflakeS3StreamCopierTest.java b/airbyte-integrations/connectors/destination-snowflake/src/test/java/io/airbyte/integrations/destination/snowflake/SnowflakeS3StreamCopierTest.java index 9e5e555a6a2a..a899e7562d60 100644 --- a/airbyte-integrations/connectors/destination-snowflake/src/test/java/io/airbyte/integrations/destination/snowflake/SnowflakeS3StreamCopierTest.java +++ b/airbyte-integrations/connectors/destination-snowflake/src/test/java/io/airbyte/integrations/destination/snowflake/SnowflakeS3StreamCopierTest.java @@ -30,8 +30,6 @@ class SnowflakeS3StreamCopierTest { - private static final int PART_SIZE = 5; - // equivalent to Thu, 09 Dec 2021 19:17:54 GMT private static final Timestamp UPLOAD_TIME = Timestamp.from(Instant.ofEpochMilli(1639077474000L)); @@ -52,7 +50,6 @@ public void setup() throws Exception { "fake-region") .withEndpoint("fake-endpoint") .withAccessKeyCredential("fake-access-key-id", "fake-secret-access-key") - .withPartSize(PART_SIZE) .get(); copier = (SnowflakeS3StreamCopier) new SnowflakeS3StreamCopierFactory().create( diff --git a/airbyte-integrations/connectors/destination-snowflake/src/test/resources/copy_s3_config.json b/airbyte-integrations/connectors/destination-snowflake/src/test/resources/copy_s3_config.json index cd982b0f2805..bf55f9a2fd92 100644 --- a/airbyte-integrations/connectors/destination-snowflake/src/test/resources/copy_s3_config.json +++ b/airbyte-integrations/connectors/destination-snowflake/src/test/resources/copy_s3_config.json @@ -13,7 +13,6 @@ "s3_bucket_name": "airbyte-snowflake-integration-tests", "s3_bucket_region": "us-east-2", "access_key_id": "test", - "secret_access_key": "test", - "part_size": 5 + "secret_access_key": "test" } } diff --git 
a/airbyte-integrations/connectors/destination-snowflake/src/test/resources/copy_s3_encrypted_config.json b/airbyte-integrations/connectors/destination-snowflake/src/test/resources/copy_s3_encrypted_config.json index da8a8cbe1927..e0c5e3b62344 100644 --- a/airbyte-integrations/connectors/destination-snowflake/src/test/resources/copy_s3_encrypted_config.json +++ b/airbyte-integrations/connectors/destination-snowflake/src/test/resources/copy_s3_encrypted_config.json @@ -14,7 +14,6 @@ "s3_bucket_region": "us-east-2", "access_key_id": "test", "secret_access_key": "test", - "part_size": 5, "encryption": { "encryption_type": "aes_cbc_envelope" } diff --git a/airbyte-integrations/connectors/source-amazon-ads/acceptance-test-config.yml b/airbyte-integrations/connectors/source-amazon-ads/acceptance-test-config.yml index e5c5223f6961..24644c5c3e2a 100644 --- a/airbyte-integrations/connectors/source-amazon-ads/acceptance-test-config.yml +++ b/airbyte-integrations/connectors/source-amazon-ads/acceptance-test-config.yml @@ -5,14 +5,18 @@ tests: spec: - spec_path: "integration_tests/spec.json" connection: - - config_path: "secrets/config.json" - status: "succeed" + # THIS TEST IS COMMENTED OUT BECAUSE OF + # https://advertising.amazon.com/API/docs/en-us/info/release-notes#sandbox-deprecation-on-june-28-2022 + # - config_path: "secrets/config.json" + # status: "succeed" - config_path: "secrets/config_test_account.json" status: "succeed" - config_path: "integration_tests/invalid_config.json" status: "failed" discovery: - - config_path: "secrets/config.json" + # THIS TEST IS COMMENTED OUT BECAUSE OF LOST ACCESS TO SANDBOX + # - config_path: "secrets/config.json" + - config_path: "secrets/config_test_account.json" basic_read: - config_path: "secrets/config_test_account.json" configured_catalog_path: "integration_tests/configured_catalog.json" @@ -22,18 +26,22 @@ tests: extra_fields: no exact_order: no extra_records: no - timeout_seconds: 900 - - config_path: "secrets/config.json" - 
configured_catalog_path: "integration_tests/configured_catalog_sponsored_display.json" - empty_streams: ["sponsored_display_targetings"] - expect_records: - path: "integration_tests/expected_records_sponsored_display.txt" - extra_fields: no - exact_order: no - extra_records: no + timeout_seconds: 3600 + # THIS TEST IS COMMENTED OUT BECAUSE OF + # https://advertising.amazon.com/API/docs/en-us/info/release-notes#sandbox-deprecation-on-june-28-2022 + # - config_path: "secrets/config.json" + # configured_catalog_path: "integration_tests/configured_catalog_sponsored_display.json" + # empty_streams: ["sponsored_display_targetings"] + # expect_records: + # path: "integration_tests/expected_records_sponsored_display.txt" + # extra_fields: no + # exact_order: no + # extra_records: no full_refresh: - config_path: "secrets/config_test_account.json" configured_catalog_path: "integration_tests/configured_catalog.json" - timeout_seconds: 1800 - - config_path: "secrets/config.json" - configured_catalog_path: "integration_tests/configured_catalog_sponsored_display.json" + timeout_seconds: 3600 + # THIS TEST IS COMMENTED OUT BECAUSE OF + # https://advertising.amazon.com/API/docs/en-us/info/release-notes#sandbox-deprecation-on-june-28-2022 + # - config_path: "secrets/config.json" + # configured_catalog_path: "integration_tests/configured_catalog_sponsored_display.json" diff --git a/airbyte-integrations/connectors/source-amazon-ads/source_amazon_ads/source.py b/airbyte-integrations/connectors/source-amazon-ads/source_amazon_ads/source.py index dc20266eb346..058413428679 100644 --- a/airbyte-integrations/connectors/source-amazon-ads/source_amazon_ads/source.py +++ b/airbyte-integrations/connectors/source-amazon-ads/source_amazon_ads/source.py @@ -6,13 +6,12 @@ from typing import Any, List, Mapping, Tuple from airbyte_cdk.logger import AirbyteLogger -from airbyte_cdk.models import ConnectorSpecification from airbyte_cdk.sources import AbstractSource from airbyte_cdk.sources.streams 
import Stream from airbyte_cdk.sources.streams.http.auth import Oauth2Authenticator +from .constants import AmazonAdsRegion from .schemas import Profile -from .spec import AmazonAdsConfig, advanced_auth from .streams import ( Profiles, SponsoredBrandsAdGroups, @@ -45,12 +44,12 @@ def check_connection(self, logger: AirbyteLogger, config: Mapping[str, Any]) -> :param logger: logger object :return Tuple[bool, any]: (True, None) if the input config can be used to connect to the API successfully, (False, error) otherwise. """ - config = AmazonAdsConfig(**config) # Check connection by sending list of profiles request. Its most simple # request, not require additional parameters and usually has few data # in response body. # It doesnt support pagination so there is no sense of reading single # record, it would fetch all the data anyway. + self._set_defaults(config) Profiles(config, authenticator=self._make_authenticator(config)).get_all_profiles() return True, None @@ -59,7 +58,7 @@ def streams(self, config: Mapping[str, Any]) -> List[Stream]: :param config: A Mapping of the user input configuration as defined in the connector spec. 
:return list of streams for current source """ - config = AmazonAdsConfig(**config) + self._set_defaults(config) auth = self._make_authenticator(config) stream_args = {"config": config, "authenticator": auth} # All data for individual Amazon Ads stream divided into sets of data for @@ -91,24 +90,21 @@ def streams(self, config: Mapping[str, Any]) -> List[Stream]: ] return [profiles_stream, *[stream_class(**stream_args) for stream_class in non_profile_stream_classes]] - def spec(self, *args) -> ConnectorSpecification: - return ConnectorSpecification( - documentationUrl="https://docs.airbyte.com/integrations/sources/amazon-ads", - connectionSpecification=AmazonAdsConfig.schema(), - advanced_auth=advanced_auth, - ) - @staticmethod - def _make_authenticator(config: AmazonAdsConfig): + def _make_authenticator(config: Mapping[str, Any]): return Oauth2Authenticator( token_refresh_endpoint=TOKEN_URL, - client_id=config.client_id, - client_secret=config.client_secret, - refresh_token=config.refresh_token, + client_id=config["client_id"], + client_secret=config["client_secret"], + refresh_token=config["refresh_token"], ) @staticmethod - def _choose_profiles(config: AmazonAdsConfig, profiles: List[Profile]): - if not config.profiles: + def _set_defaults(config: Mapping[str, Any]): + config["region"] = AmazonAdsRegion.NA + + @staticmethod + def _choose_profiles(config: Mapping[str, Any], profiles: List[Profile]): + if not config.get("profiles"): return profiles - return list(filter(lambda profile: profile.profileId in config.profiles, profiles)) + return list(filter(lambda profile: profile.profileId in config["profiles"], profiles)) diff --git a/airbyte-integrations/connectors/source-amazon-ads/source_amazon_ads/spec.py b/airbyte-integrations/connectors/source-amazon-ads/source_amazon_ads/spec.py deleted file mode 100644 index bf7a598f0791..000000000000 --- a/airbyte-integrations/connectors/source-amazon-ads/source_amazon_ads/spec.py +++ /dev/null @@ -1,117 +0,0 @@ -# -# 
Copyright (c) 2022 Airbyte, Inc., all rights reserved. -# - -from typing import List - -from airbyte_cdk.models import AdvancedAuth, AuthFlowType, OAuthConfigSpecification -from airbyte_cdk.sources.utils.schema_helpers import expand_refs -from pydantic import BaseModel, Extra, Field -from source_amazon_ads.constants import AmazonAdsRegion - - -class AmazonAdsConfig(BaseModel): - class Config: - title = "Amazon Ads Spec" - # ignore extra attributes during model initialization - # https://pydantic-docs.helpmanual.io/usage/model_config/ - extra = Extra.ignore - # it's default, but better to be more explicit - schema_extra = {"additionalProperties": True} - - auth_type: str = Field(default="oauth2.0", const=True, order=0) - - client_id: str = Field( - title="Client ID", - description='The client ID of your Amazon Ads developer application. See the docs for more information.', - order=1, - ) - - client_secret: str = Field( - title="Client Secret", - description='The client secret of your Amazon Ads developer application. See the docs for more information.', - airbyte_secret=True, - order=2, - ) - - refresh_token: str = Field( - title="Refresh Token", - description='Amazon Ads refresh token. See the docs for more information on how to obtain this token.', - airbyte_secret=True, - order=3, - ) - - region: AmazonAdsRegion = Field( - title="Region *", - description='Region to pull data from (EU/NA/FE/SANDBOX). See docs for more details.', - default=AmazonAdsRegion.NA, - order=4, - ) - - report_wait_timeout: int = Field( - title="Report Wait Timeout *", - description="Timeout duration in minutes for Reports. Default is 30 minutes.", - default=30, - examples=[30, 120], - order=5, - ) - - report_generation_max_retries: int = Field( - title="Report Generation Maximum Retries *", - description="Maximum retries Airbyte will attempt for fetching report data. 
Default is 5.", - default=5, - examples=[5, 10, 15], - order=6, - ) - - start_date: str = Field( - None, - title="Start Date (Optional)", - description="The Start date for collecting reports, should not be more than 60 days in the past. In YYYY-MM-DD format", - examples=["2022-10-10", "2022-10-22"], - order=7, - ) - - profiles: List[int] = Field( - None, - title="Profile IDs (Optional)", - description='Profile IDs you want to fetch data for. See docs for more details.', - order=8, - ) - - @classmethod - def schema(cls, **kwargs): - schema = super().schema(**kwargs) - expand_refs(schema) - # Transform pydantic generated enum for region - if schema["properties"]["region"].get("allOf"): - schema["properties"]["region"] = {**schema["properties"]["region"]["allOf"][0], **schema["properties"]["region"]} - schema["properties"]["region"].pop("allOf") - return schema - - -advanced_auth = AdvancedAuth( - auth_flow_type=AuthFlowType.oauth2_0, - predicate_key=["auth_type"], - predicate_value="oauth2.0", - oauth_config_specification=OAuthConfigSpecification( - complete_oauth_output_specification={ - "type": "object", - "additionalProperties": False, - "properties": {"refresh_token": {"type": "string", "path_in_connector_config": ["refresh_token"]}}, - }, - complete_oauth_server_input_specification={ - "type": "object", - "additionalProperties": False, - "properties": {"client_id": {"type": "string"}, "client_secret": {"type": "string"}}, - }, - complete_oauth_server_output_specification={ - "type": "object", - "additionalProperties": False, - "properties": { - "client_id": {"type": "string", "path_in_connector_config": ["client_id"]}, - "client_secret": {"type": "string", "path_in_connector_config": ["client_secret"]}, - }, - }, - ), -) diff --git a/airbyte-integrations/connectors/source-amazon-ads/source_amazon_ads/spec.yaml b/airbyte-integrations/connectors/source-amazon-ads/source_amazon_ads/spec.yaml new file mode 100644 index 000000000000..ee50f4b4e95e --- /dev/null +++ 
b/airbyte-integrations/connectors/source-amazon-ads/source_amazon_ads/spec.yaml @@ -0,0 +1,128 @@ +--- +documentationUrl: https://docs.airbyte.com/integrations/sources/amazon-ads +connectionSpecification: + title: Amazon Ads Spec + type: object + properties: + auth_type: + title: Auth Type + const: oauth2.0 + order: 0 + type: string + client_id: + title: Client ID + description: + The client ID of your Amazon Ads developer application. See the + docs + for more information. + order: 1 + type: string + client_secret: + title: Client Secret + description: + The client secret of your Amazon Ads developer application. See + the docs + for more information. + airbyte_secret: true + order: 2 + type: string + refresh_token: + title: Refresh Token + description: + Amazon Ads refresh token. See the docs + for more information on how to obtain this token. + airbyte_secret: true + order: 3 + type: string + region: + title: Region * + description: + Region to pull data from (EU/NA/FE/SANDBOX). See docs + for more details. + enum: + - NA + - EU + - FE + - SANDBOX + type: string + default: NA + order: 4 + report_wait_timeout: + title: Report Wait Timeout * + description: Timeout duration in minutes for Reports. Default is 30 minutes. + default: 30 + examples: + - 30 + - 120 + order: 5 + type: integer + report_generation_max_retries: + title: Report Generation Maximum Retries * + description: + Maximum retries Airbyte will attempt for fetching report data. + Default is 5. + default: 5 + examples: + - 5 + - 10 + - 15 + order: 6 + type: integer + start_date: + title: Start Date (Optional) + description: + The Start date for collecting reports, should not be more than + 60 days in the past. In YYYY-MM-DD format + examples: + - "2022-10-10" + - "2022-10-22" + order: 7 + type: string + profiles: + title: Profile IDs (Optional) + description: + Profile IDs you want to fetch data for. See docs + for more details. 
+ order: 8 + type: array + items: + type: integer + required: + - client_id + - client_secret + - refresh_token + additionalProperties: true +advanced_auth: + auth_flow_type: oauth2.0 + predicate_key: + - auth_type + predicate_value: oauth2.0 + oauth_config_specification: + complete_oauth_output_specification: + type: object + additionalProperties: false + properties: + refresh_token: + type: string + path_in_connector_config: + - refresh_token + complete_oauth_server_input_specification: + type: object + additionalProperties: false + properties: + client_id: + type: string + client_secret: + type: string + complete_oauth_server_output_specification: + type: object + additionalProperties: false + properties: + client_id: + type: string + path_in_connector_config: + - client_id + client_secret: + type: string + path_in_connector_config: + - client_secret diff --git a/airbyte-integrations/connectors/source-amazon-ads/source_amazon_ads/streams/common.py b/airbyte-integrations/connectors/source-amazon-ads/source_amazon_ads/streams/common.py index c983af4e9587..f84b59a0f16a 100644 --- a/airbyte-integrations/connectors/source-amazon-ads/source_amazon_ads/streams/common.py +++ b/airbyte-integrations/connectors/source-amazon-ads/source_amazon_ads/streams/common.py @@ -14,7 +14,6 @@ from source_amazon_ads.constants import URL_MAPPING from source_amazon_ads.schemas import CatalogModel from source_amazon_ads.schemas.profile import Profile -from source_amazon_ads.spec import AmazonAdsConfig """ This class hierarchy may seem overcomplicated so here is a visualization of @@ -74,10 +73,10 @@ class BasicAmazonAdsStream(Stream, ABC): Base class for all Amazon Ads streams. 
""" - def __init__(self, config: AmazonAdsConfig, profiles: List[Profile] = None): + def __init__(self, config: Mapping[str, Any], profiles: List[Profile] = None): self._profiles = profiles or [] - self._client_id = config.client_id - self._url = URL_MAPPING[config.region] + self._client_id = config["client_id"] + self._url = URL_MAPPING[config["region"]] @property @abstractmethod @@ -98,7 +97,7 @@ class AmazonAdsStream(HttpStream, BasicAmazonAdsStream): Class for getting data from streams that based on single http request. """ - def __init__(self, config: AmazonAdsConfig, *args, profiles: List[Profile] = None, **kwargs): + def __init__(self, config: Mapping[str, Any], *args, profiles: List[Profile] = None, **kwargs): # Each AmazonAdsStream instance are dependant on list of profiles. BasicAmazonAdsStream.__init__(self, config, profiles=profiles) HttpStream.__init__(self, *args, **kwargs) diff --git a/airbyte-integrations/connectors/source-amazon-ads/source_amazon_ads/streams/report_streams/report_streams.py b/airbyte-integrations/connectors/source-amazon-ads/source_amazon_ads/streams/report_streams/report_streams.py index 369137d387a5..4ebaa733978f 100644 --- a/airbyte-integrations/connectors/source-amazon-ads/source_amazon_ads/streams/report_streams/report_streams.py +++ b/airbyte-integrations/connectors/source-amazon-ads/source_amazon_ads/streams/report_streams/report_streams.py @@ -22,7 +22,6 @@ from pendulum import DateTime from pydantic import BaseModel from source_amazon_ads.schemas import CatalogModel, MetricsReport, Profile -from source_amazon_ads.spec import AmazonAdsConfig from source_amazon_ads.streams.common import BasicAmazonAdsStream logger = AirbyteLogger() @@ -101,14 +100,14 @@ class ReportStream(BasicAmazonAdsStream, ABC): REPORT_DATE_FORMAT = "YYYYMMDD" cursor_field = "reportDate" - def __init__(self, config: AmazonAdsConfig, profiles: List[Profile], authenticator: Oauth2Authenticator): + def __init__(self, config: Mapping[str, Any], profiles: 
List[Profile], authenticator: Oauth2Authenticator): self._authenticator = authenticator self._session = requests.Session() self._model = self._generate_model() - self.report_wait_timeout = timedelta(minutes=config.report_wait_timeout).total_seconds - self.report_generation_maximum_retries = config.report_generation_max_retries + self.report_wait_timeout = timedelta(minutes=config.get("report_wait_timeout", 30)).total_seconds + self.report_generation_maximum_retries = config.get("report_generation_max_retries", 5) # Set start date from config file, should be in UTC timezone. - self._start_date = pendulum.parse(config.start_date).set(tz="UTC") if config.start_date else None + self._start_date = pendulum.parse(config.get("start_date")).set(tz="UTC") if config.get("start_date") else None super().__init__(config, profiles) @property diff --git a/airbyte-integrations/connectors/source-amazon-ads/unit_tests/conftest.py b/airbyte-integrations/connectors/source-amazon-ads/unit_tests/conftest.py index a774e5e53153..727f81d7001f 100644 --- a/airbyte-integrations/connectors/source-amazon-ads/unit_tests/conftest.py +++ b/airbyte-integrations/connectors/source-amazon-ads/unit_tests/conftest.py @@ -6,12 +6,13 @@ @fixture -def test_config(): +def config(): return { "client_id": "test_client_id", "client_secret": "test_client_secret", "scope": "test_scope", "refresh_token": "test_refresh", + "region": "NA", } diff --git a/airbyte-integrations/connectors/source-amazon-ads/unit_tests/test_report_streams.py b/airbyte-integrations/connectors/source-amazon-ads/unit_tests/test_report_streams.py index 07d3b51b7c10..3edac2ffabc5 100644 --- a/airbyte-integrations/connectors/source-amazon-ads/unit_tests/test_report_streams.py +++ b/airbyte-integrations/connectors/source-amazon-ads/unit_tests/test_report_streams.py @@ -13,7 +13,6 @@ from pytest import raises from requests.exceptions import ConnectionError from source_amazon_ads.schemas.profile import AccountInfo, Profile -from 
source_amazon_ads.spec import AmazonAdsConfig from source_amazon_ads.streams import ( SponsoredBrandsReportStream, SponsoredBrandsVideoReportStream, @@ -105,14 +104,13 @@ def make_profiles(profile_type="seller"): @responses.activate -def test_display_report_stream(test_config): +def test_display_report_stream(config): setup_responses( init_response=REPORT_INIT_RESPONSE, status_response=REPORT_STATUS_RESPONSE, metric_response=METRIC_RESPONSE, ) - config = AmazonAdsConfig(**test_config) profiles = make_profiles() stream = SponsoredDisplayReportStream(config, profiles, authenticator=mock.MagicMock()) @@ -130,14 +128,13 @@ def test_display_report_stream(test_config): @responses.activate -def test_products_report_stream(test_config): +def test_products_report_stream(config): setup_responses( init_response_products=REPORT_INIT_RESPONSE, status_response=REPORT_STATUS_RESPONSE, metric_response=METRIC_RESPONSE, ) - config = AmazonAdsConfig(**test_config) profiles = make_profiles(profile_type="vendor") stream = SponsoredProductsReportStream(config, profiles, authenticator=mock.MagicMock()) @@ -147,14 +144,13 @@ def test_products_report_stream(test_config): @responses.activate -def test_brands_report_stream(test_config): +def test_brands_report_stream(config): setup_responses( init_response_brands=REPORT_INIT_RESPONSE, status_response=REPORT_STATUS_RESPONSE, metric_response=METRIC_RESPONSE, ) - config = AmazonAdsConfig(**test_config) profiles = make_profiles() stream = SponsoredBrandsReportStream(config, profiles, authenticator=mock.MagicMock()) @@ -164,14 +160,13 @@ def test_brands_report_stream(test_config): @responses.activate -def test_brands_video_report_stream(test_config): +def test_brands_video_report_stream(config): setup_responses( init_response_brands=REPORT_INIT_RESPONSE, status_response=REPORT_STATUS_RESPONSE, metric_response=METRIC_RESPONSE, ) - config = AmazonAdsConfig(**test_config) profiles = make_profiles() stream = SponsoredBrandsVideoReportStream(config, 
profiles, authenticator=mock.MagicMock()) @@ -181,8 +176,7 @@ def test_brands_video_report_stream(test_config): @responses.activate -def test_display_report_stream_init_failure(mocker, test_config): - config = AmazonAdsConfig(**test_config) +def test_display_report_stream_init_failure(mocker, config): profiles = make_profiles() stream = SponsoredDisplayReportStream(config, profiles, authenticator=mock.MagicMock()) stream_slice = {"reportDate": "20210725"} @@ -199,9 +193,8 @@ def test_display_report_stream_init_failure(mocker, test_config): @responses.activate -def test_display_report_stream_init_http_exception(mocker, test_config): +def test_display_report_stream_init_http_exception(mocker, config): mocker.patch("time.sleep", lambda x: None) - config = AmazonAdsConfig(**test_config) profiles = make_profiles() stream = SponsoredDisplayReportStream(config, profiles, authenticator=mock.MagicMock()) stream_slice = {"reportDate": "20210725"} @@ -213,9 +206,8 @@ def test_display_report_stream_init_http_exception(mocker, test_config): @responses.activate -def test_display_report_stream_init_too_many_requests(mocker, test_config): +def test_display_report_stream_init_too_many_requests(mocker, config): mocker.patch("time.sleep", lambda x: None) - config = AmazonAdsConfig(**test_config) profiles = make_profiles() stream = SponsoredDisplayReportStream(config, profiles, authenticator=mock.MagicMock()) stream_slice = {"reportDate": "20210725"} @@ -269,7 +261,7 @@ def test_display_report_stream_init_too_many_requests(mocker, test_config): ], ) @responses.activate -def test_display_report_stream_backoff(mocker, test_config, modifiers, expected): +def test_display_report_stream_backoff(mocker, config, modifiers, expected): mocker.patch("time.sleep") setup_responses(init_response=REPORT_INIT_RESPONSE, metric_response=METRIC_RESPONSE) @@ -292,7 +284,6 @@ def __call__(self, request): callback = StatusCallback() responses.add_callback(responses.GET, 
re.compile(r"https://advertising-api.amazon.com/v2/reports/[^/]+$"), callback=callback) - config = AmazonAdsConfig(**test_config) profiles = make_profiles() stream = SponsoredDisplayReportStream(config, profiles, authenticator=mock.MagicMock()) stream_slice = {"reportDate": "20210725"} @@ -307,8 +298,7 @@ def __call__(self, request): @freeze_time("2021-07-30 04:26:08") @responses.activate -def test_display_report_stream_slices_full_refresh(test_config): - config = AmazonAdsConfig(**test_config) +def test_display_report_stream_slices_full_refresh(config): stream = SponsoredDisplayReportStream(config, None, authenticator=mock.MagicMock()) slices = stream.stream_slices(SyncMode.full_refresh, cursor_field=stream.cursor_field) assert slices == [{"reportDate": "20210730"}] @@ -316,8 +306,7 @@ def test_display_report_stream_slices_full_refresh(test_config): @freeze_time("2021-07-30 04:26:08") @responses.activate -def test_display_report_stream_slices_incremental(test_config): - config = AmazonAdsConfig(**test_config) +def test_display_report_stream_slices_incremental(config): stream = SponsoredDisplayReportStream(config, None, authenticator=mock.MagicMock()) stream_state = {"reportDate": "20210726"} slices = stream.stream_slices(SyncMode.incremental, cursor_field=stream.cursor_field, stream_state=stream_state) diff --git a/airbyte-integrations/connectors/source-amazon-ads/unit_tests/test_source.py b/airbyte-integrations/connectors/source-amazon-ads/unit_tests/test_source.py index 812cd676dca7..52876c71b9f7 100644 --- a/airbyte-integrations/connectors/source-amazon-ads/unit_tests/test_source.py +++ b/airbyte-integrations/connectors/source-amazon-ads/unit_tests/test_source.py @@ -22,35 +22,35 @@ def setup_responses(): @responses.activate -def test_discover(test_config): +def test_discover(config): setup_responses() source = SourceAmazonAds() - catalog = source.discover(None, test_config) + catalog = source.discover(None, config) catalog = AirbyteMessage(type=Type.CATALOG, 
catalog=catalog).dict(exclude_unset=True) schemas = [stream["json_schema"] for stream in catalog["catalog"]["streams"]] for schema in schemas: Draft4Validator.check_schema(schema) -def test_spec(test_config): +def test_spec(): source = SourceAmazonAds() - spec = source.spec() + spec = source.spec(None) assert isinstance(spec, ConnectorSpecification) @responses.activate -def test_check(test_config): +def test_check(config): setup_responses() source = SourceAmazonAds() - assert source.check(None, test_config) == AirbyteConnectionStatus(status=Status.SUCCEEDED) + assert source.check(None, config) == AirbyteConnectionStatus(status=Status.SUCCEEDED) assert len(responses.calls) == 2 @responses.activate -def test_source_streams(test_config): +def test_source_streams(config): setup_responses() source = SourceAmazonAds() - streams = source.streams(test_config) + streams = source.streams(config) assert len(streams) == 18 actual_stream_names = {stream.name for stream in streams} expected_stream_names = set( diff --git a/airbyte-integrations/connectors/source-amazon-ads/unit_tests/test_streams.py b/airbyte-integrations/connectors/source-amazon-ads/unit_tests/test_streams.py index 2de4fe0a5611..43400b76a9f7 100644 --- a/airbyte-integrations/connectors/source-amazon-ads/unit_tests/test_streams.py +++ b/airbyte-integrations/connectors/source-amazon-ads/unit_tests/test_streams.py @@ -78,11 +78,11 @@ def get_stream_by_name(streams, stream_name): @responses.activate -def test_streams_profile(test_config, profiles_response): +def test_streams_profile(config, profiles_response): setup_responses(profiles_response=profiles_response) source = SourceAmazonAds() - streams = source.streams(test_config) + streams = source.streams(config) profile_stream = get_stream_by_name(streams, "profiles") schema = profile_stream.get_json_schema() @@ -97,7 +97,7 @@ def test_streams_profile(test_config, profiles_response): @responses.activate -def test_streams_campaigns_4_vendors(test_config, 
profiles_response, campaigns_response): +def test_streams_campaigns_4_vendors(config, profiles_response, campaigns_response): profiles_response = json.loads(profiles_response) for profile in profiles_response: profile["accountInfo"]["type"] = "vendor" @@ -105,7 +105,7 @@ def test_streams_campaigns_4_vendors(test_config, profiles_response, campaigns_r setup_responses(profiles_response=profiles_response, campaigns_response=campaigns_response) source = SourceAmazonAds() - streams = source.streams(test_config) + streams = source.streams(config) profile_stream = get_stream_by_name(streams, "profiles") campaigns_stream = get_stream_by_name(streams, "sponsored_display_campaigns") profile_records = get_all_stream_records(profile_stream) @@ -118,7 +118,7 @@ def test_streams_campaigns_4_vendors(test_config, profiles_response, campaigns_r [1, 2, 5, 1000000], ) @responses.activate -def test_streams_campaigns_pagination(mocker, test_config, profiles_response, campaigns_response, page_size): +def test_streams_campaigns_pagination(mocker, config, profiles_response, campaigns_response, page_size): mocker.patch("source_amazon_ads.streams.common.SubProfilesStream.page_size", page_size) profiles_response = json.loads(profiles_response) for profile in profiles_response: @@ -127,7 +127,7 @@ def test_streams_campaigns_pagination(mocker, test_config, profiles_response, ca setup_responses(profiles_response=profiles_response) source = SourceAmazonAds() - streams = source.streams(test_config) + streams = source.streams(config) profile_stream = get_stream_by_name(streams, "profiles") campaigns_stream = get_stream_by_name(streams, "sponsored_display_campaigns") campaigns = json.loads(campaigns_response) @@ -153,7 +153,7 @@ def campaigns_paginated_response_cb(request): @pytest.mark.parametrize(("status_code"), [HTTPStatus.FORBIDDEN, HTTPStatus.UNAUTHORIZED]) @responses.activate -def test_streams_campaigns_pagination_403_error(mocker, status_code, test_config, profiles_response, 
campaigns_response): +def test_streams_campaigns_pagination_403_error(mocker, status_code, config, profiles_response, campaigns_response): setup_responses(profiles_response=profiles_response) responses.add( responses.GET, @@ -162,7 +162,7 @@ def test_streams_campaigns_pagination_403_error(mocker, status_code, test_config status=status_code, ) source = SourceAmazonAds() - streams = source.streams(test_config) + streams = source.streams(config) campaigns_stream = get_stream_by_name(streams, "sponsored_display_campaigns") with pytest.raises(requests.exceptions.HTTPError): @@ -170,7 +170,7 @@ def test_streams_campaigns_pagination_403_error(mocker, status_code, test_config @responses.activate -def test_streams_campaigns_pagination_403_error_expected(mocker, test_config, profiles_response, campaigns_response): +def test_streams_campaigns_pagination_403_error_expected(mocker, config, profiles_response, campaigns_response): setup_responses(profiles_response=profiles_response) responses.add( responses.GET, @@ -179,7 +179,7 @@ def test_streams_campaigns_pagination_403_error_expected(mocker, test_config, pr status=403, ) source = SourceAmazonAds() - streams = source.streams(test_config) + streams = source.streams(config) campaigns_stream = get_stream_by_name(streams, "sponsored_display_campaigns") campaigns_records = get_all_stream_records(campaigns_stream) @@ -196,7 +196,7 @@ def test_streams_campaigns_pagination_403_error_expected(mocker, test_config, pr ) @responses.activate def test_streams_displays( - test_config, + config, stream_name, endpoint, profiles_response, @@ -212,7 +212,7 @@ def test_streams_displays( ) source = SourceAmazonAds() - streams = source.streams(test_config) + streams = source.streams(config) test_stream = get_stream_by_name(streams, stream_name) records = get_all_stream_records(test_stream) @@ -238,11 +238,11 @@ def test_streams_displays( ], ) @responses.activate -def test_streams_brands_and_products(test_config, stream_name, endpoint, 
profiles_response): +def test_streams_brands_and_products(config, stream_name, endpoint, profiles_response): setup_responses(profiles_response=profiles_response, generic_response=endpoint) source = SourceAmazonAds() - streams = source.streams(test_config) + streams = source.streams(config) test_stream = get_stream_by_name(streams, stream_name) records = get_all_stream_records(test_stream) diff --git a/airbyte-integrations/connectors/source-amazon-seller-partner/Dockerfile b/airbyte-integrations/connectors/source-amazon-seller-partner/Dockerfile index de02a35059d6..10683d5aaeac 100644 --- a/airbyte-integrations/connectors/source-amazon-seller-partner/Dockerfile +++ b/airbyte-integrations/connectors/source-amazon-seller-partner/Dockerfile @@ -12,5 +12,5 @@ RUN pip install . ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] -LABEL io.airbyte.version=0.2.21 +LABEL io.airbyte.version=0.2.22 LABEL io.airbyte.name=airbyte/source-amazon-seller-partner diff --git a/airbyte-integrations/connectors/source-amazon-seller-partner/source_amazon_seller_partner/streams.py b/airbyte-integrations/connectors/source-amazon-seller-partner/source_amazon_seller_partner/streams.py index 5c422858b6b3..804f350823cc 100644 --- a/airbyte-integrations/connectors/source-amazon-seller-partner/source_amazon_seller_partner/streams.py +++ b/airbyte-integrations/connectors/source-amazon-seller-partner/source_amazon_seller_partner/streams.py @@ -764,14 +764,24 @@ def request_params( if next_page_token: return dict(next_page_token) - params = {self.replication_start_date_field: self._replication_start_date, self.page_size_field: self.page_size} - # for finance APIs, end date-time must be no later than two minutes before the request was submitted end_date = pendulum.now("utc").subtract(minutes=2, seconds=10).strftime(DATE_TIME_FORMAT) if self._replication_end_date: end_date = self._replication_end_date - 
params[self.replication_end_date_field] = end_date + # start date and end date should not be more than 180 days apart. + start_date = max(pendulum.parse(self._replication_start_date), pendulum.parse(end_date).subtract(days=180)).strftime( + DATE_TIME_FORMAT + ) + + # logging to make sure user knows taken start date + logger.info("start date used: %s", start_date) + + params = { + self.replication_start_date_field: start_date, + self.replication_end_date_field: end_date, + self.page_size_field: self.page_size, + } return params def next_page_token(self, response: requests.Response) -> Optional[Mapping[str, Any]]: diff --git a/airbyte-integrations/connectors/source-amazon-seller-partner/unit_tests/test_finance_streams.py b/airbyte-integrations/connectors/source-amazon-seller-partner/unit_tests/test_finance_streams.py index cbec8cb8a84d..cb1ceba6a98d 100644 --- a/airbyte-integrations/connectors/source-amazon-seller-partner/unit_tests/test_finance_streams.py +++ b/airbyte-integrations/connectors/source-amazon-seller-partner/unit_tests/test_finance_streams.py @@ -2,6 +2,7 @@ # Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
# +import pendulum import pytest import requests from source_amazon_seller_partner.auth import AWSSignature @@ -83,51 +84,65 @@ } } +DATE_TIME_FORMAT = "%Y-%m-%dT%H:%M:%SZ" + +START_DATE_1 = "2022-05-25T00:00:00Z" +END_DATE_1 = "2022-05-26T00:00:00Z" + +START_DATE_2 = "2021-01-01T00:00:00Z" +END_DATE_2 = "2022-07-31T00:00:00Z" + @pytest.fixture def list_financial_event_groups_stream(): - aws_signature = AWSSignature( - service="execute-api", - aws_access_key_id="AccessKeyId", - aws_secret_access_key="SecretAccessKey", - aws_session_token="SessionToken", - region="US", - ) - stream = ListFinancialEventGroups( - url_base="https://test.url", - aws_signature=aws_signature, - replication_start_date="2022-05-25T00:00:00Z", - replication_end_date="2022-05-26T00:00:00Z", - marketplace_id="id", - authenticator=None, - period_in_days=0, - report_options=None, - max_wait_seconds=500, - ) - return stream + def _internal(start_date: str = START_DATE_1, end_date: str = END_DATE_1): + aws_signature = AWSSignature( + service="execute-api", + aws_access_key_id="AccessKeyId", + aws_secret_access_key="SecretAccessKey", + aws_session_token="SessionToken", + region="US", + ) + stream = ListFinancialEventGroups( + url_base="https://test.url", + aws_signature=aws_signature, + replication_start_date=start_date, + replication_end_date=end_date, + marketplace_id="id", + authenticator=None, + period_in_days=0, + report_options=None, + max_wait_seconds=500, + ) + return stream + + return _internal @pytest.fixture def list_financial_events_stream(): - aws_signature = AWSSignature( - service="execute-api", - aws_access_key_id="AccessKeyId", - aws_secret_access_key="SecretAccessKey", - aws_session_token="SessionToken", - region="US", - ) - stream = ListFinancialEvents( - url_base="https://test.url", - aws_signature=aws_signature, - replication_start_date="2022-05-25T00:00:00Z", - replication_end_date="2022-05-26T00:00:00Z", - marketplace_id="id", - authenticator=None, - period_in_days=0, - 
report_options=None, - max_wait_seconds=500, - ) - return stream + def _internal(start_date: str = START_DATE_1, end_date: str = END_DATE_1): + aws_signature = AWSSignature( + service="execute-api", + aws_access_key_id="AccessKeyId", + aws_secret_access_key="SecretAccessKey", + aws_session_token="SessionToken", + region="US", + ) + stream = ListFinancialEvents( + url_base="https://test.url", + aws_signature=aws_signature, + replication_start_date=start_date, + replication_end_date=end_date, + marketplace_id="id", + authenticator=None, + period_in_days=0, + report_options=None, + max_wait_seconds=500, + ) + return stream + + return _internal def test_finance_stream_next_token(mocker, list_financial_event_groups_stream): @@ -135,48 +150,68 @@ def test_finance_stream_next_token(mocker, list_financial_event_groups_stream): token = "aabbccddeeff" expected = {"NextToken": token} mocker.patch.object(response, "json", return_value={"payload": expected}) - assert expected == list_financial_event_groups_stream.next_page_token(response) + assert expected == list_financial_event_groups_stream().next_page_token(response) mocker.patch.object(response, "json", return_value={"payload": {}}) - if list_financial_event_groups_stream.next_page_token(response) is not None: + if list_financial_event_groups_stream().next_page_token(response) is not None: assert False def test_financial_event_groups_stream_request_params(list_financial_event_groups_stream): - params = { - "FinancialEventGroupStartedAfter": "2022-05-25T00:00:00Z", + # test 1 + expected_params = { + "FinancialEventGroupStartedAfter": START_DATE_1, "MaxResultsPerPage": 100, - "FinancialEventGroupStartedBefore": "2022-05-26T00:00:00Z", + "FinancialEventGroupStartedBefore": END_DATE_1, } - assert params == list_financial_event_groups_stream.request_params({}, None) + assert expected_params == list_financial_event_groups_stream().request_params({}, None) + # test 2 token = "aabbccddeeff" - params = {"NextToken": token} - assert 
params == list_financial_event_groups_stream.request_params({}, {"NextToken": token}) + expected_params = {"NextToken": token} + assert expected_params == list_financial_event_groups_stream().request_params({}, {"NextToken": token}) + + # test 3 - for 180 days limit + expected_params = { + "FinancialEventGroupStartedAfter": pendulum.parse(END_DATE_2).subtract(days=180).strftime(DATE_TIME_FORMAT), + "MaxResultsPerPage": 100, + "FinancialEventGroupStartedBefore": END_DATE_2, + } + assert expected_params == list_financial_event_groups_stream(START_DATE_2, END_DATE_2).request_params({}, None) def test_financial_event_groups_stream_parse_response(mocker, list_financial_event_groups_stream): response = requests.Response() mocker.patch.object(response, "json", return_value=list_financial_event_groups_data) - for record in list_financial_event_groups_stream.parse_response(response, {}): + for record in list_financial_event_groups_stream().parse_response(response, {}): assert record == list_financial_event_groups_data.get("payload").get("FinancialEventGroupList")[0] def test_financial_events_stream_request_params(list_financial_events_stream): - params = {"PostedAfter": "2022-05-25T00:00:00Z", "MaxResultsPerPage": 100, "PostedBefore": "2022-05-26T00:00:00Z"} - assert params == list_financial_events_stream.request_params({}, None) + # test 1 + expected_params = {"PostedAfter": START_DATE_1, "MaxResultsPerPage": 100, "PostedBefore": END_DATE_1} + assert expected_params == list_financial_events_stream().request_params({}, None) + # test 2 token = "aabbccddeeff" - params = {"NextToken": token} - assert params == list_financial_events_stream.request_params({}, {"NextToken": token}) + expected_params = {"NextToken": token} + assert expected_params == list_financial_events_stream().request_params({}, {"NextToken": token}) + + # test 3 - for 180 days limit + expected_params = { + "PostedAfter": pendulum.parse(END_DATE_2).subtract(days=180).strftime(DATE_TIME_FORMAT), + 
"MaxResultsPerPage": 100, + "PostedBefore": END_DATE_2, + } + assert expected_params == list_financial_events_stream(START_DATE_2, END_DATE_2).request_params({}, None) def test_financial_events_stream_parse_response(mocker, list_financial_events_stream): response = requests.Response() mocker.patch.object(response, "json", return_value=list_financial_events_data) - for record in list_financial_events_stream.parse_response(response, {}): + for record in list_financial_events_stream().parse_response(response, {}): assert list_financial_events_data.get("payload").get("FinancialEvents").get("ShipmentEventList") == record.get("ShipmentEventList") assert list_financial_events_data.get("payload").get("FinancialEvents").get("RefundEventList") == record.get("RefundEventList") assert list_financial_events_data.get("payload").get("FinancialEvents").get("AdjustmentEventList") == record.get( diff --git a/airbyte-integrations/connectors/source-amplitude/Dockerfile b/airbyte-integrations/connectors/source-amplitude/Dockerfile index 247936e15209..3d88bb080ab9 100644 --- a/airbyte-integrations/connectors/source-amplitude/Dockerfile +++ b/airbyte-integrations/connectors/source-amplitude/Dockerfile @@ -12,5 +12,5 @@ RUN pip install . 
ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] -LABEL io.airbyte.version=0.1.8 +LABEL io.airbyte.version=0.1.10 LABEL io.airbyte.name=airbyte/source-amplitude diff --git a/airbyte-integrations/connectors/source-amplitude/source_amplitude/api.py b/airbyte-integrations/connectors/source-amplitude/source_amplitude/api.py index daac4eb408b2..65636acbdbd7 100644 --- a/airbyte-integrations/connectors/source-amplitude/source_amplitude/api.py +++ b/airbyte-integrations/connectors/source-amplitude/source_amplitude/api.py @@ -144,7 +144,16 @@ class Events(IncrementalAmplitudeStream): def parse_response(self, response: requests.Response, stream_state: Mapping[str, Any] = None, **kwargs) -> Iterable[Mapping]: state_value = stream_state[self.cursor_field] if stream_state else self._start_date.strftime(self.compare_date_template) - zip_file = zipfile.ZipFile(io.BytesIO(response.content)) + try: + zip_file = zipfile.ZipFile(io.BytesIO(response.content)) + except zipfile.BadZipFile as e: + self.logger.exception(e) + self.logger.error( + f"Received an invalid zip file in response to URL: {response.request.url}." 
+ f"The size of the response body is: {len(response.content)}" + ) + return [] + for gzip_filename in zip_file.namelist(): with zip_file.open(gzip_filename) as file: for record in self._parse_zip_file(file): @@ -164,7 +173,7 @@ def stream_slices(self, stream_state: Mapping[str, Any] = None, **kwargs) -> Ite slices.append( { "start": start.strftime(self.date_template), - "end": self._get_end_date(start).strftime(self.date_template), + "end": start.add(**self.time_interval).subtract(hours=1).strftime(self.date_template), } ) start = start.add(**self.time_interval) @@ -203,6 +212,9 @@ def request_params(self, stream_slice: Mapping[str, Any], **kwargs) -> MutableMa params["end"] = pendulum.parse(stream_slice["end"]).strftime(self.date_template) return params + def next_page_token(self, response: requests.Response) -> Optional[Mapping[str, Any]]: + return None + def path(self, **kwargs) -> str: return f"{self.api_version}/export" diff --git a/airbyte-integrations/connectors/source-amplitude/unit_tests/test_api.py b/airbyte-integrations/connectors/source-amplitude/unit_tests/test_api.py index a60e533d6ec5..7fb88cf99603 100644 --- a/airbyte-integrations/connectors/source-amplitude/unit_tests/test_api.py +++ b/airbyte-integrations/connectors/source-amplitude/unit_tests/test_api.py @@ -130,9 +130,8 @@ def test_request_params(self, stream_cls, expected): [ (ActiveUsers, {}), (AverageSessionLength, {}), - (Events, {}), ], - ids=["ActiveUsers", "AverageSessionLength", "Events"], + ids=["ActiveUsers", "AverageSessionLength"], ) def test_next_page_token(self, requests_mock, stream_cls, expected): days_ago = pendulum.now().subtract(days=2) @@ -176,7 +175,12 @@ def test_parse_zip(self): def test_stream_slices(self): stream = Events(pendulum.now().isoformat()) now = pendulum.now() - expected = [{"start": now.strftime(stream.date_template), "end": stream._get_end_date(now).strftime(stream.date_template)}] + expected = [ + { + "start": now.strftime(stream.date_template), + "end": 
stream._get_end_date(now).add(**stream.time_interval).subtract(hours=1).strftime(stream.date_template), + } + ] assert expected == stream.stream_slices() def test_request_params(self): diff --git a/airbyte-integrations/connectors/source-bigquery/Dockerfile b/airbyte-integrations/connectors/source-bigquery/Dockerfile index b1ea35f6ab86..dc501b8357f4 100644 --- a/airbyte-integrations/connectors/source-bigquery/Dockerfile +++ b/airbyte-integrations/connectors/source-bigquery/Dockerfile @@ -17,5 +17,5 @@ ENV APPLICATION source-bigquery COPY --from=build /airbyte /airbyte # Airbyte's build system uses these labels to know what to name and tag the docker images produced by this Dockerfile. -LABEL io.airbyte.version=0.1.7 +LABEL io.airbyte.version=0.1.8 LABEL io.airbyte.name=airbyte/source-bigquery diff --git a/airbyte-integrations/connectors/source-bigquery/src/test-integration/java/io/airbyte/integrations/source/bigquery/BigQuerySourceDatatypeTest.java b/airbyte-integrations/connectors/source-bigquery/src/test-integration/java/io/airbyte/integrations/source/bigquery/BigQuerySourceDatatypeTest.java index 05bc5b55ccff..fb7fad3e5231 100644 --- a/airbyte-integrations/connectors/source-bigquery/src/test-integration/java/io/airbyte/integrations/source/bigquery/BigQuerySourceDatatypeTest.java +++ b/airbyte-integrations/connectors/source-bigquery/src/test-integration/java/io/airbyte/integrations/source/bigquery/BigQuerySourceDatatypeTest.java @@ -284,7 +284,7 @@ protected void initTests() { .airbyteType(JsonSchemaType.STRING) .createTablePatternSql(CREATE_SQL_PATTERN) .addInsertValues("['a', 'b']") - .addExpectedValues("[{\"test_column\":\"a\"},{\"test_column\":\"b\"}]") + .addExpectedValues("[\"a\",\"b\"]") .build()); addDataTypeTestData( diff --git a/airbyte-integrations/connectors/source-bing-ads/Dockerfile b/airbyte-integrations/connectors/source-bing-ads/Dockerfile index 683c8ee29044..9a5b1791d115 100644 --- a/airbyte-integrations/connectors/source-bing-ads/Dockerfile +++ 
b/airbyte-integrations/connectors/source-bing-ads/Dockerfile @@ -12,5 +12,5 @@ RUN pip install . ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] -LABEL io.airbyte.version=0.1.7 +LABEL io.airbyte.version=0.1.8 LABEL io.airbyte.name=airbyte/source-bing-ads diff --git a/airbyte-integrations/connectors/source-bing-ads/bootstrap.md b/airbyte-integrations/connectors/source-bing-ads/bootstrap.md index 12f1d8862494..7dc3b750d3b2 100644 --- a/airbyte-integrations/connectors/source-bing-ads/bootstrap.md +++ b/airbyte-integrations/connectors/source-bing-ads/bootstrap.md @@ -31,7 +31,7 @@ Initially all fields in report streams have string values, connector uses `repor Connector uses `reports_start_date` config for initial reports sync and current date as an end data. -Connector has `hourly_reports`, `daily_reports`, `weekly_reports`, `monthly_reports` configs which allows to enable appropriate report streams. For example `account_performance_report_daily`, `ad_group_performance_report_daily` etc ... By default all report streams are disabled +Connector has `hourly_reports`, `daily_reports`, `weekly_reports`, `monthly_reports` report streams. For example `account_performance_report_daily`, `ad_group_performance_report_weekly`. All these reports streams will be generated on execute. 
## Request caching diff --git a/airbyte-integrations/connectors/source-bing-ads/source_bing_ads/client.py b/airbyte-integrations/connectors/source-bing-ads/source_bing_ads/client.py index adee0c764964..e1b13afec54c 100644 --- a/airbyte-integrations/connectors/source-bing-ads/source_bing_ads/client.py +++ b/airbyte-integrations/connectors/source-bing-ads/source_bing_ads/client.py @@ -38,10 +38,6 @@ def __init__( self, tenant_id: str, reports_start_date: str, - hourly_reports: bool, - daily_reports: bool, - weekly_reports: bool, - monthly_reports: bool, developer_token: str = None, client_id: str = None, client_secret: str = None, @@ -51,10 +47,6 @@ def __init__( self.authorization_data: Mapping[str, AuthorizationData] = {} self.refresh_token = refresh_token self.developer_token = developer_token - self.hourly_reports = hourly_reports - self.daily_reports = daily_reports - self.weekly_reports = weekly_reports - self.monthly_reports = monthly_reports self.client_id = client_id self.client_secret = client_secret diff --git a/airbyte-integrations/connectors/source-bing-ads/source_bing_ads/source.py b/airbyte-integrations/connectors/source-bing-ads/source_bing_ads/source.py index 424734f013c6..ca977c46d07a 100644 --- a/airbyte-integrations/connectors/source-bing-ads/source_bing_ads/source.py +++ b/airbyte-integrations/connectors/source-bing-ads/source_bing_ads/source.py @@ -597,16 +597,11 @@ def streams(self, config: Mapping[str, Any]) -> List[Stream]: Campaigns(client, config), ] - if config["hourly_reports"] or config["daily_reports"] or config["weekly_reports"] or config["monthly_reports"]: - streams.append(BudgetSummaryReport(client, config)) - - if config["hourly_reports"]: - streams.extend([c(client, config) for c in self.get_report_streams("Hourly")]) - if config["daily_reports"]: - streams.extend([c(client, config) for c in self.get_report_streams("Daily")]) - if config["weekly_reports"]: - streams.extend([c(client, config) for c in 
self.get_report_streams("Weekly")]) - if config["monthly_reports"]: - streams.extend([c(client, config) for c in self.get_report_streams("Monthly")]) + streams.append(BudgetSummaryReport(client, config)) + + streams.extend([c(client, config) for c in self.get_report_streams("Hourly")]) + streams.extend([c(client, config) for c in self.get_report_streams("Daily")]) + streams.extend([c(client, config) for c in self.get_report_streams("Weekly")]) + streams.extend([c(client, config) for c in self.get_report_streams("Monthly")]) return streams diff --git a/airbyte-integrations/connectors/source-bing-ads/source_bing_ads/spec.json b/airbyte-integrations/connectors/source-bing-ads/source_bing_ads/spec.json index c6c847e87703..7807377f8d14 100644 --- a/airbyte-integrations/connectors/source-bing-ads/source_bing_ads/spec.json +++ b/airbyte-integrations/connectors/source-bing-ads/source_bing_ads/spec.json @@ -8,11 +8,7 @@ "developer_token", "client_id", "refresh_token", - "reports_start_date", - "hourly_reports", - "daily_reports", - "weekly_reports", - "monthly_reports" + "reports_start_date" ], "additionalProperties": true, "properties": { @@ -64,30 +60,6 @@ "default": "2020-01-01", "description": "The start date from which to begin replicating report data. Any data generated before this date will not be replicated in reports. This is a UTC date in YYYY-MM-DD format.", "order": 5 - }, - "hourly_reports": { - "title": "Enable hourly-aggregate reports", - "type": "boolean", - "description": "Toggle this to enable replicating reports aggregated using an hourly time window. More information about report aggregation can be found in the docs.", - "default": false - }, - "daily_reports": { - "title": "Enable daily-aggregate reports", - "type": "boolean", - "description": "Toggle this to enable replicating reports aggregated using a daily time window. 
More information about report aggregation can be found in the docs.", - "default": false - }, - "weekly_reports": { - "title": "Enable weekly-aggregate reports", - "type": "boolean", - "description": "Toggle this to enable replicating reports aggregated using a weekly time window running from Sunday to Saturday. More information about report aggregation can be found in the docs.", - "default": false - }, - "monthly_reports": { - "title": "Enable monthly-aggregate reports", - "type": "boolean", - "description": "Toggle this to enable replicating reports aggregated using a monthly time window. More information about report aggregation can be found in the docs.", - "default": false } } }, diff --git a/airbyte-integrations/connectors/source-bing-ads/unit_tests/test_source.py b/airbyte-integrations/connectors/source-bing-ads/unit_tests/test_source.py index 7871efbbd8be..f6a0e0dbb081 100644 --- a/airbyte-integrations/connectors/source-bing-ads/unit_tests/test_source.py +++ b/airbyte-integrations/connectors/source-bing-ads/unit_tests/test_source.py @@ -3,7 +3,7 @@ # import json -from unittest.mock import MagicMock, patch +from unittest.mock import patch import pytest import source_bing_ads @@ -28,12 +28,6 @@ def logger_mock_fixture(): @patch.object(source_bing_ads.source, "Client") def test_streams_config_based(mocked_client, config): streams = SourceBingAds().streams(config) - assert len(streams) == 15 - - -@patch.object(source_bing_ads.source, "Client") -def test_streams_all(mocked_client): - streams = SourceBingAds().streams(MagicMock()) assert len(streams) == 25 diff --git a/airbyte-integrations/connectors/source-clickhouse-strict-encrypt/build.gradle b/airbyte-integrations/connectors/source-clickhouse-strict-encrypt/build.gradle index a7f5d46c47c4..7116dd22ab3e 100644 --- a/airbyte-integrations/connectors/source-clickhouse-strict-encrypt/build.gradle +++ b/airbyte-integrations/connectors/source-clickhouse-strict-encrypt/build.gradle @@ -24,5 +24,5 @@ dependencies { 
integrationTestJavaImplementation project(':airbyte-integrations:connectors:source-clickhouse') integrationTestJavaImplementation project(':airbyte-integrations:connectors:source-clickhouse-strict-encrypt') integrationTestJavaImplementation testFixtures(project(':airbyte-integrations:connectors:source-jdbc')) - integrationTestJavaImplementation libs.testcontainers.clickhouse + integrationTestJavaImplementation libs.connectors.source.testcontainers.clickhouse } diff --git a/airbyte-integrations/connectors/source-clickhouse/build.gradle b/airbyte-integrations/connectors/source-clickhouse/build.gradle index d54384a8f660..5280ad611802 100644 --- a/airbyte-integrations/connectors/source-clickhouse/build.gradle +++ b/airbyte-integrations/connectors/source-clickhouse/build.gradle @@ -22,5 +22,5 @@ dependencies { integrationTestJavaImplementation project(':airbyte-integrations:bases:standard-source-test') integrationTestJavaImplementation project(':airbyte-integrations:connectors:source-clickhouse') integrationTestJavaImplementation testFixtures(project(':airbyte-integrations:connectors:source-jdbc')) - integrationTestJavaImplementation libs.testcontainers.clickhouse + integrationTestJavaImplementation libs.connectors.source.testcontainers.clickhouse } diff --git a/airbyte-integrations/connectors/source-clickhouse/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/AbstractSshClickHouseSourceAcceptanceTest.java b/airbyte-integrations/connectors/source-clickhouse/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/AbstractSshClickHouseSourceAcceptanceTest.java index 1a08cc757aa1..dc1ca2afa2b1 100644 --- a/airbyte-integrations/connectors/source-clickhouse/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/AbstractSshClickHouseSourceAcceptanceTest.java +++ 
b/airbyte-integrations/connectors/source-clickhouse/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/AbstractSshClickHouseSourceAcceptanceTest.java @@ -29,6 +29,7 @@ import java.util.HashMap; import javax.sql.DataSource; import org.testcontainers.containers.ClickHouseContainer; +import org.testcontainers.containers.Network; public abstract class AbstractSshClickHouseSourceAcceptanceTest extends SourceAcceptanceTest { @@ -38,6 +39,7 @@ public abstract class AbstractSshClickHouseSourceAcceptanceTest extends SourceAc private static final String STREAM_NAME = "id_and_name"; private static final String STREAM_NAME2 = "starships"; private static final String SCHEMA_NAME = "default"; + private static final Network network = Network.newNetwork(); public abstract SshTunnel.TunnelMethod getTunnelMethod(); @@ -93,12 +95,12 @@ protected void setupEnvironment(final TestDestinationEnv environment) throws Exc } private void startTestContainers() { - bastion.initAndStartBastion(); + bastion.initAndStartBastion(network); initAndStartJdbcContainer(); } private void initAndStartJdbcContainer() { - db = (ClickHouseContainer) new ClickHouseContainer("yandex/clickhouse-server:21.8.8.29-alpine").withNetwork(bastion.getNetWork()); + db = (ClickHouseContainer) new ClickHouseContainer("yandex/clickhouse-server:21.8.8.29-alpine").withNetwork(network); db.start(); } diff --git a/airbyte-integrations/connectors/source-cockroachdb-strict-encrypt/Dockerfile b/airbyte-integrations/connectors/source-cockroachdb-strict-encrypt/Dockerfile index f68558cfd607..b3ee12030a54 100644 --- a/airbyte-integrations/connectors/source-cockroachdb-strict-encrypt/Dockerfile +++ b/airbyte-integrations/connectors/source-cockroachdb-strict-encrypt/Dockerfile @@ -16,5 +16,5 @@ ENV APPLICATION source-cockroachdb-strict-encrypt COPY --from=build /airbyte /airbyte -LABEL io.airbyte.version=0.1.12 +LABEL io.airbyte.version=0.1.13 LABEL 
io.airbyte.name=airbyte/source-cockroachdb-strict-encrypt diff --git a/airbyte-integrations/connectors/source-cockroachdb-strict-encrypt/build.gradle b/airbyte-integrations/connectors/source-cockroachdb-strict-encrypt/build.gradle index e7526db6fe9b..ce46931f0410 100644 --- a/airbyte-integrations/connectors/source-cockroachdb-strict-encrypt/build.gradle +++ b/airbyte-integrations/connectors/source-cockroachdb-strict-encrypt/build.gradle @@ -17,9 +17,9 @@ dependencies { implementation project(':airbyte-integrations:connectors:source-relational-db') implementation project(':airbyte-integrations:connectors:source-cockroachdb') - implementation libs.testcontainers - implementation libs.testcontainers.jdbc - implementation libs.testcontainers.cockroachdb + implementation libs.connectors.testcontainers + implementation libs.connectors.testcontainers.jdbc + implementation libs.connectors.testcontainers.cockroachdb implementation libs.postgresql integrationTestJavaImplementation project(':airbyte-integrations:connectors:source-cockroachdb') diff --git a/airbyte-integrations/connectors/source-cockroachdb/build.gradle b/airbyte-integrations/connectors/source-cockroachdb/build.gradle index 1ce93c69f01c..1a73d68242aa 100644 --- a/airbyte-integrations/connectors/source-cockroachdb/build.gradle +++ b/airbyte-integrations/connectors/source-cockroachdb/build.gradle @@ -21,7 +21,7 @@ dependencies { testImplementation testFixtures(project(':airbyte-integrations:connectors:source-jdbc')) - testImplementation libs.testcontainers.cockroachdb + testImplementation libs.connectors.testcontainers.cockroachdb testImplementation 'org.apache.commons:commons-lang3:3.11' integrationTestJavaImplementation project(':airbyte-integrations:connectors:source-cockroachdb') diff --git a/airbyte-integrations/connectors/source-db2-strict-encrypt/Dockerfile b/airbyte-integrations/connectors/source-db2-strict-encrypt/Dockerfile index dfa81a644c1e..4340d6cc98bd 100644 --- 
a/airbyte-integrations/connectors/source-db2-strict-encrypt/Dockerfile +++ b/airbyte-integrations/connectors/source-db2-strict-encrypt/Dockerfile @@ -16,5 +16,5 @@ ENV APPLICATION source-db2-strict-encrypt COPY --from=build /airbyte /airbyte -LABEL io.airbyte.version=0.1.10 +LABEL io.airbyte.version=0.1.11 LABEL io.airbyte.name=airbyte/source-db2-strict-encrypt diff --git a/airbyte-integrations/connectors/source-db2-strict-encrypt/build.gradle b/airbyte-integrations/connectors/source-db2-strict-encrypt/build.gradle index 4e64e8cb1718..2c16590255c4 100644 --- a/airbyte-integrations/connectors/source-db2-strict-encrypt/build.gradle +++ b/airbyte-integrations/connectors/source-db2-strict-encrypt/build.gradle @@ -22,7 +22,7 @@ dependencies { testImplementation testFixtures(project(':airbyte-integrations:connectors:source-jdbc')) testImplementation project(':airbyte-test-utils') - testImplementation libs.testcontainers.db2 + testImplementation libs.connectors.testcontainers.db2 integrationTestJavaImplementation project(':airbyte-integrations:bases:standard-source-test') integrationTestJavaImplementation project(':airbyte-integrations:connectors:source-db2') diff --git a/airbyte-integrations/connectors/source-db2/Dockerfile b/airbyte-integrations/connectors/source-db2/Dockerfile index d2e3e152e7cc..379bf889370e 100644 --- a/airbyte-integrations/connectors/source-db2/Dockerfile +++ b/airbyte-integrations/connectors/source-db2/Dockerfile @@ -16,5 +16,5 @@ ENV APPLICATION source-db2 COPY --from=build /airbyte /airbyte -LABEL io.airbyte.version=0.1.10 +LABEL io.airbyte.version=0.1.11 LABEL io.airbyte.name=airbyte/source-db2 diff --git a/airbyte-integrations/connectors/source-db2/build.gradle b/airbyte-integrations/connectors/source-db2/build.gradle index 5be43e55a70f..fb6fda9c43d9 100644 --- a/airbyte-integrations/connectors/source-db2/build.gradle +++ b/airbyte-integrations/connectors/source-db2/build.gradle @@ -21,7 +21,7 @@ dependencies { testImplementation 
testFixtures(project(':airbyte-integrations:connectors:source-jdbc')) testImplementation project(':airbyte-test-utils') - testImplementation libs.testcontainers.db2 + testImplementation libs.connectors.testcontainers.db2 integrationTestJavaImplementation project(':airbyte-integrations:bases:standard-source-test') integrationTestJavaImplementation project(':airbyte-integrations:connectors:source-db2') diff --git a/airbyte-integrations/connectors/source-db2/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/Db2SourceCertificateAcceptanceTest.java b/airbyte-integrations/connectors/source-db2/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/Db2SourceCertificateAcceptanceTest.java index 9f3ea0629b19..ddb52ae71ee0 100644 --- a/airbyte-integrations/connectors/source-db2/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/Db2SourceCertificateAcceptanceTest.java +++ b/airbyte-integrations/connectors/source-db2/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/Db2SourceCertificateAcceptanceTest.java @@ -15,6 +15,7 @@ import io.airbyte.integrations.source.db2.Db2Source; import io.airbyte.integrations.standardtest.source.SourceAcceptanceTest; import io.airbyte.integrations.standardtest.source.TestDestinationEnv; +import io.airbyte.integrations.util.HostPortResolver; import io.airbyte.protocol.models.CatalogHelpers; import io.airbyte.protocol.models.ConfiguredAirbyteCatalog; import io.airbyte.protocol.models.ConfiguredAirbyteStream; @@ -104,7 +105,7 @@ protected void setupEnvironment(final TestDestinationEnv environment) throws Exc config = Jsons.jsonNode(ImmutableMap.builder() .put("host", db.getHost()) - .put("port", db.getMappedPort(50000)) + .put("port", db.getFirstMappedPort()) .put("db", db.getDatabaseName()) .put("username", db.getUsername()) .put("password", db.getPassword()) diff --git 
a/airbyte-integrations/connectors/source-delighted/Dockerfile b/airbyte-integrations/connectors/source-delighted/Dockerfile index 068fdf381a2e..42986453d08c 100644 --- a/airbyte-integrations/connectors/source-delighted/Dockerfile +++ b/airbyte-integrations/connectors/source-delighted/Dockerfile @@ -12,5 +12,5 @@ RUN pip install . ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] -LABEL io.airbyte.version=0.1.3 +LABEL io.airbyte.version=0.1.4 LABEL io.airbyte.name=airbyte/source-delighted diff --git a/airbyte-integrations/connectors/source-delighted/integration_tests/invalid_config.json b/airbyte-integrations/connectors/source-delighted/integration_tests/invalid_config.json index b2856c90e4be..6e2dc7c913db 100644 --- a/airbyte-integrations/connectors/source-delighted/integration_tests/invalid_config.json +++ b/airbyte-integrations/connectors/source-delighted/integration_tests/invalid_config.json @@ -1,4 +1,4 @@ { "api_key": "wrong api key", - "since": 1625328197 + "since": "2022-01-01 00:00:00" } diff --git a/airbyte-integrations/connectors/source-delighted/source_delighted/source.py b/airbyte-integrations/connectors/source-delighted/source_delighted/source.py index 87d8d0c18130..f584d5c98792 100644 --- a/airbyte-integrations/connectors/source-delighted/source_delighted/source.py +++ b/airbyte-integrations/connectors/source-delighted/source_delighted/source.py @@ -8,6 +8,7 @@ from typing import Any, Iterable, List, Mapping, MutableMapping, Optional, Tuple from urllib.parse import parse_qsl, urlparse +import pendulum import requests from airbyte_cdk.models import SyncMode from airbyte_cdk.sources import AbstractSource @@ -27,10 +28,14 @@ class DelightedStream(HttpStream, ABC): # Define primary key to all streams as primary key primary_key = "id" - def __init__(self, since: int, **kwargs): + def __init__(self, since: pendulum.datetime, **kwargs): super().__init__(**kwargs) self.since = since + 
@property + def since_ts(self) -> int: + return int(self.since.timestamp()) + def next_page_token(self, response: requests.Response) -> Optional[Mapping[str, Any]]: response_data = response.json() if len(response_data) == self.limit: @@ -40,7 +45,7 @@ def next_page_token(self, response: requests.Response) -> Optional[Mapping[str, def request_params( self, stream_state: Mapping[str, Any], stream_slice: Mapping[str, any] = None, next_page_token: Mapping[str, Any] = None ) -> MutableMapping[str, Any]: - params = {"per_page": self.limit, "since": self.since} + params = {"per_page": self.limit, "since": self.since_ts} if next_page_token: params.update(**next_page_token) return params @@ -157,8 +162,7 @@ def check_connection(self, logger, config) -> Tuple[bool, any]: try: auth = self._get_authenticator(config) - args = {"authenticator": auth, "since": config["since"]} - stream = SurveyResponses(**args) + stream = SurveyResponses(authenticator=auth, since=pendulum.parse(config["since"])) records = stream.read_records(sync_mode=SyncMode.full_refresh) next(records) return True, None @@ -167,10 +171,10 @@ def check_connection(self, logger, config) -> Tuple[bool, any]: def streams(self, config: Mapping[str, Any]) -> List[Stream]: auth = self._get_authenticator(config) - args = {"authenticator": auth, "since": config["since"]} + stream_kwargs = {"authenticator": auth, "since": pendulum.parse(config["since"])} return [ - Bounces(**args), - People(**args), - SurveyResponses(**args), - Unsubscribes(**args), + Bounces(**stream_kwargs), + People(**stream_kwargs), + SurveyResponses(**stream_kwargs), + Unsubscribes(**stream_kwargs), ] diff --git a/airbyte-integrations/connectors/source-delighted/source_delighted/spec.json b/airbyte-integrations/connectors/source-delighted/source_delighted/spec.json index 5c6e276f36c8..0292ddfbb50e 100644 --- a/airbyte-integrations/connectors/source-delighted/source_delighted/spec.json +++ 
b/airbyte-integrations/connectors/source-delighted/source_delighted/spec.json @@ -8,14 +8,19 @@ "additionalProperties": false, "properties": { "since": { - "type": "integer", - "description": "An Unix timestamp to retrieve records created on or after this time.", - "examples": [1625328167] + "title": "Since", + "type": "string", + "description": "The date from which you'd like to replicate the data", + "examples": ["2022-05-30 04:50:23"], + "pattern": "^[0-9]{4}-[0-9]{2}-[0-9]{2} ([0-9]{2}:[0-9]{2}:[0-9]{2})?$", + "order": 0 }, "api_key": { + "title": "Delighted API Key", "type": "string", "description": "A Delighted API key.", - "airbyte_secret": true + "airbyte_secret": true, + "order": 1 } } } diff --git a/airbyte-integrations/connectors/source-delighted/unit_tests/unit_test.py b/airbyte-integrations/connectors/source-delighted/unit_tests/unit_test.py index eed97adb87dc..d3d1c3116ab1 100644 --- a/airbyte-integrations/connectors/source-delighted/unit_tests/unit_test.py +++ b/airbyte-integrations/connectors/source-delighted/unit_tests/unit_test.py @@ -2,6 +2,7 @@ # Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
# +import pendulum import pytest import responses from airbyte_cdk.models import SyncMode @@ -12,7 +13,7 @@ def test_config(): return { "api_key": "test_api_key", - "since": "1641289584", + "since": "2022-01-01 00:00:00", } @@ -74,7 +75,7 @@ def test_not_output_records_where_cursor_field_equals_state(state, test_config, status=200, ) - stream = stream_class(test_config["since"], authenticator=SourceDelighted()._get_authenticator(config=test_config)) + stream = stream_class(pendulum.parse(test_config["since"]), authenticator=SourceDelighted()._get_authenticator(config=test_config)) records = [r for r in stream.read_records(SyncMode.incremental, stream_state=state[stream.name])] assert not records diff --git a/airbyte-integrations/connectors/source-dockerhub/.dockerignore b/airbyte-integrations/connectors/source-dockerhub/.dockerignore new file mode 100644 index 000000000000..e311a05884c4 --- /dev/null +++ b/airbyte-integrations/connectors/source-dockerhub/.dockerignore @@ -0,0 +1,6 @@ +* +!Dockerfile +!main.py +!source_dockerhub +!setup.py +!secrets diff --git a/airbyte-integrations/connectors/source-dockerhub/Dockerfile b/airbyte-integrations/connectors/source-dockerhub/Dockerfile new file mode 100644 index 000000000000..058503c031b4 --- /dev/null +++ b/airbyte-integrations/connectors/source-dockerhub/Dockerfile @@ -0,0 +1,38 @@ +FROM python:3.9.11-alpine3.15 as base + +# build and load all requirements +FROM base as builder +WORKDIR /airbyte/integration_code + +# upgrade pip to the latest version +RUN apk --no-cache upgrade \ + && pip install --upgrade pip \ + && apk --no-cache add tzdata build-base + + +COPY setup.py ./ +# install necessary packages to a temporary folder +RUN pip install --prefix=/install . 
+ +# build a clean environment +FROM base +WORKDIR /airbyte/integration_code + +# copy all loaded and built libraries to a pure basic image +COPY --from=builder /install /usr/local +# add default timezone settings +COPY --from=builder /usr/share/zoneinfo/Etc/UTC /etc/localtime +RUN echo "Etc/UTC" > /etc/timezone + +# bash is installed for more convenient debugging. +RUN apk --no-cache add bash + +# copy payload code only +COPY main.py ./ +COPY source_dockerhub ./source_dockerhub + +ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" +ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] + +LABEL io.airbyte.version=0.1.0 +LABEL io.airbyte.name=airbyte/source-dockerhub diff --git a/airbyte-integrations/connectors/source-dockerhub/README.md b/airbyte-integrations/connectors/source-dockerhub/README.md new file mode 100644 index 000000000000..043b7f07bd9f --- /dev/null +++ b/airbyte-integrations/connectors/source-dockerhub/README.md @@ -0,0 +1,145 @@ +# Dockerhub Source + +This is the repository for the Dockerhub source connector, written in Python. +For information about how to use this connector within Airbyte, see [the documentation](https://docs.airbyte.io/integrations/sources/dockerhub) (not active yet). + +## Local development + +### Prerequisites +**To iterate on this connector, make sure to complete this prerequisites section.** + +#### Minimum Python version required `= 3.7.0` + +#### Build & Activate Virtual Environment and install dependencies +From this connector directory, create a virtual environment: +``` +python -m venv .venv +``` + +This will generate a virtualenv for this module in `.venv/`. Make sure this venv is active in your +development environment of choice. To activate it from the terminal, run: +``` +source .venv/bin/activate +pip install -r requirements.txt +pip install '.[tests]' +``` +If you are in an IDE, follow your IDE's instructions to activate the virtualenv. 
+ +Note that while we are installing dependencies from `requirements.txt`, you should only edit `setup.py` for your dependencies. `requirements.txt` is +used for editable installs (`pip install -e`) to pull in Python dependencies from the monorepo and will call `setup.py`. +If this is mumbo jumbo to you, don't worry about it, just put your deps in `setup.py` but install using `pip install -r requirements.txt` and everything +should work as you expect. + +#### Building via Gradle +You can also build the connector in Gradle. This is typically used in CI and not needed for your development workflow. + +To build using Gradle, from the Airbyte repository root, run: +``` +./gradlew :airbyte-integrations:connectors:source-dockerhub:build +``` + + + +### Locally running the connector +``` +python main.py spec +python main.py check --config sample_files/config.json +python main.py discover --config sample_files/config.json +python main.py read --config sample_files/config.json --catalog integration_tests/configured_catalog.json +``` + +### Locally running the connector docker image + +#### Build +First, make sure you build the latest Docker image: +``` +docker build . -t airbyte/source-dockerhub:dev +``` + +You can also build the connector image via Gradle: +``` +./gradlew :airbyte-integrations:connectors:source-dockerhub:airbyteDocker +``` +When building via Gradle, the docker image name and tag, respectively, are the values of the `io.airbyte.name` and `io.airbyte.version` `LABEL`s in +the Dockerfile. 
+ +#### Run +Then run any of the connector commands as follows: +``` +docker run --rm airbyte/source-dockerhub:dev spec +docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-dockerhub:dev check --config /sample_files/config.json +docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-dockerhub:dev discover --config /sample_files/config.json +docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/source-dockerhub:dev read --config /sample_files/config.json --catalog /integration_tests/configured_catalog.json +``` +## Testing +Make sure to familiarize yourself with [pytest test discovery](https://docs.pytest.org/en/latest/goodpractices.html#test-discovery) to know how your test files and methods should be named. +First install test dependencies into your virtual environment: +``` +pip install .[tests] +``` +### Unit Tests +To run unit tests locally, from the connector directory run: +``` +python -m pytest unit_tests +``` + +### Integration Tests +There are two types of integration tests: Acceptance Tests (Airbyte's test suite for all source connectors) and custom integration tests (which are specific to this connector). +#### Custom Integration tests +Place custom tests inside `integration_tests/` folder, then, from the connector root, run +``` +python -m pytest integration_tests +``` +#### Acceptance Tests +Customize `acceptance-test-config.yml` file to configure tests. See [Source Acceptance Tests](https://docs.airbyte.io/connector-development/testing-connectors/source-acceptance-tests-reference) for more information. +If your connector requires to create or destroy resources for use during acceptance tests create fixtures for it and place them inside integration_tests/acceptance.py. 
+To run your integration tests with acceptance tests, from the connector root, run +``` +python -m pytest integration_tests -p integration_tests.acceptance +``` +To run your integration tests with docker + +### Using gradle to run tests +All commands should be run from airbyte project root. +To run unit tests: +``` +./gradlew :airbyte-integrations:connectors:source-dockerhub:unitTest +``` +To run acceptance and custom integration tests: +``` +./gradlew :airbyte-integrations:connectors:source-dockerhub:integrationTest +``` + +## Dependency Management +All of your dependencies should go in `setup.py`, NOT `requirements.txt`. The requirements file is only used to connect internal Airbyte dependencies in the monorepo for local development. +We split dependencies between two groups, dependencies that are: +* required for your connector to work need to go to `MAIN_REQUIREMENTS` list. +* required for the testing need to go to `TEST_REQUIREMENTS` list + +### Publishing a new version of the connector +You've checked out the repo, implemented a million dollar feature, and you're ready to share your changes with the world. Now what? +1. Make sure your changes are passing unit and integration tests. +1. Bump the connector version in `Dockerfile` -- just increment the value of the `LABEL io.airbyte.version` appropriately (we use [SemVer](https://semver.org/)). +1. Create a Pull Request. +1. Pat yourself on the back for being an awesome contributor. +1. Someone from Airbyte will take a look at your PR and iterate with you to merge it into master. 
diff --git a/airbyte-integrations/connectors/source-dockerhub/acceptance-test-config.yml b/airbyte-integrations/connectors/source-dockerhub/acceptance-test-config.yml new file mode 100644 index 000000000000..353c0fd0ae64 --- /dev/null +++ b/airbyte-integrations/connectors/source-dockerhub/acceptance-test-config.yml @@ -0,0 +1,24 @@ +# See [Source Acceptance Tests](https://docs.airbyte.io/connector-development/testing-connectors/source-acceptance-tests-reference) +# for more information about how to configure these tests +connector_image: airbyte/source-dockerhub:dev +tests: + spec: + - spec_path: "source_dockerhub/spec.yaml" + connection: + - config_path: "sample_files/config.json" + status: "succeed" + # even with an incorrect username the api still returns 200 so just ignoring the invalid config check for now + # - config_path: "integration_tests/invalid_config.json" + # status: "failed" + discovery: + - config_path: "sample_files/config.json" + basic_read: + - config_path: "sample_files/config.json" + configured_catalog_path: "integration_tests/configured_catalog.json" + empty_streams: [] + full_refresh: + - config_path: "sample_files/config.json" + configured_catalog_path: "integration_tests/configured_catalog.json" + # testing sequentially for same results can fail because of pull counts increasing for an image between runs + ignored_fields: + "docker_hub": ["pull_count", "last_updated"] diff --git a/airbyte-integrations/connectors/source-dockerhub/acceptance-test-docker.sh b/airbyte-integrations/connectors/source-dockerhub/acceptance-test-docker.sh new file mode 100644 index 000000000000..c51577d10690 --- /dev/null +++ b/airbyte-integrations/connectors/source-dockerhub/acceptance-test-docker.sh @@ -0,0 +1,16 @@ +#!/usr/bin/env sh + +# Build latest connector image +docker build . 
-t $(cat acceptance-test-config.yml | grep "connector_image" | head -n 1 | cut -d: -f2-) + +# Pull latest acceptance-test image +docker pull airbyte/source-acceptance-test:latest + +# Run +docker run --rm -it \ + -v /var/run/docker.sock:/var/run/docker.sock \ + -v /tmp:/tmp \ + -v $(pwd):/test_input \ + airbyte/source-acceptance-test \ + --acceptance-test-config /test_input + diff --git a/airbyte-integrations/connectors/source-dockerhub/bootstrap.md b/airbyte-integrations/connectors/source-dockerhub/bootstrap.md new file mode 100644 index 000000000000..0c0f4fdec9b0 --- /dev/null +++ b/airbyte-integrations/connectors/source-dockerhub/bootstrap.md @@ -0,0 +1,14 @@ +# Dockerhub Source API + +- Origin issue/discussion: https://github.com/airbytehq/airbyte/issues/12773 +- API docs: https://docs.docker.com/registry/spec/api/ +- Helpful StackOverflow answer on DockerHub API auth call: https://stackoverflow.com/questions/56193110/how-can-i-use-docker-registry-http-api-v2-to-obtain-a-list-of-all-repositories-i#answer-68654659 + +All API calls need to be authenticated, but for public info, you can just obtain a short-lived token from [this endpoint](https://auth.docker.io/token?service=registry.docker.io&scope=repository:library/alpine:pull) without any username/password, so this is what we have done for simplicity. 
+ +If you are reading this in the future and need to expand this source connector to include private data, do take note that you'll need to add the `/secrets/config.json` files and change the auth strategy (we think it takes either HTTP basic auth or Oauth2 to the same endpoint, with the right scope): + +- Original notes: https://github.com/airbytehq/airbyte/issues/12773#issuecomment-1126785570 +- Auth docs: https://docs.docker.com/registry/spec/auth/jwt/ +- Might also want to use OAuth2: https://docs.docker.com/registry/spec/auth/oauth/ +- Scope docs: https://docs.docker.com/registry/spec/auth/scope/ \ No newline at end of file diff --git a/airbyte-integrations/connectors/source-dockerhub/build.gradle b/airbyte-integrations/connectors/source-dockerhub/build.gradle new file mode 100644 index 000000000000..4cc7b7adfa87 --- /dev/null +++ b/airbyte-integrations/connectors/source-dockerhub/build.gradle @@ -0,0 +1,9 @@ +plugins { + id 'airbyte-python' + id 'airbyte-docker' + id 'airbyte-source-acceptance-test' +} + +airbytePython { + moduleDirectory 'source_dockerhub' +} diff --git a/airbyte-cdk/python/airbyte_cdk/sources/declarative/requesters/request_params/__init__.py b/airbyte-integrations/connectors/source-dockerhub/integration_tests/__init__.py similarity index 100% rename from airbyte-cdk/python/airbyte_cdk/sources/declarative/requesters/request_params/__init__.py rename to airbyte-integrations/connectors/source-dockerhub/integration_tests/__init__.py diff --git a/airbyte-integrations/connectors/source-dockerhub/integration_tests/abnormal_state.json b/airbyte-integrations/connectors/source-dockerhub/integration_tests/abnormal_state.json new file mode 100644 index 000000000000..52b0f2c2118f --- /dev/null +++ b/airbyte-integrations/connectors/source-dockerhub/integration_tests/abnormal_state.json @@ -0,0 +1,5 @@ +{ + "todo-stream-name": { + "todo-field-name": "todo-abnormal-value" + } +} diff --git 
a/airbyte-integrations/connectors/source-dockerhub/integration_tests/acceptance.py b/airbyte-integrations/connectors/source-dockerhub/integration_tests/acceptance.py new file mode 100644 index 000000000000..1a6f55e7224b --- /dev/null +++ b/airbyte-integrations/connectors/source-dockerhub/integration_tests/acceptance.py @@ -0,0 +1,20 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +import os +import pathlib +import shutil + +import pytest + +pytest_plugins = ("source_acceptance_test.plugin",) + + +@pytest.fixture(scope="session", autouse=True) +def connector_setup(): + """This source doesn't have any secrets, so this copies the sample_files config into secrets/ for acceptance tests""" + src_folder = pathlib.Path(__file__).parent.parent.resolve() + os.makedirs(f"{src_folder}/secrets", exist_ok=True) + shutil.copy(f"{src_folder}/sample_files/config.json", f"{src_folder}/secrets/") diff --git a/airbyte-integrations/connectors/source-dockerhub/integration_tests/catalog.json b/airbyte-integrations/connectors/source-dockerhub/integration_tests/catalog.json new file mode 100644 index 000000000000..9627353a77b8 --- /dev/null +++ b/airbyte-integrations/connectors/source-dockerhub/integration_tests/catalog.json @@ -0,0 +1,62 @@ +{ + "streams": [ + { + "name": "docker_hub", + "json_schema": { + "$schema": "http://json-schema.org/draft-04/schema#", + "type": "object", + "properties": { + "user": { + "type": ["null", "string"] + }, + "name": { + "type": ["null", "string"] + }, + "namespace": { + "type": ["null", "string"] + }, + "repository_type": { + "type": ["null", "string"] + }, + "status": { + "type": ["null", "integer"] + }, + "description": { + "type": ["null", "string"] + }, + "is_private": { + "type": ["null", "boolean"] + }, + "is_automated": { + "type": ["null", "boolean"] + }, + "can_edit": { + "type": ["null", "boolean"] + }, + "star_count": { + "type": ["null", "integer"] + }, + "pull_count": { + "type": ["null", "integer"] + }, + 
"last_updated": { + "type": ["null", "string"] + }, + "is_migrated": { + "type": ["null", "boolean"] + }, + "collaborator_count": { + "type": ["null", "integer"] + }, + "affiliation": { + "type": ["null", "string"] + }, + "hub_user": { + "type": ["null", "string"] + } + } + }, + "supported_sync_modes": ["full_refresh"] + } + ] +} diff --git a/airbyte-integrations/connectors/source-dockerhub/integration_tests/configured_catalog.json b/airbyte-integrations/connectors/source-dockerhub/integration_tests/configured_catalog.json new file mode 100644 index 000000000000..6f8198f3af96 --- /dev/null +++ b/airbyte-integrations/connectors/source-dockerhub/integration_tests/configured_catalog.json @@ -0,0 +1,67 @@ +{ + "streams": [ + { + "stream": { + "name": "docker_hub", + "json_schema": { + "$schema": "http://json-schema.org/draft-04/schema#", + "type": "object", + "properties": { + "user": { + "type": ["null", "string"] + }, + "name": { + "type": ["null", "string"] + }, + "namespace": { + "type": ["null", "string"] + }, + "repository_type": { + "type": ["null", "string"] + }, + "status": { + "type": ["null", "integer"] + }, + "description": { + "type": ["null", "string"] + }, + "is_private": { + "type": ["null", "boolean"] + }, + "is_automated": { + "type": ["null", "boolean"] + }, + "can_edit": { + "type": ["null", "boolean"] + }, + "star_count": { + "type": ["null", "integer"] + }, + "pull_count": { + "type": ["null", "integer"] + }, + "last_updated": { + "type": ["null", "string"] + }, + "is_migrated": { + "type": ["null", "boolean"] + }, + "collaborator_count": { + "type": ["null", "integer"] + }, + "affiliation": { + "type": ["null", "string"] + }, + "hub_user": { + "type": ["null", "string"] + } + } + }, + "supported_sync_modes": ["full_refresh"] + }, + "source_defined_cursor": false, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + } + ] +} diff --git a/airbyte-integrations/connectors/source-dockerhub/integration_tests/invalid_config.json 
b/airbyte-integrations/connectors/source-dockerhub/integration_tests/invalid_config.json new file mode 100644 index 000000000000..dc1c9833fc58 --- /dev/null +++ b/airbyte-integrations/connectors/source-dockerhub/integration_tests/invalid_config.json @@ -0,0 +1,3 @@ +{ + "docker_username": "8cf32219-675f-41c3-a879-adc79f6e670e-475f57f0-8037-4ff0-93df-a913fb8fb055" +} diff --git a/airbyte-integrations/connectors/source-dockerhub/integration_tests/sample_state.json b/airbyte-integrations/connectors/source-dockerhub/integration_tests/sample_state.json new file mode 100644 index 000000000000..3587e579822d --- /dev/null +++ b/airbyte-integrations/connectors/source-dockerhub/integration_tests/sample_state.json @@ -0,0 +1,5 @@ +{ + "todo-stream-name": { + "todo-field-name": "value" + } +} diff --git a/airbyte-integrations/connectors/source-dockerhub/main.py b/airbyte-integrations/connectors/source-dockerhub/main.py new file mode 100644 index 000000000000..e9414316d39d --- /dev/null +++ b/airbyte-integrations/connectors/source-dockerhub/main.py @@ -0,0 +1,13 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +import sys + +from airbyte_cdk.entrypoint import launch +from source_dockerhub import SourceDockerhub + +if __name__ == "__main__": + source = SourceDockerhub() + launch(source, sys.argv[1:]) diff --git a/airbyte-integrations/connectors/source-dockerhub/requirements.txt b/airbyte-integrations/connectors/source-dockerhub/requirements.txt new file mode 100644 index 000000000000..0411042aa091 --- /dev/null +++ b/airbyte-integrations/connectors/source-dockerhub/requirements.txt @@ -0,0 +1,2 @@ +-e ../../bases/source-acceptance-test +-e . 
diff --git a/airbyte-integrations/connectors/source-dockerhub/sample_files/config.json b/airbyte-integrations/connectors/source-dockerhub/sample_files/config.json new file mode 100644 index 000000000000..e9c198a5974f --- /dev/null +++ b/airbyte-integrations/connectors/source-dockerhub/sample_files/config.json @@ -0,0 +1,3 @@ +{ + "docker_username": "airbyte" +} diff --git a/airbyte-integrations/connectors/source-dockerhub/setup.py b/airbyte-integrations/connectors/source-dockerhub/setup.py new file mode 100644 index 000000000000..f382fbc56177 --- /dev/null +++ b/airbyte-integrations/connectors/source-dockerhub/setup.py @@ -0,0 +1,27 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +from setuptools import find_packages, setup + +MAIN_REQUIREMENTS = ["airbyte-cdk~=0.1", "requests~=2.28.0"] + +TEST_REQUIREMENTS = [ + "pytest~=6.1", + "pytest-mock~=3.6.1", + "source-acceptance-test", +] + +setup( + name="source_dockerhub", + description="Source implementation for Dockerhub.", + author="Airbyte", + author_email="shawn@airbyte.io", + packages=find_packages(), + install_requires=MAIN_REQUIREMENTS, + package_data={"": ["*.json", "*.yaml", "schemas/*.json", "schemas/shared/*.json"]}, + extras_require={ + "tests": TEST_REQUIREMENTS, + }, +) diff --git a/airbyte-integrations/connectors/source-dockerhub/source_dockerhub/__init__.py b/airbyte-integrations/connectors/source-dockerhub/source_dockerhub/__init__.py new file mode 100644 index 000000000000..4961990cca6c --- /dev/null +++ b/airbyte-integrations/connectors/source-dockerhub/source_dockerhub/__init__.py @@ -0,0 +1,8 @@ +# +# Copyright (c) 2021 Airbyte, Inc., all rights reserved. 
+# + + +from .source import SourceDockerhub + +__all__ = ["SourceDockerhub"] diff --git a/airbyte-integrations/connectors/source-dockerhub/source_dockerhub/schemas/docker_hub.json b/airbyte-integrations/connectors/source-dockerhub/source_dockerhub/schemas/docker_hub.json new file mode 100644 index 000000000000..f72e7df20c30 --- /dev/null +++ b/airbyte-integrations/connectors/source-dockerhub/source_dockerhub/schemas/docker_hub.json @@ -0,0 +1,54 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "user": { + "type": ["null", "string"] + }, + "name": { + "type": ["null", "string"] + }, + "namespace": { + "type": ["null", "string"] + }, + "repository_type": { + "type": ["null", "string"] + }, + "status": { + "type": ["null", "integer"] + }, + "description": { + "type": ["null", "string"] + }, + "is_private": { + "type": ["null", "boolean"] + }, + "is_automated": { + "type": ["null", "boolean"] + }, + "can_edit": { + "type": ["null", "boolean"] + }, + "star_count": { + "type": ["null", "integer"] + }, + "pull_count": { + "type": ["null", "integer"] + }, + "last_updated": { + "type": ["null", "string"] + }, + "is_migrated": { + "type": ["null", "boolean"] + }, + "collaborator_count": { + "type": ["null", "integer"] + }, + "affiliation": { + "type": ["null", "string"] + }, + "hub_user": { + "type": ["null", "string"] + } + } +} diff --git a/airbyte-integrations/connectors/source-dockerhub/source_dockerhub/source.py b/airbyte-integrations/connectors/source-dockerhub/source_dockerhub/source.py new file mode 100644 index 000000000000..1e04bebe5a80 --- /dev/null +++ b/airbyte-integrations/connectors/source-dockerhub/source_dockerhub/source.py @@ -0,0 +1,90 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+# + +import logging +from typing import Any, Iterable, List, Mapping, Optional, Tuple +from urllib.parse import urlparse + +import requests +from airbyte_cdk.sources import AbstractSource +from airbyte_cdk.sources.streams import Stream +from airbyte_cdk.sources.streams.http import HttpStream + +logger = logging.getLogger("airbyte") + + +class SourceDockerhub(AbstractSource): + jwt = None + + def check_connection(self, logger, config) -> Tuple[bool, any]: + username = config["docker_username"] + + # get JWT + jwt_url = "https://auth.docker.io/token?service=registry.docker.io&scope=repository:library/alpine:pull" + response = requests.get(jwt_url) + self.jwt = response.json()["token"] + + # check that jwt is valid and that username is valid + url = f"https://hub.docker.com/v2/repositories/{username}/" + try: + response = requests.get(url, headers={"Authorization": self.jwt}) + response.raise_for_status() + except requests.exceptions.HTTPError as e: + if e.response.status_code == 401: + logger.info(str(e)) + return False, "Invalid JWT received, check if auth.docker.io changed API" + elif e.response.status_code == 404: + logger.info(str(e)) + return False, f"User '{username}' not found, check if hub.docker.com/u/{username} exists" + else: + logger.info(str(e)) + return False, f"Error getting basic user info for Docker user '{username}', unexpected error" + json_response = response.json() + repocount = json_response["count"] + logger.info(f"Connection check for Docker user '{username}' successful: {repocount} repos found") + return True, None + + def streams(self, config: Mapping[str, Any]) -> List[Stream]: + return [DockerHub(jwt=self.jwt, config=config)] + + +class DockerHub(HttpStream): + url_base = "https://hub.docker.com/v2" + + # Set this as a noop. + primary_key = None + + def __init__(self, jwt: str, config: Mapping[str, Any], **kwargs): + super().__init__() + # Here's where we set the variable from our input to pass it down to the source. 
+ self.jwt = jwt + self.docker_username = config["docker_username"] + + def next_page_token(self, response: requests.Response) -> Optional[Mapping[str, Any]]: + decoded_response = response.json() + if decoded_response["next"] is None: + return None + else: + para = urlparse(decoded_response["next"]).query + return "?" + para + + def path( + self, stream_state: Mapping[str, Any] = None, stream_slice: Mapping[str, Any] = None, next_page_token: Mapping[str, Any] = "" + ) -> str: + return f"/v2/repositories/{self.docker_username}/" + str(next_page_token or "") + + def request_headers( + self, stream_state: Mapping[str, Any], stream_slice: Mapping[str, Any] = None, next_page_token: Mapping[str, Any] = None + ) -> Mapping[str, Any]: + return {"Authorization": self.jwt} + + def parse_response( + self, + response: requests.Response, + stream_state: Mapping[str, Any], + stream_slice: Mapping[str, Any] = None, + next_page_token: Mapping[str, Any] = None, + ) -> Iterable[Mapping]: + for repository in response.json().get("results"): + yield repository diff --git a/airbyte-integrations/connectors/source-dockerhub/source_dockerhub/spec.yaml b/airbyte-integrations/connectors/source-dockerhub/source_dockerhub/spec.yaml new file mode 100644 index 000000000000..2461d7f0a8d9 --- /dev/null +++ b/airbyte-integrations/connectors/source-dockerhub/source_dockerhub/spec.yaml @@ -0,0 +1,15 @@ +documentationUrl: https://docs.airbyte.io/integrations/sources/dockerhub +connectionSpecification: + $schema: http://json-schema.org/draft-07/schema# + title: Dockerhub Spec + type: object + required: + - docker_username + additionalProperties: false + properties: + docker_username: + type: string + description: Username of DockerHub person or organization (for https://hub.docker.com/v2/repositories/USERNAME/ API call) + pattern: ^[a-z0-9_\-]+$ + examples: + - airbyte diff --git a/airbyte-cdk/python/unit_tests/sources/declarative/requesters/request_headers/__init__.py 
b/airbyte-integrations/connectors/source-dockerhub/unit_tests/__init__.py similarity index 100% rename from airbyte-cdk/python/unit_tests/sources/declarative/requesters/request_headers/__init__.py rename to airbyte-integrations/connectors/source-dockerhub/unit_tests/__init__.py diff --git a/airbyte-integrations/connectors/source-dockerhub/unit_tests/test_source.py b/airbyte-integrations/connectors/source-dockerhub/unit_tests/test_source.py new file mode 100644 index 000000000000..72b935f199cd --- /dev/null +++ b/airbyte-integrations/connectors/source-dockerhub/unit_tests/test_source.py @@ -0,0 +1,23 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + +from unittest.mock import MagicMock + +from source_dockerhub.source import SourceDockerhub + + +def test_check_connection(): + source = SourceDockerhub() + logger_mock, config_mock = MagicMock(), { + "docker_username": "airbyte" + } # shouldnt actually ping network request in test but we will skip for now + assert source.check_connection(logger_mock, config_mock) == (True, None) + + +def test_streams(): + source = SourceDockerhub() + config_mock = MagicMock() + streams = source.streams(config_mock) + expected_streams_number = 1 + assert len(streams) == expected_streams_number diff --git a/airbyte-integrations/connectors/source-dockerhub/unit_tests/test_streams.py b/airbyte-integrations/connectors/source-dockerhub/unit_tests/test_streams.py new file mode 100644 index 000000000000..379d9a84cc2e --- /dev/null +++ b/airbyte-integrations/connectors/source-dockerhub/unit_tests/test_streams.py @@ -0,0 +1,52 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+# + +from unittest.mock import MagicMock + +import requests +from source_dockerhub.source import DockerHub + + +def test_next_page_token(): + stream = DockerHub(jwt="foo", config={"docker_username": "foo"}) + + # mocking the request with a response that has a next page token + response = requests.Response() + response.url = "https://foo" + response.json = MagicMock() + response.json.return_value = {"next": "https://foo?page=2"} + inputs = {"response": response} + + expected_token = "?page=2" # expected next page token + assert stream.next_page_token(**inputs) == expected_token + + +# cant get this to work - TypeError: 'list' object is not an iterator +# def test_parse_response(patch_base_class, mocker): +# response = mocker.MagicMock() +# response.json.return_value = {"one": 1} +# stream = DockerHub(jwt="foo", config={"docker_username": "foo"}) + +# inputs = { +# "response": response, +# "stream_state": MagicMock(), +# "stream_slice": MagicMock(), +# "next_page_token": MagicMock(), +# } + +# expected_parsed_object = {"one": 1} +# assert next(stream.parse_response(**inputs)) == expected_parsed_object + + +def test_request_headers(): + stream = DockerHub(jwt="foo", config={"docker_username": "foo"}) + + inputs = { + "stream_state": MagicMock(), + "stream_slice": MagicMock(), + "next_page_token": MagicMock(), + } + + expected_headers = {"Authorization": "foo"} + assert stream.request_headers(**inputs) == expected_headers diff --git a/airbyte-integrations/connectors/source-e2e-test-cloud/Dockerfile b/airbyte-integrations/connectors/source-e2e-test-cloud/Dockerfile index c02dd323f6d6..0fbcce65c870 100644 --- a/airbyte-integrations/connectors/source-e2e-test-cloud/Dockerfile +++ b/airbyte-integrations/connectors/source-e2e-test-cloud/Dockerfile @@ -17,5 +17,5 @@ ENV ENABLE_SENTRY true COPY --from=build /airbyte /airbyte -LABEL io.airbyte.version=2.1.0 +LABEL io.airbyte.version=2.1.1 LABEL io.airbyte.name=airbyte/source-e2e-test-cloud diff --git 
a/airbyte-integrations/connectors/source-e2e-test/Dockerfile b/airbyte-integrations/connectors/source-e2e-test/Dockerfile index 0b2ff224a224..6d77c2b77af0 100644 --- a/airbyte-integrations/connectors/source-e2e-test/Dockerfile +++ b/airbyte-integrations/connectors/source-e2e-test/Dockerfile @@ -17,5 +17,5 @@ ENV ENABLE_SENTRY true COPY --from=build /airbyte /airbyte -LABEL io.airbyte.version=2.1.0 +LABEL io.airbyte.version=2.1.1 LABEL io.airbyte.name=airbyte/source-e2e-test diff --git a/airbyte-integrations/connectors/source-facebook-marketing/Dockerfile b/airbyte-integrations/connectors/source-facebook-marketing/Dockerfile index 9dafcb4007be..5aeb4ca834d9 100644 --- a/airbyte-integrations/connectors/source-facebook-marketing/Dockerfile +++ b/airbyte-integrations/connectors/source-facebook-marketing/Dockerfile @@ -13,5 +13,5 @@ ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] -LABEL io.airbyte.version=0.2.50 +LABEL io.airbyte.version=0.2.53 LABEL io.airbyte.name=airbyte/source-facebook-marketing diff --git a/airbyte-integrations/connectors/source-facebook-marketing/source_facebook_marketing/schemas/ad_account.json b/airbyte-integrations/connectors/source-facebook-marketing/source_facebook_marketing/schemas/ad_account.json index 18a1cc863a53..f6f4ea4f41b8 100644 --- a/airbyte-integrations/connectors/source-facebook-marketing/source_facebook_marketing/schemas/ad_account.json +++ b/airbyte-integrations/connectors/source-facebook-marketing/source_facebook_marketing/schemas/ad_account.json @@ -191,7 +191,7 @@ "type": ["null", "string"] }, "tax_id": { - "type": ["null", "number"] + "type": ["null", "string"] }, "tax_id_status": { "type": ["null", "number"] diff --git a/airbyte-integrations/connectors/source-facebook-marketing/source_facebook_marketing/schemas/ad_sets.json b/airbyte-integrations/connectors/source-facebook-marketing/source_facebook_marketing/schemas/ad_sets.json index 
bb203911eb61..b33dca0342b0 100644 --- a/airbyte-integrations/connectors/source-facebook-marketing/source_facebook_marketing/schemas/ad_sets.json +++ b/airbyte-integrations/connectors/source-facebook-marketing/source_facebook_marketing/schemas/ad_sets.json @@ -88,6 +88,20 @@ } } }, + "bid_strategy": { + "type": ["null", "string"] + }, + "bid_amount": { + "type": ["null", "number"] + }, + "bid_constraints": { + "type": ["null", "object"], + "properties": { + "roas_average_floor": { + "type": ["null", "integer"] + } + } + }, "adlabels": { "type": ["null", "array"], "items": { diff --git a/airbyte-integrations/connectors/source-facebook-marketing/source_facebook_marketing/streams/base_insight_streams.py b/airbyte-integrations/connectors/source-facebook-marketing/source_facebook_marketing/streams/base_insight_streams.py index 839dc4df1695..3754a699a8ff 100644 --- a/airbyte-integrations/connectors/source-facebook-marketing/source_facebook_marketing/streams/base_insight_streams.py +++ b/airbyte-integrations/connectors/source-facebook-marketing/source_facebook_marketing/streams/base_insight_streams.py @@ -111,20 +111,9 @@ def read_records( stream_state: Mapping[str, Any] = None, ) -> Iterable[Mapping[str, Any]]: """Waits for current job to finish (slice) and yield its result""" - - today = pendulum.today(tz="UTC").date() - date_start = stream_state and stream_state.get("date_start") - if date_start: - date_start = pendulum.parse(date_start).date() - job = stream_slice["insight_job"] for obj in job.get_result(): - record = obj.export_all_data() - if date_start: - updated_time = pendulum.parse(record["updated_time"]).date() - if updated_time <= date_start or updated_time >= today: - continue - yield record + yield obj.export_all_data() self._completed_slices.add(job.interval.start) if job.interval.start == self._next_cursor_value: @@ -172,11 +161,9 @@ def get_updated_state(self, current_stream_state: MutableMapping[str, Any], late def _date_intervals(self) -> 
Iterator[pendulum.Date]: """Get date period to sync""" - yesterday = pendulum.yesterday(tz="UTC").date() - end_date = min(self._end_date, yesterday) - if end_date < self._next_cursor_value: + if self._end_date < self._next_cursor_value: return - date_range = end_date - self._next_cursor_value + date_range = self._end_date - self._next_cursor_value yield from date_range.range("days", self.time_increment) def _advance_cursor(self): @@ -195,14 +182,9 @@ def _generate_async_jobs(self, params: Mapping) -> Iterator[AsyncJob]: :return: """ - today = pendulum.today(tz="UTC").date() - refresh_date = today - self.insights_lookback_period - for ts_start in self._date_intervals(): if ts_start in self._completed_slices: - if ts_start < refresh_date: - continue - self._completed_slices.remove(ts_start) + continue ts_end = ts_start + pendulum.duration(days=self.time_increment - 1) interval = pendulum.Period(ts_start, ts_end) yield InsightAsyncJob(api=self._api.api, edge_object=self._api.account, interval=interval, params=params) @@ -242,7 +224,7 @@ def _get_start_date(self) -> pendulum.Date: :return: the first date to sync """ - today = pendulum.today(tz="UTC").date() + today = pendulum.today().date() oldest_date = today - self.INSIGHTS_RETENTION_PERIOD refresh_date = today - self.insights_lookback_period if self._cursor_value: diff --git a/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/conftest.py b/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/conftest.py index b00621561dee..422eb569c847 100644 --- a/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/conftest.py +++ b/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/conftest.py @@ -54,15 +54,3 @@ def api_fixture(some_config, requests_mock, fb_account_response): requests_mock.register_uri("GET", FacebookSession.GRAPH + f"/{FB_API_VERSION}/me/adaccounts", [fb_account_response]) requests_mock.register_uri("GET", FacebookSession.GRAPH + 
f"/{FB_API_VERSION}/act_{some_config['account_id']}/", [fb_account_response]) return api - - -@fixture -def set_today(mocker, monkeypatch): - def inner(date: str): - today = pendulum.parse(date) - yesterday = today - pendulum.duration(days=1) - monkeypatch.setattr(pendulum, "today", mocker.MagicMock(return_value=today)) - monkeypatch.setattr(pendulum, "yesterday", mocker.MagicMock(return_value=yesterday)) - return yesterday, today - - return inner diff --git a/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/helpers.py b/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/helpers.py index e26e4747a284..afff7653e69c 100644 --- a/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/helpers.py +++ b/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/helpers.py @@ -2,53 +2,4 @@ # Copyright (c) 2022 Airbyte, Inc., all rights reserved. # - -from typing import Any, MutableMapping - -from airbyte_cdk.models import SyncMode -from airbyte_cdk.sources.streams import Stream - - -def read_full_refresh(stream_instance: Stream): - records = [] - slices = stream_instance.stream_slices(sync_mode=SyncMode.full_refresh) - for slice in slices: - records.extend(list(stream_instance.read_records(stream_slice=slice, sync_mode=SyncMode.full_refresh))) - return records - - -def read_incremental(stream_instance: Stream, stream_state: MutableMapping[str, Any]): - records = [] - stream_instance.state = stream_state - slices = stream_instance.stream_slices(sync_mode=SyncMode.incremental, stream_state=stream_state) - for slice in slices: - records.extend(list(stream_instance.read_records(sync_mode=SyncMode.incremental, stream_slice=slice, stream_state=stream_state))) - stream_state.clear() - stream_state.update(stream_instance.state) - return records - - -class FakeInsightAsyncJobManager: - def __init__(self, jobs, **kwargs): - self.jobs = jobs - - def completed_jobs(self): - yield from self.jobs - - -class 
FakeInsightAsyncJob: - updated_insights = {} - - @classmethod - def update_insight(cls, date_start, updated_time): - cls.updated_insights[date_start] = updated_time - - def __init__(self, interval, **kwargs): - self.interval = interval - - def get_result(self): - return [self] - - def export_all_data(self): - date_start = str(self.interval.start) - return {"date_start": date_start, "updated_time": self.updated_insights.get(date_start, date_start)} +# diff --git a/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/test_base_insight_streams.py b/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/test_base_insight_streams.py index 7036b4b40f44..2b0e8cbd2e58 100644 --- a/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/test_base_insight_streams.py +++ b/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/test_base_insight_streams.py @@ -6,9 +6,7 @@ import pendulum import pytest -import source_facebook_marketing.streams.base_insight_streams from airbyte_cdk.models import SyncMode -from helpers import FakeInsightAsyncJob, FakeInsightAsyncJobManager, read_full_refresh, read_incremental from pendulum import duration from source_facebook_marketing.streams import AdsInsights from source_facebook_marketing.streams.async_job import AsyncJob, InsightAsyncJob @@ -182,7 +180,7 @@ def test_stream_slices_no_state_close_to_now(self, api, async_manager_mock, rece async_manager_mock.assert_called_once() args, kwargs = async_manager_mock.call_args generated_jobs = list(kwargs["jobs"]) - assert len(generated_jobs) == (end_date - start_date).days + assert len(generated_jobs) == (end_date - start_date).days + 1 assert generated_jobs[0].interval.start == start_date.date() assert generated_jobs[1].interval.start == start_date.date() + duration(days=1) @@ -219,7 +217,7 @@ def test_stream_slices_with_state_close_to_now(self, api, async_manager_mock, re async_manager_mock.assert_called_once() args, kwargs = 
async_manager_mock.call_args generated_jobs = list(kwargs["jobs"]) - assert len(generated_jobs) == (end_date - start_date).days + assert len(generated_jobs) == (end_date - start_date).days + 1 assert generated_jobs[0].interval.start == start_date.date() assert generated_jobs[1].interval.start == start_date.date() + duration(days=1) @@ -292,72 +290,3 @@ def test_fields_custom(self, api): ) assert stream.fields == ["account_id", "account_currency"] - - def test_completed_slices_in_lookback_period(self, api, monkeypatch, set_today): - start_date = pendulum.parse("2020-03-01") - end_date = pendulum.parse("2020-05-01") - set_today("2020-04-01") - - monkeypatch.setattr(source_facebook_marketing.streams.base_insight_streams, "InsightAsyncJob", FakeInsightAsyncJob) - monkeypatch.setattr(source_facebook_marketing.streams.base_insight_streams, "InsightAsyncJobManager", FakeInsightAsyncJobManager) - - state = { - AdsInsights.cursor_field: "2020-03-19", - "slices": [ - "2020-03-21", - "2020-03-22", - "2020-03-23", - ], - "time_increment": 1, - } - - stream = AdsInsights(api=api, start_date=start_date, end_date=end_date, insights_lookback_window=10) - stream.state = state - assert stream._completed_slices == {pendulum.Date(2020, 3, 21), pendulum.Date(2020, 3, 22), pendulum.Date(2020, 3, 23)} - - slices = stream.stream_slices(stream_state=state, sync_mode=SyncMode.incremental) - slices = [x["insight_job"].interval.start for x in slices] - - assert pendulum.parse("2020-03-21").date() not in slices - assert pendulum.parse("2020-03-22").date() in slices - assert pendulum.parse("2020-03-23").date() in slices - assert stream._completed_slices == {pendulum.Date(2020, 3, 21)} - - def test_incremental_lookback_period_updated(self, api, monkeypatch, set_today): - start_date = pendulum.parse("2020-03-01") - end_date = pendulum.parse("2020-05-01") - yesterday, _ = set_today("2020-04-01") - - monkeypatch.setattr(source_facebook_marketing.streams.base_insight_streams, "InsightAsyncJob", 
FakeInsightAsyncJob) - monkeypatch.setattr(source_facebook_marketing.streams.base_insight_streams, "InsightAsyncJobManager", FakeInsightAsyncJobManager) - - stream = AdsInsights(api=api, start_date=start_date, end_date=end_date, insights_lookback_window=20) - - records = read_full_refresh(stream) - assert len(records) == (yesterday - start_date).days + 1 - assert records[0]["date_start"] == str(start_date.date()) - assert records[-1]["date_start"] == str(yesterday.date()) - - state = {AdsInsights.cursor_field: "2020-03-20", "time_increment": 1} - records = read_incremental(stream, state) - assert len(records) == (yesterday - pendulum.parse("2020-03-20")).days - assert records[0]["date_start"] == "2020-03-21" - assert records[-1]["date_start"] == str(yesterday.date()) - assert state == {"date_start": str(yesterday.date()), "slices": [], "time_increment": 1} - - yesterday, _ = set_today("2020-04-02") - records = read_incremental(stream, state) - assert records == [{"date_start": str(yesterday.date()), "updated_time": str(yesterday.date())}] - assert state == {"date_start": str(yesterday.date()), "slices": [], "time_increment": 1} - - yesterday, _ = set_today("2020-04-03") - FakeInsightAsyncJob.update_insight("2020-03-26", "2020-04-01") - FakeInsightAsyncJob.update_insight("2020-03-27", "2020-04-02") - FakeInsightAsyncJob.update_insight("2020-03-28", "2020-04-03") - - records = read_incremental(stream, state) - assert records == [ - {"date_start": "2020-03-27", "updated_time": "2020-04-02"}, - {"date_start": "2020-04-02", "updated_time": "2020-04-02"}, - ] - assert state == {"date_start": str(yesterday.date()), "slices": [], "time_increment": 1} diff --git a/airbyte-integrations/connectors/source-faker/Dockerfile b/airbyte-integrations/connectors/source-faker/Dockerfile index bc9d825d4109..70f432803a07 100644 --- a/airbyte-integrations/connectors/source-faker/Dockerfile +++ b/airbyte-integrations/connectors/source-faker/Dockerfile @@ -34,5 +34,5 @@ COPY source_faker 
./source_faker ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] -LABEL io.airbyte.version=0.1.4 +LABEL io.airbyte.version=0.1.5 LABEL io.airbyte.name=airbyte/source-faker diff --git a/airbyte-integrations/connectors/source-faker/source_faker/products.json b/airbyte-integrations/connectors/source-faker/source_faker/products.json index e58bd4c5b670..2381a1a3d9b2 100644 --- a/airbyte-integrations/connectors/source-faker/source_faker/products.json +++ b/airbyte-integrations/connectors/source-faker/source_faker/products.json @@ -5,7 +5,7 @@ "model": "MX-5", "year": 2008, "price": 2869, - "created_at": "2022-02-01 17:02:19" + "created_at": "2022-02-01T17:02:19Z" }, { "id": 2, @@ -13,7 +13,7 @@ "model": "C-Class", "year": 2009, "price": 42397, - "created_at": "2021-01-25 14:31:33" + "created_at": "2021-01-25T14:31:33Z" }, { "id": 3, @@ -21,7 +21,7 @@ "model": "Accord Crosstour", "year": 2011, "price": 63293, - "created_at": "2021-02-11 05:36:03" + "created_at": "2021-02-11T05:36:03Z" }, { "id": 4, @@ -29,7 +29,7 @@ "model": "Jimmy", "year": 1998, "price": 34079, - "created_at": "2022-01-24 03:00:03" + "created_at": "2022-01-24T03:00:03Z" }, { "id": 5, @@ -37,7 +37,7 @@ "model": "FX", "year": 2004, "price": 17036, - "created_at": "2021-10-02 03:55:44" + "created_at": "2021-10-02T03:55:44Z" }, { "id": 6, @@ -45,7 +45,7 @@ "model": "Intrepid", "year": 2002, "price": 65498, - "created_at": "2022-01-18 00:41:08" + "created_at": "2022-01-18T00:41:08Z" }, { "id": 7, @@ -53,7 +53,7 @@ "model": "Frontier", "year": 2005, "price": 14516, - "created_at": "2021-04-22 16:37:44" + "created_at": "2021-04-22T16:37:44Z" }, { "id": 8, @@ -61,7 +61,7 @@ "model": "Express 1500", "year": 2007, "price": 13023, - "created_at": "2021-07-12 07:13:04" + "created_at": "2021-07-12T07:13:04Z" }, { "id": 9, @@ -69,7 +69,7 @@ "model": "Continental GTC", "year": 2008, "price": 43458, - "created_at": "2021-03-17 05:43:15" +
"created_at": "2021-03-17T05:43:15Z" }, { "id": 10, @@ -77,7 +77,7 @@ "model": "DTS", "year": 2008, "price": 43859, - "created_at": "2021-08-12 07:33:58" + "created_at": "2021-08-12T07:33:58Z" }, { "id": 11, @@ -85,7 +85,7 @@ "model": "Ram 2500", "year": 2000, "price": 82904, - "created_at": "2021-09-03 10:51:16" + "created_at": "2021-09-03T10:51:16Z" }, { "id": 12, @@ -93,7 +93,7 @@ "model": "SJ 410", "year": 1984, "price": 38667, - "created_at": "2021-01-11 00:15:46" + "created_at": "2021-01-11T00:15:46Z" }, { "id": 13, @@ -101,7 +101,7 @@ "model": "S4", "year": 2005, "price": 2391, - "created_at": "2021-09-06 03:31:10" + "created_at": "2021-09-06T03:31:10Z" }, { "id": 14, @@ -109,7 +109,7 @@ "model": "Suburban 2500", "year": 1998, "price": 55733, - "created_at": "2021-10-18 17:26:05" + "created_at": "2021-10-18T17:26:05Z" }, { "id": 15, @@ -117,7 +117,7 @@ "model": "Ranger", "year": 2000, "price": 20228, - "created_at": "2022-03-24 04:03:19" + "created_at": "2022-03-24T04:03:19Z" }, { "id": 16, @@ -125,7 +125,7 @@ "model": "Corvette", "year": 2009, "price": 75052, - "created_at": "2021-12-31 03:38:21" + "created_at": "2021-12-31T03:38:21Z" }, { "id": 17, @@ -133,7 +133,7 @@ "model": "Pajero", "year": 1993, "price": 84058, - "created_at": "2021-10-15 00:25:34" + "created_at": "2021-10-15T00:25:34Z" }, { "id": 18, @@ -141,7 +141,7 @@ "model": "LS", "year": 2002, "price": 34081, - "created_at": "2022-02-14 22:12:01" + "created_at": "2022-02-14T22:12:01Z" }, { "id": 19, @@ -149,7 +149,7 @@ "model": "Magnum", "year": 2005, "price": 85545, - "created_at": "2021-07-25 22:49:48" + "created_at": "2021-07-25T22:49:48Z" }, { "id": 20, @@ -157,7 +157,7 @@ "model": "Grand Am", "year": 2001, "price": 54837, - "created_at": "2021-10-15 14:08:30" + "created_at": "2021-10-15T14:08:30Z" }, { "id": 21, @@ -165,7 +165,7 @@ "model": "Suburban 1500", "year": 2006, "price": 89410, - "created_at": "2021-03-23 15:40:43" + "created_at": "2021-03-23T15:40:43Z" }, { "id": 22, @@ -173,7 
+173,7 @@ "model": "Sierra 1500", "year": 2005, "price": 14288, - "created_at": "2021-08-30 13:40:04" + "created_at": "2021-08-30T13:40:04Z" }, { "id": 23, @@ -181,7 +181,7 @@ "model": "3500", "year": 1995, "price": 12011, - "created_at": "2022-04-24 13:11:08" + "created_at": "2022-04-24T13:11:08Z" }, { "id": 24, @@ -189,7 +189,7 @@ "model": "Mazda5", "year": 2006, "price": 6393, - "created_at": "2021-07-07 14:14:33" + "created_at": "2021-07-07T14:14:33Z" }, { "id": 25, @@ -197,7 +197,7 @@ "model": "Camaro", "year": 1967, "price": 71590, - "created_at": "2021-01-10 21:50:22" + "created_at": "2021-01-10T21:50:22Z" }, { "id": 26, @@ -205,7 +205,7 @@ "model": "Explorer Sport Trac", "year": 2010, "price": 23498, - "created_at": "2022-04-20 00:52:20" + "created_at": "2022-04-20T00:52:20Z" }, { "id": 27, @@ -213,7 +213,7 @@ "model": "Caravan", "year": 1985, "price": 50071, - "created_at": "2022-01-05 10:13:31" + "created_at": "2022-01-05T10:13:31Z" }, { "id": 28, @@ -221,7 +221,7 @@ "model": "240SX", "year": 1992, "price": 38379, - "created_at": "2022-04-07 04:48:48" + "created_at": "2022-04-07T04:48:48Z" }, { "id": 29, @@ -229,7 +229,7 @@ "model": "Intrigue", "year": 2002, "price": 21376, - "created_at": "2021-10-01 13:30:49" + "created_at": "2021-10-01T13:30:49Z" }, { "id": 30, @@ -237,7 +237,7 @@ "model": "TT", "year": 2011, "price": 40893, - "created_at": "2021-02-28 23:06:37" + "created_at": "2021-02-28T23:06:37Z" }, { "id": 31, @@ -245,7 +245,7 @@ "model": "Crown Victoria", "year": 2006, "price": 86225, - "created_at": "2021-01-28 23:33:27" + "created_at": "2021-01-28T23:33:27Z" }, { "id": 32, @@ -253,7 +253,7 @@ "model": "Tacoma", "year": 2003, "price": 73558, - "created_at": "2022-01-28 22:02:04" + "created_at": "2022-01-28T22:02:04Z" }, { "id": 33, @@ -261,7 +261,7 @@ "model": "Regal", "year": 1994, "price": 32279, - "created_at": "2022-04-04 13:35:49" + "created_at": "2022-04-04T13:35:49Z" }, { "id": 34, @@ -269,7 +269,7 @@ "model": "C-Class", "year": 2001, 
"price": 98732, - "created_at": "2021-03-30 23:16:05" + "created_at": "2021-03-30T23:16:05Z" }, { "id": 35, @@ -277,7 +277,7 @@ "model": "Sierra 3500", "year": 2002, "price": 48267, - "created_at": "2021-07-30 20:29:51" + "created_at": "2021-07-30T20:29:51Z" }, { "id": 36, @@ -285,7 +285,7 @@ "model": "G6", "year": 2005, "price": 16766, - "created_at": "2021-03-24 07:53:33" + "created_at": "2021-03-24T07:53:33Z" }, { "id": 37, @@ -293,7 +293,7 @@ "model": "Outback Sport", "year": 2002, "price": 34523, - "created_at": "2021-12-23 22:47:32" + "created_at": "2021-12-23T22:47:32Z" }, { "id": 38, @@ -301,7 +301,7 @@ "model": "F430", "year": 2007, "price": 31677, - "created_at": "2021-01-11 04:49:57" + "created_at": "2021-01-11T04:49:57Z" }, { "id": 39, @@ -309,7 +309,7 @@ "model": "Montero", "year": 2003, "price": 67136, - "created_at": "2021-05-10 07:37:56" + "created_at": "2021-05-10T07:37:56Z" }, { "id": 40, @@ -317,7 +317,7 @@ "model": "Sentra", "year": 1993, "price": 78236, - "created_at": "2021-11-10 23:48:26" + "created_at": "2021-11-10T23:48:26Z" }, { "id": 41, @@ -325,7 +325,7 @@ "model": "3000GT", "year": 1993, "price": 58150, - "created_at": "2021-09-08 06:55:22" + "created_at": "2021-09-08T06:55:22Z" }, { "id": 42, @@ -333,7 +333,7 @@ "model": "E350", "year": 2012, "price": 55270, - "created_at": "2021-03-24 13:17:37" + "created_at": "2021-03-24T13:17:37Z" }, { "id": 43, @@ -341,7 +341,7 @@ "model": "Taurus", "year": 1987, "price": 13522, - "created_at": "2021-10-27 21:03:59" + "created_at": "2021-10-27T21:03:59Z" }, { "id": 44, @@ -349,7 +349,7 @@ "model": "Avalanche", "year": 2012, "price": 9862, - "created_at": "2021-07-13 12:22:26" + "created_at": "2021-07-13T12:22:26Z" }, { "id": 45, @@ -357,7 +357,7 @@ "model": "Charger", "year": 2012, "price": 81887, - "created_at": "2021-04-24 01:48:24" + "created_at": "2021-04-24T01:48:24Z" }, { "id": 46, @@ -365,7 +365,7 @@ "model": "S-Type", "year": 2005, "price": 34372, - "created_at": "2021-04-03 08:56:17" + 
"created_at": "2021-04-03T08:56:17Z" }, { "id": 47, @@ -373,7 +373,7 @@ "model": "Grand Voyager", "year": 1994, "price": 90637, - "created_at": "2022-04-21 09:21:08" + "created_at": "2022-04-21T09:21:08Z" }, { "id": 48, @@ -381,7 +381,7 @@ "model": "6000", "year": 1989, "price": 65165, - "created_at": "2021-10-30 13:03:07" + "created_at": "2021-10-30T13:03:07Z" }, { "id": 49, @@ -389,7 +389,7 @@ "model": "IS", "year": 2006, "price": 22434, - "created_at": "2021-01-16 10:45:52" + "created_at": "2021-01-16T10:45:52Z" }, { "id": 50, @@ -397,7 +397,7 @@ "model": "VehiCROSS", "year": 2001, "price": 38180, - "created_at": "2021-12-13 16:29:27" + "created_at": "2021-12-13T16:29:27Z" }, { "id": 51, @@ -405,7 +405,7 @@ "model": "Regal", "year": 2000, "price": 38680, - "created_at": "2021-12-29 22:25:54" + "created_at": "2021-12-29T22:25:54Z" }, { "id": 52, @@ -413,7 +413,7 @@ "model": "E-Class", "year": 2007, "price": 51556, - "created_at": "2021-07-06 11:42:23" + "created_at": "2021-07-06T11:42:23Z" }, { "id": 53, @@ -421,7 +421,7 @@ "model": "LeSabre", "year": 2001, "price": 10904, - "created_at": "2022-01-05 18:23:35" + "created_at": "2022-01-05T18:23:35Z" }, { "id": 54, @@ -429,7 +429,7 @@ "model": "928", "year": 1989, "price": 70917, - "created_at": "2022-01-02 23:16:45" + "created_at": "2022-01-02T23:16:45Z" }, { "id": 55, @@ -437,7 +437,7 @@ "model": "RX", "year": 2007, "price": 5212, - "created_at": "2021-07-10 15:02:53" + "created_at": "2021-07-10T15:02:53Z" }, { "id": 56, @@ -445,7 +445,7 @@ "model": "Econoline E250", "year": 1996, "price": 75095, - "created_at": "2021-02-04 16:17:18" + "created_at": "2021-02-04T16:17:18Z" }, { "id": 57, @@ -453,7 +453,7 @@ "model": "Blazer", "year": 2001, "price": 61918, - "created_at": "2021-12-08 07:25:30" + "created_at": "2021-12-08T07:25:30Z" }, { "id": 58, @@ -461,7 +461,7 @@ "model": "Savana 3500", "year": 2003, "price": 30307, - "created_at": "2021-11-21 23:11:45" + "created_at": "2021-11-21T23:11:45Z" }, { "id": 59, @@ 
-469,7 +469,7 @@ "model": "M", "year": 2002, "price": 24598, - "created_at": "2021-05-28 04:08:53" + "created_at": "2021-05-28T04:08:53Z" }, { "id": 60, @@ -477,7 +477,7 @@ "model": "S-Series", "year": 1992, "price": 96288, - "created_at": "2021-08-24 04:43:43" + "created_at": "2021-08-24T04:43:43Z" }, { "id": 61, @@ -485,7 +485,7 @@ "model": "Sebring", "year": 2003, "price": 34753, - "created_at": "2021-02-11 11:25:35" + "created_at": "2021-02-11T11:25:35Z" }, { "id": 62, @@ -493,7 +493,7 @@ "model": "Evora", "year": 2010, "price": 42760, - "created_at": "2021-08-31 00:29:05" + "created_at": "2021-08-31T00:29:05Z" }, { "id": 63, @@ -501,7 +501,7 @@ "model": "Wrangler", "year": 2011, "price": 8684, - "created_at": "2021-06-24 10:38:05" + "created_at": "2021-06-24T10:38:05Z" }, { "id": 64, @@ -509,7 +509,7 @@ "model": "Expedition", "year": 2012, "price": 25653, - "created_at": "2021-07-01 16:13:20" + "created_at": "2021-07-01T16:13:20Z" }, { "id": 65, @@ -517,7 +517,7 @@ "model": "Avalanche 2500", "year": 2006, "price": 3158, - "created_at": "2021-08-14 10:55:13" + "created_at": "2021-08-14T10:55:13Z" }, { "id": 66, @@ -525,7 +525,7 @@ "model": "Mazda3", "year": 2012, "price": 79820, - "created_at": "2021-05-25 21:55:52" + "created_at": "2021-05-25T21:55:52Z" }, { "id": 67, @@ -533,7 +533,7 @@ "model": "Tacoma", "year": 2005, "price": 73572, - "created_at": "2021-01-22 09:56:02" + "created_at": "2021-01-22T09:56:02Z" }, { "id": 68, @@ -541,7 +541,7 @@ "model": "Explorer Sport", "year": 2000, "price": 64579, - "created_at": "2021-02-16 06:56:06" + "created_at": "2021-02-16T06:56:06Z" }, { "id": 69, @@ -549,7 +549,7 @@ "model": "Savana Cargo Van", "year": 2006, "price": 65944, - "created_at": "2021-09-12 14:08:53" + "created_at": "2021-09-12T14:08:53Z" }, { "id": 70, @@ -557,7 +557,7 @@ "model": "HHR", "year": 2009, "price": 8953, - "created_at": "2021-08-17 04:25:43" + "created_at": "2021-08-17T04:25:43Z" }, { "id": 71, @@ -565,7 +565,7 @@ "model": "Bronco II", 
"year": 1989, "price": 41811, - "created_at": "2021-07-14 14:20:28" + "created_at": "2021-07-14T14:20:28Z" }, { "id": 72, @@ -573,7 +573,7 @@ "model": "Suburban 2500", "year": 2011, "price": 57488, - "created_at": "2021-09-22 12:32:57" + "created_at": "2021-09-22T12:32:57Z" }, { "id": 73, @@ -581,7 +581,7 @@ "model": "Grand Vitara", "year": 2008, "price": 6408, - "created_at": "2021-11-12 23:19:52" + "created_at": "2021-11-12T23:19:52Z" }, { "id": 74, @@ -589,7 +589,7 @@ "model": "Mazda6", "year": 2012, "price": 14805, - "created_at": "2021-06-01 01:55:32" + "created_at": "2021-06-01T01:55:32Z" }, { "id": 75, @@ -597,7 +597,7 @@ "model": "Tahoe", "year": 1998, "price": 33585, - "created_at": "2022-01-09 04:28:54" + "created_at": "2022-01-09T04:28:54Z" }, { "id": 76, @@ -605,7 +605,7 @@ "model": "Explorer Sport Trac", "year": 2010, "price": 2087, - "created_at": "2022-03-28 00:28:16" + "created_at": "2022-03-28T00:28:16Z" }, { "id": 77, @@ -613,7 +613,7 @@ "model": "F150", "year": 2007, "price": 17621, - "created_at": "2021-03-23 15:08:10" + "created_at": "2021-03-23T15:08:10Z" }, { "id": 78, @@ -621,7 +621,7 @@ "model": "Taurus", "year": 1995, "price": 16478, - "created_at": "2021-06-07 22:29:50" + "created_at": "2021-06-07T22:29:50Z" }, { "id": 79, @@ -629,7 +629,7 @@ "model": "Truck", "year": 1992, "price": 70616, - "created_at": "2022-01-30 05:14:02" + "created_at": "2022-01-30T05:14:02Z" }, { "id": 80, @@ -637,7 +637,7 @@ "model": "Colt", "year": 1994, "price": 34163, - "created_at": "2022-04-02 18:06:30" + "created_at": "2022-04-02T18:06:30Z" }, { "id": 81, @@ -645,7 +645,7 @@ "model": "RX-7", "year": 1991, "price": 29634, - "created_at": "2021-01-06 10:30:59" + "created_at": "2021-01-06T10:30:59Z" }, { "id": 82, @@ -653,7 +653,7 @@ "model": "Grand Prix", "year": 1984, "price": 88575, - "created_at": "2021-02-24 06:06:57" + "created_at": "2021-02-24T06:06:57Z" }, { "id": 83, @@ -661,7 +661,7 @@ "model": "Mazdaspeed 3", "year": 2012, "price": 77723, - 
"created_at": "2021-11-11 22:48:05" + "created_at": "2021-11-11T22:48:05Z" }, { "id": 84, @@ -669,7 +669,7 @@ "model": "Spider", "year": 1992, "price": 64288, - "created_at": "2021-01-06 03:50:27" + "created_at": "2021-01-06T03:50:27Z" }, { "id": 85, @@ -677,7 +677,7 @@ "model": "S8", "year": 2002, "price": 33718, - "created_at": "2021-07-21 11:14:54" + "created_at": "2021-07-21T11:14:54Z" }, { "id": 86, @@ -685,7 +685,7 @@ "model": "Amigo", "year": 1992, "price": 53335, - "created_at": "2022-03-02 10:42:21" + "created_at": "2022-03-02T10:42:21Z" }, { "id": 87, @@ -693,7 +693,7 @@ "model": "Paseo", "year": 1996, "price": 74558, - "created_at": "2021-10-02 14:54:58" + "created_at": "2021-10-02 14:54:58Z" }, { "id": 88, @@ -701,7 +701,7 @@ "model": "Continental Mark VII", "year": 1986, "price": 42150, - "created_at": "2021-10-02 04:48:53" + "created_at": "2021-10-02T04:48:53Z" }, { "id": 89, @@ -709,7 +709,7 @@ "model": "Dakota", "year": 1997, "price": 64516, - "created_at": "2021-09-09 23:13:26" + "created_at": "2021-09-09T23:13:26Z" }, { "id": 90, @@ -717,7 +717,7 @@ "model": "Tahoe", "year": 1998, "price": 51461, - "created_at": "2021-04-06 08:29:19" + "created_at": "2021-04-06T08:29:19Z" }, { "id": 91, @@ -725,7 +725,7 @@ "model": "Vibe", "year": 2006, "price": 12134, - "created_at": "2021-01-11 22:30:14" + "created_at": "2021-01-11T22:30:14Z" }, { "id": 92, @@ -733,7 +733,7 @@ "model": "Eos", "year": 2011, "price": 53128, - "created_at": "2021-01-12 23:25:06" + "created_at": "2021-01-12T23:25:06Z" }, { "id": 93, @@ -741,7 +741,7 @@ "model": "Mazdaspeed6", "year": 2007, "price": 90902, - "created_at": "2021-12-29 14:29:03" + "created_at": "2021-12-29T14:29:03Z" }, { "id": 94, @@ -749,7 +749,7 @@ "model": "Xterra", "year": 2005, "price": 41532, - "created_at": "2021-09-07 09:00:49" + "created_at": "2021-09-07 09:00:49Z" }, { "id": 95, @@ -757,7 +757,7 @@ "model": "Sable", "year": 2005, "price": 71337, - "created_at": "2021-01-31 22:13:44" + "created_at": 
"2021-01-31T22:13:44Z" }, { "id": 96, @@ -765,7 +765,7 @@ "model": "330", "year": 2006, "price": 14494, - "created_at": "2021-09-17 20:52:48" + "created_at": "2021-09-17T20:52:48Z" }, { "id": 97, @@ -773,7 +773,7 @@ "model": "R8", "year": 2008, "price": 17642, - "created_at": "2021-09-21 11:56:24" + "created_at": "2021-09-21T11:56:24Z" }, { "id": 98, @@ -781,7 +781,7 @@ "model": "CTS-V", "year": 2007, "price": 19914, - "created_at": "2021-09-02 15:38:46" + "created_at": "2021-09-02T15:38:46Z" }, { "id": 99, @@ -789,7 +789,7 @@ "model": "1500 Club Coupe", "year": 1997, "price": 82288, - "created_at": "2021-04-20 18:58:15" + "created_at": "2021-04-20T18:58:15Z" }, { "id": 100, @@ -797,6 +797,6 @@ "model": "Somerset", "year": 1986, "price": 64148, - "created_at": "2021-06-10 19:07:38" + "created_at": "2021-06-10T19:07:38Z" } ] diff --git a/airbyte-integrations/connectors/source-faker/source_faker/source.py b/airbyte-integrations/connectors/source-faker/source_faker/source.py index 5873e31e8866..ac9eda4c1193 100644 --- a/airbyte-integrations/connectors/source-faker/source_faker/source.py +++ b/airbyte-integrations/connectors/source-faker/source_faker/source.py @@ -176,9 +176,16 @@ def get_stream_cursor(state: Dict[str, any], stream: str) -> int: def generate_record(stream: any, data: any): + dict = data.copy() + + # timestamps need to be emitted in ISO format + for key in dict: + if isinstance(dict[key], datetime.datetime): + dict[key] = dict[key].isoformat() + return AirbyteMessage( type=Type.RECORD, - record=AirbyteRecordMessage(stream=stream.stream.name, data=data, emitted_at=int(datetime.datetime.now().timestamp()) * 1000), + record=AirbyteRecordMessage(stream=stream.stream.name, data=dict, emitted_at=int(datetime.datetime.now().timestamp()) * 1000), ) diff --git a/airbyte-integrations/connectors/source-file/source_file/spec.json b/airbyte-integrations/connectors/source-file/source_file/spec.json index e48c273a3b5a..834323a65968 100644 --- 
a/airbyte-integrations/connectors/source-file/source_file/spec.json +++ b/airbyte-integrations/connectors/source-file/source_file/spec.json @@ -24,7 +24,7 @@ "type": "string", "title": "Reader Options", "description": "This should be a string in JSON format. It depends on the chosen file format to provide additional options and tune its behavior.", - "examples": ["{}", "{'sep': ' '}"] + "examples": ["{}", "{\"sep\": \" \"}"] }, "url": { "type": "string", diff --git a/airbyte-integrations/connectors/source-firebolt/.dockerignore b/airbyte-integrations/connectors/source-firebolt/.dockerignore new file mode 100644 index 000000000000..087dfcae5269 --- /dev/null +++ b/airbyte-integrations/connectors/source-firebolt/.dockerignore @@ -0,0 +1,6 @@ +* +!Dockerfile +!main.py +!source_firebolt +!setup.py +!secrets diff --git a/airbyte-integrations/connectors/source-firebolt/Dockerfile b/airbyte-integrations/connectors/source-firebolt/Dockerfile new file mode 100644 index 000000000000..c02cb10a19f2 --- /dev/null +++ b/airbyte-integrations/connectors/source-firebolt/Dockerfile @@ -0,0 +1,39 @@ +FROM python:3.9.11-alpine3.15 as base + +# build and load all requirements +FROM base as builder +WORKDIR /airbyte/integration_code + +# upgrade pip to the latest version +RUN apk --no-cache upgrade \ + && pip install --upgrade pip \ + && apk --no-cache add tzdata build-base + +RUN apk add libffi-dev + +COPY setup.py ./ +# install necessary packages to a temporary folder +RUN pip install --prefix=/install . + +# build a clean environment +FROM base +WORKDIR /airbyte/integration_code + +# copy all loaded and built libraries to a pure basic image +COPY --from=builder /install /usr/local +# add default timezone settings +COPY --from=builder /usr/share/zoneinfo/Etc/UTC /etc/localtime +RUN echo "Etc/UTC" > /etc/timezone + +# bash is installed for more convenient debugging. 
+RUN apk --no-cache add bash + +# copy payload code only +COPY main.py ./ +COPY source_firebolt ./source_firebolt + +ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" +ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] + +LABEL io.airbyte.version=0.1.0 +LABEL io.airbyte.name=airbyte/source-firebolt diff --git a/airbyte-integrations/connectors/source-firebolt/README.md b/airbyte-integrations/connectors/source-firebolt/README.md new file mode 100644 index 000000000000..14282a3bf812 --- /dev/null +++ b/airbyte-integrations/connectors/source-firebolt/README.md @@ -0,0 +1,132 @@ +# Firebolt Source + +This is the repository for the Firebolt source connector, written in Python. +For information about how to use this connector within Airbyte, see [the documentation](https://docs.airbyte.io/integrations/sources/firebolt). + +## Local development + +### Prerequisites +**To iterate on this connector, make sure to complete this prerequisites section.** + +#### Minimum Python version required `= 3.7.0` + +#### Build & Activate Virtual Environment and install dependencies +From this connector directory, create a virtual environment: +``` +python -m venv .venv +``` + +This will generate a virtualenv for this module in `.venv/`. Make sure this venv is active in your +development environment of choice. To activate it from the terminal, run: +``` +source .venv/bin/activate +pip install -r requirements.txt +``` +If you are in an IDE, follow your IDE's instructions to activate the virtualenv. + +Note that while we are installing dependencies from `requirements.txt`, you should only edit `setup.py` for your dependencies. `requirements.txt` is +used for editable installs (`pip install -e`) to pull in Python dependencies from the monorepo and will call `setup.py`. +If this is mumbo jumbo to you, don't worry about it, just put your deps in `setup.py` but install using `pip install -r requirements.txt` and everything +should work as you expect. 
+ +#### Building via Gradle +From the Airbyte repository root, run: +``` +./gradlew :airbyte-integrations:connectors:source-firebolt:build +``` + +#### Create credentials +**If you are a community contributor**, follow the instructions in the [documentation](https://docs.airbyte.io/integrations/sources/firebolt) +to generate the necessary credentials. Then create a file `secrets/config.json` conforming to the `source_firebolt/spec.json` file. +Note that the `secrets` directory is gitignored by default, so there is no danger of accidentally checking in sensitive information. +See `integration_tests/sample_config.json` for a sample config file. + +**If you are an Airbyte core member**, copy the credentials in Lastpass under the secret name `source firebolt test creds` +and place them into `secrets/config.json`. + +### Locally running the connector +``` +python main.py spec +python main.py check --config secrets/config.json +python main.py discover --config secrets/config.json +python main.py read --config secrets/config.json --catalog integration_tests/configured_catalog.json +``` + +### Locally running the connector docker image + +#### Build +First, make sure you build the latest Docker image: +``` +docker build . -t airbyte/source-firebolt:dev +``` + +You can also build the connector image via Gradle: +``` +./gradlew :airbyte-integrations:connectors:source-firebolt:airbyteDocker +``` +When building via Gradle, the docker image name and tag, respectively, are the values of the `io.airbyte.name` and `io.airbyte.version` `LABEL`s in +the Dockerfile. 
+ +#### Run +Then run any of the connector commands as follows: +``` +docker run --rm airbyte/source-firebolt:dev spec +docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-firebolt:dev check --config /secrets/config.json +docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-firebolt:dev discover --config /secrets/config.json +docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/source-firebolt:dev read --config /secrets/config.json --catalog /integration_tests/configured_catalog.json +``` +## Testing + Make sure to familiarize yourself with [pytest test discovery](https://docs.pytest.org/en/latest/goodpractices.html#test-discovery) to know how your test files and methods should be named. +First install test dependencies into your virtual environment: +``` +pip install .[tests] +``` +### Unit Tests +To run unit tests locally, from the connector directory run: +``` +python -m pytest unit_tests +``` + +### Integration Tests +There are two types of integration tests: Acceptance Tests (Airbyte's test suite for all source connectors) and custom integration tests (which are specific to this connector). +#### Custom Integration tests +Place custom tests inside `integration_tests/` folder, then, from the connector root, run +``` +python -m pytest integration_tests +``` +#### Acceptance Tests +Customize `acceptance-test-config.yml` file to configure tests. See [Source Acceptance Tests](https://docs.airbyte.io/connector-development/testing-connectors/source-acceptance-tests-reference) for more information. +If your connector requires to create or destroy resources for use during acceptance tests create fixtures for it and place them inside integration_tests/acceptance.py. 
+To run your integration tests with acceptance tests, from the connector root, run +``` +python -m pytest integration_tests -p integration_tests.acceptance +``` +To run your integration tests with docker + +### Using gradle to run tests +All commands should be run from airbyte project root. +To run unit tests: +``` +./gradlew :airbyte-integrations:connectors:source-firebolt:unitTest +``` +To run acceptance and custom integration tests: + +Make sure you have a running Firebolt engine that was specified in the config.json. It is needed to run the test queries. + +``` +./gradlew :airbyte-integrations:connectors:source-firebolt:integrationTest +``` + +## Dependency Management +All of your dependencies should go in `setup.py`, NOT `requirements.txt`. The requirements file is only used to connect internal Airbyte dependencies in the monorepo for local development. +We split dependencies between two groups, dependencies that are: +* required for your connector to work need to go to `MAIN_REQUIREMENTS` list. +* required for the testing need to go to `TEST_REQUIREMENTS` list + +### Publishing a new version of the connector +You've checked out the repo, implemented a million dollar feature, and you're ready to share your changes with the world. Now what? +1. Make sure your changes are passing unit and integration tests. +1. Bump the connector version in `Dockerfile` -- just increment the value of the `LABEL io.airbyte.version` appropriately (we use [SemVer](https://semver.org/)). +1. Create a Pull Request. +1. Pat yourself on the back for being an awesome contributor. +1. Someone from Airbyte will take a look at your PR and iterate with you to merge it into master. 
diff --git a/airbyte-integrations/connectors/source-firebolt/acceptance-test-config.yml b/airbyte-integrations/connectors/source-firebolt/acceptance-test-config.yml new file mode 100644 index 000000000000..5888449a7fc4 --- /dev/null +++ b/airbyte-integrations/connectors/source-firebolt/acceptance-test-config.yml @@ -0,0 +1,26 @@ +# See [Source Acceptance Tests](https://docs.airbyte.io/connector-development/testing-connectors/source-acceptance-tests-reference) +# for more information about how to configure these tests +connector_image: airbyte/source-firebolt:dev +tests: + spec: + - spec_path: "source_firebolt/spec.json" + connection: + - config_path: "secrets/config.json" + status: "succeed" + - config_path: "integration_tests/invalid_config.json" + status: "failed" + discovery: + - config_path: "secrets/config.json" + basic_read: + - config_path: "secrets/config.json" + configured_catalog_path: "integration_tests/configured_catalog.json" + empty_streams: [] + timeout_seconds: 120 + expect_records: + path: "integration_tests/expected_records.txt" + extra_fields: no + exact_order: yes + extra_records: no + full_refresh: + - config_path: "secrets/config.json" + configured_catalog_path: "integration_tests/configured_catalog.json" diff --git a/airbyte-integrations/connectors/source-firebolt/acceptance-test-docker.sh b/airbyte-integrations/connectors/source-firebolt/acceptance-test-docker.sh new file mode 100644 index 000000000000..c51577d10690 --- /dev/null +++ b/airbyte-integrations/connectors/source-firebolt/acceptance-test-docker.sh @@ -0,0 +1,16 @@ +#!/usr/bin/env sh + +# Build latest connector image +docker build . 
-t $(cat acceptance-test-config.yml | grep "connector_image" | head -n 1 | cut -d: -f2-) + +# Pull latest acctest image +docker pull airbyte/source-acceptance-test:latest + +# Run +docker run --rm -it \ + -v /var/run/docker.sock:/var/run/docker.sock \ + -v /tmp:/tmp \ + -v $(pwd):/test_input \ + airbyte/source-acceptance-test \ + --acceptance-test-config /test_input + diff --git a/airbyte-integrations/connectors/source-firebolt/bootstrap.md b/airbyte-integrations/connectors/source-firebolt/bootstrap.md new file mode 100644 index 000000000000..fccc42134ac6 --- /dev/null +++ b/airbyte-integrations/connectors/source-firebolt/bootstrap.md @@ -0,0 +1,22 @@ +# Firebolt Source + +## Overview + +Firebolt is a cloud data warehouse purpose-built to provide sub-second analytics performance on massive, terabyte-scale data sets. + +Firebolt has two main concepts: Databases, which denote the storage of data and Engines, which describe the compute layer on top of a Database. + +Firebolt has three types of tables: External, Fact and Dimension. External tables, which represent a raw file structure in storage. Dimension tables, which are optimised for fetching and store data on each node in an Engine. Fact tables are similar to Dimension, but they shard the data across the nodes. The usual workload is to write source data into a set of files on S3, wrap them with an External table and write this data to a fetch-optimised Fact or Dimension table. + +## Connector + +This connector uses [firebolt-sdk](https://pypi.org/project/firebolt-sdk/), which is a [PEP-249](https://peps.python.org/pep-0249/) DB API implementation. +`Connection` object is used to connect to a specified Engine, which runs subsequent queries against the data stored in the Database using the `Cursor` object. + +## Notes + +* External tables are not available as a source for performance reasons. +* Views are not available as a source due to possible complicated structure and non-obvious data types.
+* Only Full reads are supported for now. +* Integration/Acceptance testing requires the user to have a running engine. Spinning up an engine can take a while so this ensures a faster iteration on the connector. +* Pagination is not available at the moment so large enough data sets might cause out of memory errors \ No newline at end of file diff --git a/airbyte-integrations/connectors/source-firebolt/build.gradle b/airbyte-integrations/connectors/source-firebolt/build.gradle new file mode 100644 index 000000000000..f92bd34f7fb3 --- /dev/null +++ b/airbyte-integrations/connectors/source-firebolt/build.gradle @@ -0,0 +1,9 @@ +plugins { + id 'airbyte-python' + id 'airbyte-docker' + id 'airbyte-source-acceptance-test' +} + +airbytePython { + moduleDirectory 'source_firebolt' +} diff --git a/airbyte-cdk/python/unit_tests/sources/declarative/requesters/request_params/__init__.py b/airbyte-integrations/connectors/source-firebolt/integration_tests/__init__.py similarity index 100% rename from airbyte-cdk/python/unit_tests/sources/declarative/requesters/request_params/__init__.py rename to airbyte-integrations/connectors/source-firebolt/integration_tests/__init__.py diff --git a/airbyte-integrations/connectors/source-firebolt/integration_tests/acceptance.py b/airbyte-integrations/connectors/source-firebolt/integration_tests/acceptance.py new file mode 100644 index 000000000000..34f2f625e15b --- /dev/null +++ b/airbyte-integrations/connectors/source-firebolt/integration_tests/acceptance.py @@ -0,0 +1,16 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved.
+# + + +import pytest + +pytest_plugins = ("source_acceptance_test.plugin",) + + +@pytest.fixture(scope="session", autouse=True) +def connector_setup(): + """This fixture is a placeholder for external resources that acceptance test might require.""" + # TODO: setup test dependencies + yield + # TODO: clean up test dependencies diff --git a/airbyte-integrations/connectors/source-firebolt/integration_tests/catalog.json b/airbyte-integrations/connectors/source-firebolt/integration_tests/catalog.json new file mode 100644 index 000000000000..8fed5b41ccd6 --- /dev/null +++ b/airbyte-integrations/connectors/source-firebolt/integration_tests/catalog.json @@ -0,0 +1,35 @@ +{ + "streams": [ + { + "name": "airbyte_acceptance_table", + "supported_sync_modes": ["full_refresh", "incremental"], + "source_defined_cursor": false, + "json_schema": { + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "column1": { + "type": "string" + }, + "column2": { + "type": "number" + }, + "column3": { + "type": "string", + "format": "datetime", + "airbyte_type": "timestamp_without_timezone" + }, + "column4": { + "type": "number" + }, + "column5": { + "type": "array", + "items": { + "type": "integer" + } + } + } + } + } + ] +} diff --git a/airbyte-integrations/connectors/source-firebolt/integration_tests/configured_catalog.json b/airbyte-integrations/connectors/source-firebolt/integration_tests/configured_catalog.json new file mode 100644 index 000000000000..b64ba92617a3 --- /dev/null +++ b/airbyte-integrations/connectors/source-firebolt/integration_tests/configured_catalog.json @@ -0,0 +1,38 @@ +{ + "streams": [ + { + "stream": { + "name": "airbyte_acceptance_table", + "supported_sync_modes": ["full_refresh"], + "source_defined_cursor": false, + "json_schema": { + "type": "object", + "properties": { + "column1": { + "type": "string" + }, + "column2": { + "type": "number" + }, + "column3": { + "type": "string", + "format": "datetime", + "airbyte_type": 
"timestamp_without_timezone" + }, + "column4": { + "type": "number" + }, + "column5": { + "type": "array", + "items": { + "type": "integer" + } + } + } + } + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "append" + } + ] +} diff --git a/airbyte-integrations/connectors/source-firebolt/integration_tests/expected_records.txt b/airbyte-integrations/connectors/source-firebolt/integration_tests/expected_records.txt new file mode 100644 index 000000000000..828fa3ce95d8 --- /dev/null +++ b/airbyte-integrations/connectors/source-firebolt/integration_tests/expected_records.txt @@ -0,0 +1,2 @@ +{"stream": "airbyte_acceptance_table", "data": {"column1": "my_value", "column2": 221, "column3": "2021-01-01T20:10:22", "column4": 1.214, "column5": [1,2,3]}, "emitted_at": 1626172757000} +{"stream": "airbyte_acceptance_table", "data": {"column1": "my_value2", "column2": 222, "column3": "2021-01-02T22:10:22", "column5": [1,2,null]}, "emitted_at": 1626172757000} \ No newline at end of file diff --git a/airbyte-integrations/connectors/source-firebolt/integration_tests/integration_test.py b/airbyte-integrations/connectors/source-firebolt/integration_tests/integration_test.py new file mode 100644 index 000000000000..e03a428f5794 --- /dev/null +++ b/airbyte-integrations/connectors/source-firebolt/integration_tests/integration_test.py @@ -0,0 +1,141 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+# + +import random +import string +from json import load +from typing import Dict, Generator +from unittest.mock import MagicMock + +from airbyte_cdk.models import Status +from airbyte_cdk.models.airbyte_protocol import ( + AirbyteStream, + ConfiguredAirbyteCatalog, + ConfiguredAirbyteStream, + DestinationSyncMode, + SyncMode, +) +from firebolt.db import Connection +from pytest import fixture +from source_firebolt.source import SourceFirebolt, establish_connection + + +@fixture(scope="module") +def test_table_name() -> str: + letters = string.ascii_lowercase + rnd_string = "".join(random.choice(letters) for i in range(10)) + return f"airbyte_integration_{rnd_string}" + + +@fixture(scope="module") +def create_test_data(config: Dict[str, str], test_table_name: str) -> Generator[Connection, None, None]: + with establish_connection(config, MagicMock()) as connection: + with connection.cursor() as cursor: + cursor.execute( + f"CREATE DIMENSION TABLE {test_table_name} (column1 STRING NULL, column2 INT NULL, column3 DATE NULL, column4 DATETIME NULL, column5 DECIMAL(38, 31) NULL, column6 ARRAY(INT), column7 BOOLEAN NULL)" + ) + cursor.execute( + f"INSERT INTO {test_table_name} VALUES ('my_value',221,'2021-01-01','2021-01-01 12:00:01', Null, [1,2,3], true), ('my_value2',null,'2021-01-02','2021-01-02 12:00:02','1231232.123459999990457054844258706536', [1,2,3], null)" + ) + yield connection + cursor.execute(f"DROP TABLE {test_table_name}") + + +@fixture +def table_schema() -> str: + schema = { + "type": "object", + "properties": { + "column1": {"type": ["null", "string"]}, + "column2": {"type": ["null", "integer"]}, + "column3": {"type": ["null", "string"], "format": "date"}, + "column4": { + "type": ["null", "string"], + "format": "datetime", + "airbyte_type": "timestamp_without_timezone", + }, + "column5": {"type": ["null", "number"]}, # TODO: change once Decimal hits production + "column6": {"type": "array", "items": {"type": ["null", "integer"]}}, + "column7": {"type": 
["null", "integer"]}, + }, + } + return schema + + +@fixture +def test_stream(test_table_name: str, table_schema: str) -> AirbyteStream: + return AirbyteStream(name=test_table_name, json_schema=table_schema, supported_sync_modes=[SyncMode.full_refresh]) + + +@fixture +def test_configured_catalogue(test_table_name: str, table_schema: str) -> ConfiguredAirbyteCatalog: + # Deleting one column to simulate manipulation in UI + del table_schema["properties"]["column1"] + append_stream = ConfiguredAirbyteStream( + stream=AirbyteStream(name=test_table_name, json_schema=table_schema), + sync_mode=SyncMode.incremental, + destination_sync_mode=DestinationSyncMode.append, + ) + return ConfiguredAirbyteCatalog(streams=[append_stream]) + + +@fixture(scope="module") +def config() -> Dict[str, str]: + with open( + "secrets/config.json", + ) as f: + yield load(f) + + +@fixture(scope="module") +def invalid_config() -> Dict[str, str]: + with open( + "integration_tests/invalid_config.json", + ) as f: + yield load(f) + + +def test_check_fails(invalid_config: Dict[str, str]): + source = SourceFirebolt() + status = source.check(logger=MagicMock(), config=invalid_config) + assert status.status == Status.FAILED + + +def test_check_succeeds(config: Dict[str, str]): + source = SourceFirebolt() + status = source.check(logger=MagicMock(), config=config) + assert status.status == Status.SUCCEEDED + + +def test_discover( + config: Dict[str, str], create_test_data: Generator[Connection, None, None], test_table_name: str, test_stream: AirbyteStream +): + source = SourceFirebolt() + catalog = source.discover(MagicMock(), config) + assert any(stream.name == test_table_name for stream in catalog.streams), "Test table not found" + for stream in catalog.streams: + if stream.name == test_table_name: + assert stream == test_stream + + +def test_read( + config: Dict[str, str], + create_test_data: Generator[Connection, None, None], + test_table_name: str, + test_configured_catalogue: 
ConfiguredAirbyteCatalog, +): + expected_data = [ + {"column2": 221, "column3": "2021-01-01", "column4": "2021-01-01T12:00:01", "column6": [1, 2, 3], "column7": 1}, + { + "column3": "2021-01-02", + "column4": "2021-01-02T12:00:02", + "column5": 1231232.12346, # TODO: change once Decimal is in production + "column6": [1, 2, 3], + }, + ] + source = SourceFirebolt() + result = source.read(logger=MagicMock(), config=config, catalog=test_configured_catalogue, state={}) + data = list(result) + assert all([x.record.stream == test_table_name for x in data]), "Table name is incorrect" + assert [x.record.data for x in data] == expected_data, "Test data is not matching" diff --git a/airbyte-integrations/connectors/source-firebolt/integration_tests/invalid_config.json b/airbyte-integrations/connectors/source-firebolt/integration_tests/invalid_config.json new file mode 100644 index 000000000000..a7c5e13c48ff --- /dev/null +++ b/airbyte-integrations/connectors/source-firebolt/integration_tests/invalid_config.json @@ -0,0 +1,6 @@ +{ + "username": "xxx", + "password": "xxx", + "database": "non_existing_database_name", + "engine": "database_name_Analytics" +} diff --git a/airbyte-integrations/connectors/source-firebolt/integration_tests/sample_config.json b/airbyte-integrations/connectors/source-firebolt/integration_tests/sample_config.json new file mode 100644 index 000000000000..5f3ba0feb631 --- /dev/null +++ b/airbyte-integrations/connectors/source-firebolt/integration_tests/sample_config.json @@ -0,0 +1,6 @@ +{ + "database": "database_name", + "username": "xxx", + "password": "xxx", + "engine": "database_name_Analytics" +} diff --git a/airbyte-integrations/connectors/source-firebolt/main.py b/airbyte-integrations/connectors/source-firebolt/main.py new file mode 100644 index 000000000000..c7fdca83a929 --- /dev/null +++ b/airbyte-integrations/connectors/source-firebolt/main.py @@ -0,0 +1,13 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+# + + +import sys + +from airbyte_cdk.entrypoint import launch +from source_firebolt import SourceFirebolt + +if __name__ == "__main__": + source = SourceFirebolt() + launch(source, sys.argv[1:]) diff --git a/airbyte-integrations/connectors/source-firebolt/requirements.txt b/airbyte-integrations/connectors/source-firebolt/requirements.txt new file mode 100644 index 000000000000..7be17a56d745 --- /dev/null +++ b/airbyte-integrations/connectors/source-firebolt/requirements.txt @@ -0,0 +1,3 @@ +# This file is autogenerated -- only edit if you know what you are doing. Use setup.py for declaring dependencies. +-e ../../bases/source-acceptance-test +-e . diff --git a/airbyte-integrations/connectors/source-firebolt/setup.py b/airbyte-integrations/connectors/source-firebolt/setup.py new file mode 100644 index 000000000000..c2d9148fe28d --- /dev/null +++ b/airbyte-integrations/connectors/source-firebolt/setup.py @@ -0,0 +1,27 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +from setuptools import find_packages, setup + +MAIN_REQUIREMENTS = ["airbyte-cdk", "firebolt-sdk>=0.8.0"] + +TEST_REQUIREMENTS = [ + "pytest>=6.2.5", # 6.2.5 has python10 compatibility fixes + "pytest-asyncio>=0.18.0", + "source-acceptance-test", +] + +setup( + name="source_firebolt", + description="Source implementation for Firebolt.", + author="Airbyte", + author_email="contact@airbyte.io", + packages=find_packages(), + install_requires=MAIN_REQUIREMENTS, + package_data={"": ["*.json"]}, + extras_require={ + "tests": TEST_REQUIREMENTS, + }, +) diff --git a/airbyte-integrations/connectors/source-firebolt/source_firebolt/__init__.py b/airbyte-integrations/connectors/source-firebolt/source_firebolt/__init__.py new file mode 100644 index 000000000000..51a0626dad03 --- /dev/null +++ b/airbyte-integrations/connectors/source-firebolt/source_firebolt/__init__.py @@ -0,0 +1,8 @@ +# +# Copyright (c) 2021 Airbyte, Inc., all rights reserved. 
+# + + +from .source import SourceFirebolt + +__all__ = ["SourceFirebolt"] diff --git a/airbyte-integrations/connectors/source-firebolt/source_firebolt/database.py b/airbyte-integrations/connectors/source-firebolt/source_firebolt/database.py new file mode 100644 index 000000000000..6d133fb3cd6c --- /dev/null +++ b/airbyte-integrations/connectors/source-firebolt/source_firebolt/database.py @@ -0,0 +1,94 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +import json +from typing import Any, Dict, List + +from airbyte_cdk.logger import AirbyteLogger +from firebolt.async_db import Connection as AsyncConnection +from firebolt.async_db import connect as async_connect +from firebolt.client import DEFAULT_API_URL +from firebolt.client.auth import UsernamePassword +from firebolt.db import Connection, connect + + +def parse_config(config: json, logger: AirbyteLogger) -> Dict[str, Any]: + """ + Convert dict of config values to firebolt.db.Connection arguments + + :param config: json-compatible dict of settings + :param logger: AirbyteLogger instance to print logs. + + :return: dictionary of firebolt.db.Connection-compatible kwargs + """ + connection_args = { + "database": config["database"], + "auth": UsernamePassword(config["username"], config["password"]), + "api_endpoint": config.get("host", DEFAULT_API_URL), + "account_name": config.get("account"), + } + # engine can be a name or a full URL of a cluster + engine = config.get("engine") + if engine: + if "." in engine: + connection_args["engine_url"] = engine + else: + connection_args["engine_name"] = engine + else: + logger.info("Engine parameter was not provided. Connecting to the default engine.") + return connection_args + + +def establish_connection(config: json, logger: AirbyteLogger) -> Connection: + """ + Creates a connection to Firebolt database using the parameters provided. + + :param config: Json object containing db credentials. + :param logger: AirbyteLogger instance to print logs. 
+ + :return: PEP-249 compliant database Connection object. + """ + logger.debug("Connecting to Firebolt.") + connection = connect(**parse_config(config, logger)) + logger.debug("Connection to Firebolt established.") + return connection + + +async def establish_async_connection(config: json, logger: AirbyteLogger) -> AsyncConnection: + """ + Creates an async connection to Firebolt database using the parameters provided. + This connection can be used for parallel operations. + + :param config: Json object containing db credentials. + :param logger: AirbyteLogger instance to print logs. + + :return: PEP-249 compliant database Connection object. + """ + logger.debug("Connecting to Firebolt.") + connection = await async_connect(**parse_config(config, logger)) + logger.debug("Connection to Firebolt established.") + return connection + + +async def get_firebolt_tables(connection: AsyncConnection) -> List[str]: + """ + Fetch a list of tables that are compatible with Airbyte. + Currently this includes Fact and Dimension tables + + :param connection: Connection object connected to a database + + :return: List of table names + """ + query = """ + SELECT + table_name + FROM + information_schema.tables + WHERE + "table_type" IN ('FACT', 'DIMENSION') + """ + cursor = connection.cursor() + await cursor.execute(query) + return [table[0] for table in await cursor.fetchall()] diff --git a/airbyte-integrations/connectors/source-firebolt/source_firebolt/source.py b/airbyte-integrations/connectors/source-firebolt/source_firebolt/source.py new file mode 100644 index 000000000000..b534ffb734b0 --- /dev/null +++ b/airbyte-integrations/connectors/source-firebolt/source_firebolt/source.py @@ -0,0 +1,147 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+# + +import json +from asyncio import gather, get_event_loop +from typing import Dict, Generator + +from airbyte_cdk.logger import AirbyteLogger +from airbyte_cdk.models import ( + AirbyteCatalog, + AirbyteConnectionStatus, + AirbyteMessage, + AirbyteStream, + ConfiguredAirbyteCatalog, + Status, + SyncMode, +) +from airbyte_cdk.sources import Source +from firebolt.async_db import Connection as AsyncConnection + +from .database import establish_async_connection, establish_connection, get_firebolt_tables +from .utils import airbyte_message_from_data, convert_type + +SUPPORTED_SYNC_MODES = [SyncMode.full_refresh] + + +async def get_table_stream(connection: AsyncConnection, table: str) -> AirbyteStream: + """ + Get AirbyteStream for a particular table with table structure defined. + + :param connection: Connection object connected to a database + + :return: AirbyteStream object containing the table structure + """ + column_mapping = {} + cursor = connection.cursor() + await cursor.execute(f"SHOW COLUMNS {table}") + for t_name, c_name, c_type, nullable in await cursor.fetchall(): + airbyte_type = convert_type(c_type, nullable) + column_mapping[c_name] = airbyte_type + cursor.close() + json_schema = { + "type": "object", + "properties": column_mapping, + } + return AirbyteStream(name=table, json_schema=json_schema, supported_sync_modes=SUPPORTED_SYNC_MODES) + + +class SourceFirebolt(Source): + def check(self, logger: AirbyteLogger, config: json) -> AirbyteConnectionStatus: + """ + Tests if the input configuration can be used to successfully connect to the integration + e.g: if a provided Stripe API token can be used to connect to the Stripe API. 
+ + :param logger: Logging object to display debug/info/error to the logs + (logs will not be accessible via airbyte UI if they are not passed to this logger) + :param config: Json object containing the configuration of this source, content of this json is as specified in + the properties of the spec.json file + + :return: AirbyteConnectionStatus indicating a Success or Failure + """ + try: + with establish_connection(config, logger) as connection: + # We can only verify correctness of connection parameters on execution + with connection.cursor() as cursor: + cursor.execute("SELECT 1") + return AirbyteConnectionStatus(status=Status.SUCCEEDED) + except Exception as e: + return AirbyteConnectionStatus(status=Status.FAILED, message=f"An exception occurred: {str(e)}") + + def discover(self, logger: AirbyteLogger, config: json) -> AirbyteCatalog: + """ + Returns an AirbyteCatalog representing the available streams and fields in this integration. + For example, given valid credentials to a Postgres database, + returns an Airbyte catalog where each postgres table is a stream, and each table column is a field. + + :param logger: Logging object to display debug/info/error to the logs + (logs will not be accessible via airbyte UI if they are not passed to this logger) + :param config: Json object containing the configuration of this source, content of this json is as specified in + the properties of the spec.json file + + :return: AirbyteCatalog is an object describing a list of all available streams in this source. 
+ A stream is an AirbyteStream object that includes: + - its stream name (or table name in the case of Postgres) + - json_schema providing the specifications of expected schema for this stream (a list of columns described + by their names and types) + """ + + async def get_streams(): + async with await establish_async_connection(config, logger) as connection: + tables = await get_firebolt_tables(connection) + logger.info(f"Found {len(tables)} available tables.") + return await gather(*[get_table_stream(connection, table) for table in tables]) + + loop = get_event_loop() + streams = loop.run_until_complete(get_streams()) + logger.info(f"Provided {len(streams)} streams to the Airbyte Catalog.") + return AirbyteCatalog(streams=streams) + + def read( + self, + logger: AirbyteLogger, + config: json, + catalog: ConfiguredAirbyteCatalog, + state: Dict[str, any], + ) -> Generator[AirbyteMessage, None, None]: + """ + Returns a generator of the AirbyteMessages generated by reading the source with the given configuration, + catalog, and state. + + :param logger: Logging object to display debug/info/error to the logs + (logs will not be accessible via airbyte UI if they are not passed to this logger) + :param config: Json object containing the configuration of this source, content of this json is as specified in + the properties of the spec.json file + :param catalog: The input catalog is a ConfiguredAirbyteCatalog which is almost the same as AirbyteCatalog + returned by discover(), but + in addition, it's been configured in the UI! For each particular stream and field, there may have been provided + with extra modifications such as: filtering streams and/or columns out, renaming some entities, etc + :param state: When Airbyte reads data from a source, it might need to keep a checkpoint cursor to resume + replication in the future from that saved checkpoint.
+ This is the object that is provided with state from previous runs and avoid replicating the entire set of + data everytime. + + :return: A generator that produces a stream of AirbyteRecordMessage contained in AirbyteMessage object. + """ + + logger.info(f"Reading data from {len(catalog.streams)} Firebolt tables.") + with establish_connection(config, logger) as connection: + with connection.cursor() as cursor: + for c_stream in catalog.streams: + table_name = c_stream.stream.name + table_properties = c_stream.stream.json_schema["properties"] + columns = list(table_properties.keys()) + + # Escape columns with " to avoid reserved keywords e.g. id + escaped_columns = ['"{}"'.format(col) for col in columns] + + query = "SELECT {columns} FROM {table}".format(columns=",".join(escaped_columns), table=table_name) + cursor.execute(query) + + logger.info(f"Fetched {cursor.rowcount} rows from table {table_name}.") + for result in cursor.fetchall(): + message = airbyte_message_from_data(result, columns, table_name) + if message: + yield message + logger.info("Data read complete.") diff --git a/airbyte-integrations/connectors/source-firebolt/source_firebolt/spec.json b/airbyte-integrations/connectors/source-firebolt/source_firebolt/spec.json new file mode 100644 index 000000000000..b3423e1eb4c1 --- /dev/null +++ b/airbyte-integrations/connectors/source-firebolt/source_firebolt/spec.json @@ -0,0 +1,44 @@ +{ + "documentationUrl": "https://docs.airbyte.io/integrations/sources/firebolt", + "connectionSpecification": { + "$schema": "http://json-schema.org/draft-07/schema#", + "title": "Firebolt Spec", + "type": "object", + "required": ["username", "password", "database"], + "additionalProperties": false, + "properties": { + "username": { + "type": "string", + "title": "Username", + "description": "Firebolt email address you use to login.", + "examples": ["username@email.com"] + }, + "password": { + "type": "string", + "title": "Password", + "description": "Firebolt password." 
+ }, + "account": { + "type": "string", + "title": "Account", + "description": "Firebolt account to login." + }, + "host": { + "type": "string", + "title": "Host", + "description": "The host name of your Firebolt database.", + "examples": ["api.app.firebolt.io"] + }, + "database": { + "type": "string", + "title": "Database", + "description": "The database to connect to." + }, + "engine": { + "type": "string", + "title": "Engine", + "description": "Engine name or url to connect to." + } + } + } +} diff --git a/airbyte-integrations/connectors/source-firebolt/source_firebolt/utils.py b/airbyte-integrations/connectors/source-firebolt/source_firebolt/utils.py new file mode 100644 index 000000000000..f5f9e832b199 --- /dev/null +++ b/airbyte-integrations/connectors/source-firebolt/source_firebolt/utils.py @@ -0,0 +1,113 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + +from datetime import date, datetime +from decimal import Decimal +from typing import Any, Dict, List, Optional, Union + +from airbyte_cdk.models import AirbyteMessage, AirbyteRecordMessage, Type + + +def convert_type(fb_type: str, nullable: bool) -> Dict[str, Union[str, Dict]]: + """ + Convert from Firebolt type to Airbyte. If type is not defined in + Firebolt then it will be set to string, as per Airbyte recommendation. + More on Firebolt types can be found in docs: + https://docs.firebolt.io/general-reference/data-types.html + + :param fb_type: Firebolt type. + + :return: Dict containing Airbyte type specification.
+ """ + map = { + "VARCHAR": {"type": "string"}, + "TEXT": {"type": "string"}, + "STRING": {"type": "string"}, + "INTEGER": {"type": "integer"}, + "INT": {"type": "integer"}, + "FLOAT": {"type": "number"}, + "DOUBLE": {"type": "number"}, + "DOUBLE PRECISION": {"type": "number"}, + "BOOLEAN": {"type": "integer"}, + # Firebolt bigint is max 8 byte so it fits in Airbyte's "integer" + "BIGINT": {"type": "integer"}, + "LONG": {"type": "integer"}, + "DECIMAL": {"type": "string", "airbyte_type": "big_number"}, + "DATE": {"type": "string", "format": "date"}, + "TIMESTAMP": { + "type": "string", + "format": "datetime", + "airbyte_type": "timestamp_without_timezone", + }, + "DATETIME": { + "type": "string", + "format": "datetime", + "airbyte_type": "timestamp_without_timezone", + }, + } + if fb_type.upper().startswith("ARRAY"): + inner_type = fb_type[6:-1] # Strip ARRAY() + # Array can't be nullable, but items can + airbyte_type = convert_type(inner_type, nullable=True) + result = {"type": "array", "items": airbyte_type} + else: + result = map.get(fb_type.upper(), {"type": "string"}) + if nullable: + result["type"] = ["null", result["type"]] + return result + + +def format_fetch_result(data: List[Any]) -> List[List[Any]]: + """ + Format data from a firebolt query to be compatible with Airbyte, + convert Firebolt timestamp string to Airbyte. + Firebolt stores dates in YYYY-MM-DD HH:mm:SS format. + Airbyte requires YYYY-MM-DDTHH:mm:SS. + + :param data: list of data items that may require conversion. + Example: [Decimal("22.1000921"), [2,3,4], datetime.datetime('2021-01-01 10:11:02')] + + :return: List of the same data as passed that's been converted to compatible types. 
+ https://docs.airbyte.com/understanding-airbyte/supported-data-types/#the-types + """ + + for idx, item in enumerate(data): + if type(item) == datetime: + data[idx] = item.isoformat() + elif type(item) == date: + data[idx] = str(item) + elif type(item) == list: + data[idx] = format_fetch_result(item) + elif type(item) == Decimal: + data[idx] = str(item) + return data + + +def airbyte_message_from_data(raw_data: List[Any], columns: List[str], table_name: str) -> Optional[AirbyteMessage]: + """ + Wrap data into an AirbyteMessage. + + :param raw_data: Raw data row returned from a fetch query. Each item in the list + represents a row of data. + Example: [10, "Oranges"] + :param columns: List of column names + Example: ["Quantity", "Fruit"] + :param table_name: Name of a table where data was fetched from + + :return: AirbyteMessage containing parsed data + """ + raw_data = format_fetch_result(raw_data) + data = dict(zip(columns, raw_data)) + # Remove empty values + data = {k: v for k, v in data.items() if v is not None} + if not data: + return None + return AirbyteMessage( + type=Type.RECORD, + record=AirbyteRecordMessage( + stream=table_name, + data=data, + emitted_at=int(datetime.now().timestamp()) * 1000, + ), + ) diff --git a/airbyte-integrations/connectors/source-firebolt/unit_tests/test_firebolt_source.py b/airbyte-integrations/connectors/source-firebolt/unit_tests/test_firebolt_source.py new file mode 100644 index 000000000000..19cf1c8d2232 --- /dev/null +++ b/airbyte-integrations/connectors/source-firebolt/unit_tests/test_firebolt_source.py @@ -0,0 +1,294 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+# + +from datetime import date, datetime +from decimal import Decimal +from unittest.mock import AsyncMock, MagicMock, patch + +from airbyte_cdk.models import ( + AirbyteMessage, + AirbyteRecordMessage, + AirbyteStream, + ConfiguredAirbyteCatalog, + ConfiguredAirbyteStream, + DestinationSyncMode, + Status, + SyncMode, + Type, +) +from pytest import fixture, mark +from source_firebolt.database import parse_config +from source_firebolt.source import ( + SUPPORTED_SYNC_MODES, + SourceFirebolt, + convert_type, + establish_connection, + get_firebolt_tables, + get_table_stream, +) +from source_firebolt.utils import airbyte_message_from_data, format_fetch_result + + +@fixture(params=["my_engine", "my_engine.api.firebolt.io"]) +def config(request): + args = { + "database": "my_database", + "username": "my_username", + "password": "my_password", + "engine": request.param, + } + return args + + +@fixture() +def config_no_engine(): + args = { + "database": "my_database", + "username": "my_username", + "password": "my_password", + } + return args + + +@fixture +def stream1() -> AirbyteStream: + stream1 = AirbyteStream( + name="table1", + supported_sync_modes=SUPPORTED_SYNC_MODES, + json_schema={ + "type": "object", + "properties": {"col1": {"type": "string"}, "col2": {"type": "integer"}}, + }, + ) + return stream1 + + +@fixture +def stream2() -> AirbyteStream: + stream2 = AirbyteStream( + name="table2", + supported_sync_modes=SUPPORTED_SYNC_MODES, + json_schema={ + "type": "object", + "properties": { + "col3": {"type": "array", "items": {"type": ["null", "string"]}}, + "col4": {"type": "string", "airbyte_type": "big_number"}, + }, + }, + ) + return stream2 + + +@fixture +def table1_structure(): + return [("table1", "col1", "STRING", 0), ("table1", "col2", "INT", 0)] + + +@fixture +def table2_structure(): + return [("table2", "col3", "ARRAY", 0), ("table2", "col4", "DECIMAL", 0)] + + +@fixture +def logger(): + return MagicMock() + + +@fixture(name="mock_connection") +def 
async_connection_cursor_mock(): + connection = MagicMock() + cursor = AsyncMock() + connection.cursor.return_value = cursor + return connection, cursor + + +def test_parse_config(config, logger): + config["engine"] = "override_engine" + result = parse_config(config, logger) + assert result["database"] == "my_database" + assert result["engine_name"] == "override_engine" + assert result["auth"].username == "my_username" + assert result["auth"].password == "my_password" + config["engine"] = "override_engine.api.firebolt.io" + result = parse_config(config, logger) + assert result["engine_url"] == "override_engine.api.firebolt.io" + + +@patch("source_firebolt.database.connect") +def test_connection(mock_connection, config, config_no_engine, logger): + establish_connection(config, logger) + logger.reset_mock() + establish_connection(config_no_engine, logger) + assert any(["default engine" in msg.args[0] for msg in logger.info.mock_calls]), "No message on using default engine" + + +@mark.parametrize( + "type,nullable,result", + [ + ("VARCHAR", False, {"type": "string"}), + ("INT", False, {"type": "integer"}), + ("int", False, {"type": "integer"}), + ("LONG", False, {"type": "integer"}), + ( + "TIMESTAMP", + False, + { + "type": "string", + "format": "datetime", + "airbyte_type": "timestamp_without_timezone", + }, + ), + ("ARRAY(ARRAY(INT))", False, {"type": "array", "items": {"type": "array", "items": {"type": ["null", "integer"]}}}), + ("int", True, {"type": ["null", "integer"]}), + ("DUMMY", False, {"type": "string"}), + ("boolean", False, {"type": "integer"}), + ], +) +def test_convert_type(type, nullable, result): + assert convert_type(type, nullable) == result + + +@mark.parametrize( + "data,expected", + [ + ( + ["a", 1], + ["a", 1], + ), + ([datetime.fromisoformat("2019-01-01 20:12:02"), 2], ["2019-01-01T20:12:02", 2]), + ([[date.fromisoformat("2019-01-01"), 2], 0.2214], [["2019-01-01", 2], 0.2214]), + ([[None, 2], None], [[None, 2], None]), + 
([Decimal("1231232.123459999990457054844258706536")], ["1231232.123459999990457054844258706536"]), + ], +) +def test_format_fetch_result(data, expected): + assert format_fetch_result(data) == expected + + +@patch("source_firebolt.utils.datetime") +def test_airbyte_message_from_data(mock_datetime): + mock_datetime.now.return_value.timestamp.return_value = 10 + raw_data = [1, "a", [1, 2, 3]] + columns = ["Col1", "Col2", "Col3"] + table_name = "dummy" + expected = AirbyteMessage( + type=Type.RECORD, + record=AirbyteRecordMessage( + stream="dummy", + data={"Col1": 1, "Col2": "a", "Col3": [1, 2, 3]}, + emitted_at=10000, + ), + ) + result = airbyte_message_from_data(raw_data, columns, table_name) + assert result == expected + + +def test_airbyte_message_from_data_no_data(): + raw_data = [] + columns = ["Col1", "Col2"] + table_name = "dummy" + result = airbyte_message_from_data(raw_data, columns, table_name) + assert result is None + + +@mark.asyncio +async def test_get_firebolt_tables(mock_connection): + connection, cursor = mock_connection + cursor.fetchall.return_value = [("table1",), ("table2",)] + result = await get_firebolt_tables(connection) + assert result == ["table1", "table2"] + + +@mark.asyncio +async def test_get_table_stream(mock_connection, table1_structure, stream1): + connection, cursor = mock_connection + cursor.fetchall.return_value = table1_structure + result = await get_table_stream(connection, "table1") + assert result == stream1 + + +@patch("source_firebolt.source.establish_connection") +def test_check(mock_connection, config, logger): + source = SourceFirebolt() + status = source.check(logger, config) + assert status.status == Status.SUCCEEDED + mock_connection().__enter__().cursor().__enter__().execute.side_effect = Exception("my exception") + status = source.check(logger, config) + assert status.status == Status.FAILED + + +@patch("source_firebolt.source.get_table_stream") +@patch("source_firebolt.source.establish_async_connection") +def 
test_discover( + mock_establish_connection, + mock_get_stream, + mock_connection, + config, + stream1, + stream2, + logger, +): + connection, cursor = mock_connection + cursor.fetchall.return_value = ["table1", "table2"] + mock_establish_connection.return_value.__aenter__.return_value = connection + mock_get_stream.side_effect = [stream1, stream2] + + source = SourceFirebolt() + catalog = source.discover(logger, config) + assert catalog.streams[0].name == "table1" + assert catalog.streams[1].name == "table2" + assert catalog.streams[0].json_schema == stream1.json_schema + assert catalog.streams[1].json_schema == stream2.json_schema + mock_establish_connection.assert_awaited_once_with(config, logger) + + +@patch("source_firebolt.source.establish_connection") +def test_read_no_state(mock_connection, config, stream1, logger): + source = SourceFirebolt() + + c_stream = ConfiguredAirbyteStream( + sync_mode=SyncMode.full_refresh, + destination_sync_mode=DestinationSyncMode.overwrite, + stream=stream1, + ) + catalog = ConfiguredAirbyteCatalog(streams=[c_stream]) + mock_connection().__enter__().cursor().__enter__().fetchall().__iter__.return_value = iter( + [ + ["s_value1", 1], + ["s_value2", 2], + ] + ) + message1 = next(source.read(logger, config, catalog, {})) + assert message1.record.stream == stream1.name + assert message1.record.data == {"col1": "s_value1", "col2": 1} + message2 = next(source.read(logger, config, catalog, {})) + assert message2.record.stream == stream1.name + assert message2.record.data == {"col1": "s_value2", "col2": 2} + + +@patch("source_firebolt.source.establish_connection") +def test_read_special_types_no_state(mock_connection, config, stream2, logger): + source = SourceFirebolt() + + c_stream = ConfiguredAirbyteStream( + sync_mode=SyncMode.full_refresh, + destination_sync_mode=DestinationSyncMode.overwrite, + stream=stream2, + ) + catalog = ConfiguredAirbyteCatalog(streams=[c_stream]) + 
mock_connection().__enter__().cursor().__enter__().fetchall().__iter__.return_value = iter( + [ + [ + [datetime.fromisoformat("2019-01-01 20:12:02"), datetime.fromisoformat("2019-02-01 20:12:02")], + Decimal("1231232.123459999990457054844258706536"), + ], + ] + ) + + message1 = next(source.read(logger, config, catalog, {})) + assert message1.record.stream == stream2.name + assert message1.record.data == { + "col3": ["2019-01-01T20:12:02", "2019-02-01T20:12:02"], + "col4": "1231232.123459999990457054844258706536", + } diff --git a/airbyte-integrations/connectors/source-github/Dockerfile b/airbyte-integrations/connectors/source-github/Dockerfile index d2bb7e7f0a5f..5f2b7be57460 100644 --- a/airbyte-integrations/connectors/source-github/Dockerfile +++ b/airbyte-integrations/connectors/source-github/Dockerfile @@ -12,5 +12,5 @@ RUN pip install . ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] -LABEL io.airbyte.version=0.2.33 +LABEL io.airbyte.version=0.2.38 LABEL io.airbyte.name=airbyte/source-github diff --git a/airbyte-integrations/connectors/source-github/acceptance-test-config.yml b/airbyte-integrations/connectors/source-github/acceptance-test-config.yml index bee378b5049a..35588853b0cc 100644 --- a/airbyte-integrations/connectors/source-github/acceptance-test-config.yml +++ b/airbyte-integrations/connectors/source-github/acceptance-test-config.yml @@ -42,7 +42,7 @@ tests: releases: ["airbytehq/integration-test", "created_at"] repositories: ["airbytehq", "updated_at"] review_comments: ["airbytehq/integration-test", "updated_at"] - reviews: ["airbytehq/integration-test", "pull_request_updated_at"] + reviews: ["airbytehq/integration-test", "updated_at"] stargazers: ["airbytehq/integration-test", "starred_at"] workflow_runs: ["airbytehq/integration-test", "updated_at"] workflows: ["airbytehq/integration-test", "updated_at"] diff --git 
a/airbyte-integrations/connectors/source-github/integration_tests/abnormal_state.json b/airbyte-integrations/connectors/source-github/integration_tests/abnormal_state.json index 086002a01cac..3b0fb905f2a9 100644 --- a/airbyte-integrations/connectors/source-github/integration_tests/abnormal_state.json +++ b/airbyte-integrations/connectors/source-github/integration_tests/abnormal_state.json @@ -121,7 +121,7 @@ }, "reviews": { "airbytehq/integration-test": { - "pull_request_updated_at": "2121-06-29T02:04:57Z" + "updated_at": "2121-06-29T02:04:57Z" } }, "stargazers": { diff --git a/airbyte-integrations/connectors/source-github/integration_tests/configured_catalog.json b/airbyte-integrations/connectors/source-github/integration_tests/configured_catalog.json index afe7f85524f8..bd84a9f6f97d 100644 --- a/airbyte-integrations/connectors/source-github/integration_tests/configured_catalog.json +++ b/airbyte-integrations/connectors/source-github/integration_tests/configured_catalog.json @@ -311,12 +311,12 @@ "json_schema": {}, "supported_sync_modes": ["full_refresh", "incremental"], "source_defined_cursor": true, - "default_cursor_field": ["pull_request_updated_at"], + "default_cursor_field": ["updated_at"], "source_defined_primary_key": [["id"]] }, "sync_mode": "incremental", "destination_sync_mode": "append", - "cursor_field": ["pull_request_updated_at"] + "cursor_field": ["updated_at"] }, { "stream": { diff --git a/airbyte-integrations/connectors/source-github/integration_tests/sample_state.json b/airbyte-integrations/connectors/source-github/integration_tests/sample_state.json index b6d55e18331c..932d299c0220 100644 --- a/airbyte-integrations/connectors/source-github/integration_tests/sample_state.json +++ b/airbyte-integrations/connectors/source-github/integration_tests/sample_state.json @@ -61,7 +61,7 @@ }, "reviews": { "airbytehq/integration-test": { - "pull_request_updated_at": "2021-08-30T12:01:15Z" + "updated_at": "2021-08-30T12:01:15Z" } } } diff --git 
a/airbyte-integrations/connectors/source-github/setup.py b/airbyte-integrations/connectors/source-github/setup.py index 2f966207e021..52380693ac32 100644 --- a/airbyte-integrations/connectors/source-github/setup.py +++ b/airbyte-integrations/connectors/source-github/setup.py @@ -5,11 +5,7 @@ from setuptools import find_packages, setup -MAIN_REQUIREMENTS = [ - "airbyte-cdk~=0.1.33", - "vcrpy==4.1.1", - "pendulum~=2.1.2", -] +MAIN_REQUIREMENTS = ["airbyte-cdk~=0.1.33", "vcrpy==4.1.1", "pendulum~=2.1.2", "sgqlc"] TEST_REQUIREMENTS = ["pytest~=6.1", "source-acceptance-test", "responses~=0.19.0"] diff --git a/airbyte-integrations/connectors/source-github/source_github/github_schema.py b/airbyte-integrations/connectors/source-github/source_github/github_schema.py new file mode 100644 index 000000000000..9f223ec59d96 --- /dev/null +++ b/airbyte-integrations/connectors/source-github/source_github/github_schema.py @@ -0,0 +1,34194 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + +import sgqlc.types +import sgqlc.types.datetime +import sgqlc.types.relay + +github_schema = sgqlc.types.Schema() + + +# Unexport Node/PageInfo, let schema re-declare them +github_schema -= sgqlc.types.relay.Node +github_schema -= sgqlc.types.relay.PageInfo + + +__docformat__ = "markdown" + + +######################################################################## +# Scalars and Enumerations +######################################################################## +class ActorType(sgqlc.types.Enum): + """The actor's type. + + Enumeration Choices: + + * `TEAM`: Indicates a team actor. + * `USER`: Indicates a user actor. + """ + + __schema__ = github_schema + __choices__ = ("TEAM", "USER") + + +class AuditLogOrderField(sgqlc.types.Enum): + """Properties by which Audit Log connections can be ordered. 
+ + Enumeration Choices: + + * `CREATED_AT`: Order audit log entries by timestamp + """ + + __schema__ = github_schema + __choices__ = ("CREATED_AT",) + + +class Base64String(sgqlc.types.Scalar): + """A (potentially binary) string encoded using base64.""" + + __schema__ = github_schema + + +Boolean = sgqlc.types.Boolean + + +class CheckAnnotationLevel(sgqlc.types.Enum): + """Represents an annotation's information level. + + Enumeration Choices: + + * `FAILURE`: An annotation indicating an inescapable error. + * `NOTICE`: An annotation indicating some information. + * `WARNING`: An annotation indicating an ignorable error. + """ + + __schema__ = github_schema + __choices__ = ("FAILURE", "NOTICE", "WARNING") + + +class CheckConclusionState(sgqlc.types.Enum): + """The possible states for a check suite or run conclusion. + + Enumeration Choices: + + * `ACTION_REQUIRED`: The check suite or run requires action. + * `CANCELLED`: The check suite or run has been cancelled. + * `FAILURE`: The check suite or run has failed. + * `NEUTRAL`: The check suite or run was neutral. + * `SKIPPED`: The check suite or run was skipped. + * `STALE`: The check suite or run was marked stale by GitHub. Only + GitHub can use this conclusion. + * `STARTUP_FAILURE`: The check suite or run has failed at startup. + * `SUCCESS`: The check suite or run has succeeded. + * `TIMED_OUT`: The check suite or run has timed out. + """ + + __schema__ = github_schema + __choices__ = ("ACTION_REQUIRED", "CANCELLED", "FAILURE", "NEUTRAL", "SKIPPED", "STALE", "STARTUP_FAILURE", "SUCCESS", "TIMED_OUT") + + +class CheckRunType(sgqlc.types.Enum): + """The possible types of check runs. + + Enumeration Choices: + + * `ALL`: Every check run available. + * `LATEST`: The latest check run. + """ + + __schema__ = github_schema + __choices__ = ("ALL", "LATEST") + + +class CheckStatusState(sgqlc.types.Enum): + """The possible states for a check suite or run status. 
+ + Enumeration Choices: + + * `COMPLETED`: The check suite or run has been completed. + * `IN_PROGRESS`: The check suite or run is in progress. + * `PENDING`: The check suite or run is in pending state. + * `QUEUED`: The check suite or run has been queued. + * `REQUESTED`: The check suite or run has been requested. + * `WAITING`: The check suite or run is in waiting state. + """ + + __schema__ = github_schema + __choices__ = ("COMPLETED", "IN_PROGRESS", "PENDING", "QUEUED", "REQUESTED", "WAITING") + + +class CollaboratorAffiliation(sgqlc.types.Enum): + """Collaborators affiliation level with a subject. + + Enumeration Choices: + + * `ALL`: All collaborators the authenticated user can see. + * `DIRECT`: All collaborators with permissions to an organization- + owned subject, regardless of organization membership status. + * `OUTSIDE`: All outside collaborators of an organization-owned + subject. + """ + + __schema__ = github_schema + __choices__ = ("ALL", "DIRECT", "OUTSIDE") + + +class CommentAuthorAssociation(sgqlc.types.Enum): + """A comment author association with repository. + + Enumeration Choices: + + * `COLLABORATOR`: Author has been invited to collaborate on the + repository. + * `CONTRIBUTOR`: Author has previously committed to the + repository. + * `FIRST_TIMER`: Author has not previously committed to GitHub. + * `FIRST_TIME_CONTRIBUTOR`: Author has not previously committed to + the repository. + * `MANNEQUIN`: Author is a placeholder for an unclaimed user. + * `MEMBER`: Author is a member of the organization that owns the + repository. + * `NONE`: Author has no association with the repository. + * `OWNER`: Author is the owner of the repository. + """ + + __schema__ = github_schema + __choices__ = ("COLLABORATOR", "CONTRIBUTOR", "FIRST_TIMER", "FIRST_TIME_CONTRIBUTOR", "MANNEQUIN", "MEMBER", "NONE", "OWNER") + + +class CommentCannotUpdateReason(sgqlc.types.Enum): + """The possible errors that will prevent a user from updating a + comment. 
+ + Enumeration Choices: + + * `ARCHIVED`: Unable to create comment because repository is + archived. + * `DENIED`: You cannot update this comment + * `INSUFFICIENT_ACCESS`: You must be the author or have write + access to this repository to update this comment. + * `LOCKED`: Unable to create comment because issue is locked. + * `LOGIN_REQUIRED`: You must be logged in to update this comment. + * `MAINTENANCE`: Repository is under maintenance. + * `VERIFIED_EMAIL_REQUIRED`: At least one email address must be + verified to update this comment. + """ + + __schema__ = github_schema + __choices__ = ("ARCHIVED", "DENIED", "INSUFFICIENT_ACCESS", "LOCKED", "LOGIN_REQUIRED", "MAINTENANCE", "VERIFIED_EMAIL_REQUIRED") + + +class CommitContributionOrderField(sgqlc.types.Enum): + """Properties by which commit contribution connections can be + ordered. + + Enumeration Choices: + + * `COMMIT_COUNT`: Order commit contributions by how many commits + they represent. + * `OCCURRED_AT`: Order commit contributions by when they were + made. + """ + + __schema__ = github_schema + __choices__ = ("COMMIT_COUNT", "OCCURRED_AT") + + +class ContributionLevel(sgqlc.types.Enum): + """Varying levels of contributions from none to many. + + Enumeration Choices: + + * `FIRST_QUARTILE`: Lowest 25% of days of contributions. + * `FOURTH_QUARTILE`: Highest 25% of days of contributions. More + contributions than the third quartile. + * `NONE`: No contributions occurred. + * `SECOND_QUARTILE`: Second lowest 25% of days of contributions. + More contributions than the first quartile. + * `THIRD_QUARTILE`: Second highest 25% of days of contributions. + More contributions than second quartile, less than the fourth + quartile. 
+ """ + + __schema__ = github_schema + __choices__ = ("FIRST_QUARTILE", "FOURTH_QUARTILE", "NONE", "SECOND_QUARTILE", "THIRD_QUARTILE") + + +Date = sgqlc.types.datetime.Date + +DateTime = sgqlc.types.datetime.DateTime + + +class DefaultRepositoryPermissionField(sgqlc.types.Enum): + """The possible base permissions for repositories. + + Enumeration Choices: + + * `ADMIN`: Can read, write, and administrate repos by default + * `NONE`: No access + * `READ`: Can read repos by default + * `WRITE`: Can read and write repos by default + """ + + __schema__ = github_schema + __choices__ = ("ADMIN", "NONE", "READ", "WRITE") + + +class DependencyGraphEcosystem(sgqlc.types.Enum): + """The possible ecosystems of a dependency graph package. + + Enumeration Choices: + + * `ACTIONS`: GitHub Actions + * `COMPOSER`: PHP packages hosted at packagist.org + * `GO`: Go modules + * `MAVEN`: Java artifacts hosted at the Maven central repository + * `NPM`: JavaScript packages hosted at npmjs.com + * `NUGET`: .NET packages hosted at the NuGet Gallery + * `PIP`: Python packages hosted at PyPI.org + * `RUBYGEMS`: Ruby gems hosted at RubyGems.org + * `RUST`: Rust crates + """ + + __schema__ = github_schema + __choices__ = ("ACTIONS", "COMPOSER", "GO", "MAVEN", "NPM", "NUGET", "PIP", "RUBYGEMS", "RUST") + + +class DeploymentOrderField(sgqlc.types.Enum): + """Properties by which deployment connections can be ordered. + + Enumeration Choices: + + * `CREATED_AT`: Order collection by creation time + """ + + __schema__ = github_schema + __choices__ = ("CREATED_AT",) + + +class DeploymentProtectionRuleType(sgqlc.types.Enum): + """The possible protection rule types. + + Enumeration Choices: + + * `REQUIRED_REVIEWERS`: Required reviewers + * `WAIT_TIMER`: Wait timer + """ + + __schema__ = github_schema + __choices__ = ("REQUIRED_REVIEWERS", "WAIT_TIMER") + + +class DeploymentReviewState(sgqlc.types.Enum): + """The possible states for a deployment review. 
+ + Enumeration Choices: + + * `APPROVED`: The deployment was approved. + * `REJECTED`: The deployment was rejected. + """ + + __schema__ = github_schema + __choices__ = ("APPROVED", "REJECTED") + + +class DeploymentState(sgqlc.types.Enum): + """The possible states in which a deployment can be. + + Enumeration Choices: + + * `ABANDONED`: The pending deployment was not updated after 30 + minutes. + * `ACTIVE`: The deployment is currently active. + * `DESTROYED`: An inactive transient deployment. + * `ERROR`: The deployment experienced an error. + * `FAILURE`: The deployment has failed. + * `INACTIVE`: The deployment is inactive. + * `IN_PROGRESS`: The deployment is in progress. + * `PENDING`: The deployment is pending. + * `QUEUED`: The deployment has queued + * `WAITING`: The deployment is waiting. + """ + + __schema__ = github_schema + __choices__ = ("ABANDONED", "ACTIVE", "DESTROYED", "ERROR", "FAILURE", "INACTIVE", "IN_PROGRESS", "PENDING", "QUEUED", "WAITING") + + +class DeploymentStatusState(sgqlc.types.Enum): + """The possible states for a deployment status. + + Enumeration Choices: + + * `ERROR`: The deployment experienced an error. + * `FAILURE`: The deployment has failed. + * `INACTIVE`: The deployment is inactive. + * `IN_PROGRESS`: The deployment is in progress. + * `PENDING`: The deployment is pending. + * `QUEUED`: The deployment is queued + * `SUCCESS`: The deployment was successful. + * `WAITING`: The deployment is waiting. + """ + + __schema__ = github_schema + __choices__ = ("ERROR", "FAILURE", "INACTIVE", "IN_PROGRESS", "PENDING", "QUEUED", "SUCCESS", "WAITING") + + +class DiffSide(sgqlc.types.Enum): + """The possible sides of a diff. + + Enumeration Choices: + + * `LEFT`: The left side of the diff. + * `RIGHT`: The right side of the diff. + """ + + __schema__ = github_schema + __choices__ = ("LEFT", "RIGHT") + + +class DiscussionOrderField(sgqlc.types.Enum): + """Properties by which discussion connections can be ordered. 
+ + Enumeration Choices: + + * `CREATED_AT`: Order discussions by creation time. + * `UPDATED_AT`: Order discussions by most recent modification + time. + """ + + __schema__ = github_schema + __choices__ = ("CREATED_AT", "UPDATED_AT") + + +class DiscussionPollOptionOrderField(sgqlc.types.Enum): + """Properties by which discussion poll option connections can be + ordered. + + Enumeration Choices: + + * `AUTHORED_ORDER`: Order poll options by the order that the poll + author specified when creating the poll. + * `VOTE_COUNT`: Order poll options by the number of votes it has. + """ + + __schema__ = github_schema + __choices__ = ("AUTHORED_ORDER", "VOTE_COUNT") + + +class DismissReason(sgqlc.types.Enum): + """The possible reasons that a Dependabot alert was dismissed. + + Enumeration Choices: + + * `FIX_STARTED`: A fix has already been started + * `INACCURATE`: This alert is inaccurate or incorrect + * `NOT_USED`: Vulnerable code is not actually used + * `NO_BANDWIDTH`: No bandwidth to fix this + * `TOLERABLE_RISK`: Risk is tolerable to this project + """ + + __schema__ = github_schema + __choices__ = ("FIX_STARTED", "INACCURATE", "NOT_USED", "NO_BANDWIDTH", "TOLERABLE_RISK") + + +class EnterpriseAdministratorInvitationOrderField(sgqlc.types.Enum): + """Properties by which enterprise administrator invitation + connections can be ordered. + + Enumeration Choices: + + * `CREATED_AT`: Order enterprise administrator member invitations + by creation time + """ + + __schema__ = github_schema + __choices__ = ("CREATED_AT",) + + +class EnterpriseAdministratorRole(sgqlc.types.Enum): + """The possible administrator roles in an enterprise account. + + Enumeration Choices: + + * `BILLING_MANAGER`: Represents a billing manager of the + enterprise account. + * `OWNER`: Represents an owner of the enterprise account. 
+ """ + + __schema__ = github_schema + __choices__ = ("BILLING_MANAGER", "OWNER") + + +class EnterpriseDefaultRepositoryPermissionSettingValue(sgqlc.types.Enum): + """The possible values for the enterprise base repository permission + setting. + + Enumeration Choices: + + * `ADMIN`: Organization members will be able to clone, pull, push, + and add new collaborators to all organization repositories. + * `NONE`: Organization members will only be able to clone and pull + public repositories. + * `NO_POLICY`: Organizations in the enterprise choose base + repository permissions for their members. + * `READ`: Organization members will be able to clone and pull all + organization repositories. + * `WRITE`: Organization members will be able to clone, pull, and + push all organization repositories. + """ + + __schema__ = github_schema + __choices__ = ("ADMIN", "NONE", "NO_POLICY", "READ", "WRITE") + + +class EnterpriseEnabledDisabledSettingValue(sgqlc.types.Enum): + """The possible values for an enabled/disabled enterprise setting. + + Enumeration Choices: + + * `DISABLED`: The setting is disabled for organizations in the + enterprise. + * `ENABLED`: The setting is enabled for organizations in the + enterprise. + * `NO_POLICY`: There is no policy set for organizations in the + enterprise. + """ + + __schema__ = github_schema + __choices__ = ("DISABLED", "ENABLED", "NO_POLICY") + + +class EnterpriseEnabledSettingValue(sgqlc.types.Enum): + """The possible values for an enabled/no policy enterprise setting. + + Enumeration Choices: + + * `ENABLED`: The setting is enabled for organizations in the + enterprise. + * `NO_POLICY`: There is no policy set for organizations in the + enterprise. + """ + + __schema__ = github_schema + __choices__ = ("ENABLED", "NO_POLICY") + + +class EnterpriseMemberOrderField(sgqlc.types.Enum): + """Properties by which enterprise member connections can be ordered. 
+ + Enumeration Choices: + + * `CREATED_AT`: Order enterprise members by creation time + * `LOGIN`: Order enterprise members by login + """ + + __schema__ = github_schema + __choices__ = ("CREATED_AT", "LOGIN") + + +class EnterpriseMembersCanCreateRepositoriesSettingValue(sgqlc.types.Enum): + """The possible values for the enterprise members can create + repositories setting. + + Enumeration Choices: + + * `ALL`: Members will be able to create public and private + repositories. + * `DISABLED`: Members will not be able to create public or private + repositories. + * `NO_POLICY`: Organization administrators choose whether to allow + members to create repositories. + * `PRIVATE`: Members will be able to create only private + repositories. + * `PUBLIC`: Members will be able to create only public + repositories. + """ + + __schema__ = github_schema + __choices__ = ("ALL", "DISABLED", "NO_POLICY", "PRIVATE", "PUBLIC") + + +class EnterpriseMembersCanMakePurchasesSettingValue(sgqlc.types.Enum): + """The possible values for the members can make purchases setting. + + Enumeration Choices: + + * `DISABLED`: The setting is disabled for organizations in the + enterprise. + * `ENABLED`: The setting is enabled for organizations in the + enterprise. + """ + + __schema__ = github_schema + __choices__ = ("DISABLED", "ENABLED") + + +class EnterpriseServerInstallationOrderField(sgqlc.types.Enum): + """Properties by which Enterprise Server installation connections can + be ordered. + + Enumeration Choices: + + * `CREATED_AT`: Order Enterprise Server installations by creation + time + * `CUSTOMER_NAME`: Order Enterprise Server installations by + customer name + * `HOST_NAME`: Order Enterprise Server installations by host name + """ + + __schema__ = github_schema + __choices__ = ("CREATED_AT", "CUSTOMER_NAME", "HOST_NAME") + + +class EnterpriseServerUserAccountEmailOrderField(sgqlc.types.Enum): + """Properties by which Enterprise Server user account email + connections can be ordered. 
+ + Enumeration Choices: + + * `EMAIL`: Order emails by email + """ + + __schema__ = github_schema + __choices__ = ("EMAIL",) + + +class EnterpriseServerUserAccountOrderField(sgqlc.types.Enum): + """Properties by which Enterprise Server user account connections can + be ordered. + + Enumeration Choices: + + * `LOGIN`: Order user accounts by login + * `REMOTE_CREATED_AT`: Order user accounts by creation time on the + Enterprise Server installation + """ + + __schema__ = github_schema + __choices__ = ("LOGIN", "REMOTE_CREATED_AT") + + +class EnterpriseServerUserAccountsUploadOrderField(sgqlc.types.Enum): + """Properties by which Enterprise Server user accounts upload + connections can be ordered. + + Enumeration Choices: + + * `CREATED_AT`: Order user accounts uploads by creation time + """ + + __schema__ = github_schema + __choices__ = ("CREATED_AT",) + + +class EnterpriseServerUserAccountsUploadSyncState(sgqlc.types.Enum): + """Synchronization state of the Enterprise Server user accounts + upload + + Enumeration Choices: + + * `FAILURE`: The synchronization of the upload failed. + * `PENDING`: The synchronization of the upload is pending. + * `SUCCESS`: The synchronization of the upload succeeded. + """ + + __schema__ = github_schema + __choices__ = ("FAILURE", "PENDING", "SUCCESS") + + +class EnterpriseUserAccountMembershipRole(sgqlc.types.Enum): + """The possible roles for enterprise membership. + + Enumeration Choices: + + * `MEMBER`: The user is a member of an organization in the + enterprise. + * `OWNER`: The user is an owner of an organization in the + enterprise. + """ + + __schema__ = github_schema + __choices__ = ("MEMBER", "OWNER") + + +class EnterpriseUserDeployment(sgqlc.types.Enum): + """The possible GitHub Enterprise deployments where this user can + exist. + + Enumeration Choices: + + * `CLOUD`: The user is part of a GitHub Enterprise Cloud + deployment. + * `SERVER`: The user is part of a GitHub Enterprise Server + deployment. 
+ """ + + __schema__ = github_schema + __choices__ = ("CLOUD", "SERVER") + + +class FileViewedState(sgqlc.types.Enum): + """The possible viewed states of a file . + + Enumeration Choices: + + * `DISMISSED`: The file has new changes since last viewed. + * `UNVIEWED`: The file has not been marked as viewed. + * `VIEWED`: The file has been marked as viewed. + """ + + __schema__ = github_schema + __choices__ = ("DISMISSED", "UNVIEWED", "VIEWED") + + +Float = sgqlc.types.Float + + +class FundingPlatform(sgqlc.types.Enum): + """The possible funding platforms for repository funding links. + + Enumeration Choices: + + * `COMMUNITY_BRIDGE`: Community Bridge funding platform. + * `CUSTOM`: Custom funding platform. + * `GITHUB`: GitHub funding platform. + * `ISSUEHUNT`: IssueHunt funding platform. + * `KO_FI`: Ko-fi funding platform. + * `LFX_CROWDFUNDING`: LFX Crowdfunding funding platform. + * `LIBERAPAY`: Liberapay funding platform. + * `OPEN_COLLECTIVE`: Open Collective funding platform. + * `OTECHIE`: Otechie funding platform. + * `PATREON`: Patreon funding platform. + * `TIDELIFT`: Tidelift funding platform. + """ + + __schema__ = github_schema + __choices__ = ( + "COMMUNITY_BRIDGE", + "CUSTOM", + "GITHUB", + "ISSUEHUNT", + "KO_FI", + "LFX_CROWDFUNDING", + "LIBERAPAY", + "OPEN_COLLECTIVE", + "OTECHIE", + "PATREON", + "TIDELIFT", + ) + + +class GistOrderField(sgqlc.types.Enum): + """Properties by which gist connections can be ordered. 
+ + Enumeration Choices: + + * `CREATED_AT`: Order gists by creation time + * `PUSHED_AT`: Order gists by push time + * `UPDATED_AT`: Order gists by update time + """ + + __schema__ = github_schema + __choices__ = ("CREATED_AT", "PUSHED_AT", "UPDATED_AT") + + +class GistPrivacy(sgqlc.types.Enum): + """The privacy of a Gist + + Enumeration Choices: + + * `ALL`: Gists that are public and secret + * `PUBLIC`: Public + * `SECRET`: Secret + """ + + __schema__ = github_schema + __choices__ = ("ALL", "PUBLIC", "SECRET") + + +class GitObjectID(sgqlc.types.Scalar): + """A Git object ID.""" + + __schema__ = github_schema + + +class GitSSHRemote(sgqlc.types.Scalar): + """Git SSH string""" + + __schema__ = github_schema + + +class GitSignatureState(sgqlc.types.Enum): + """The state of a Git signature. + + Enumeration Choices: + + * `BAD_CERT`: The signing certificate or its chain could not be + verified + * `BAD_EMAIL`: Invalid email used for signing + * `EXPIRED_KEY`: Signing key expired + * `GPGVERIFY_ERROR`: Internal error - the GPG verification service + misbehaved + * `GPGVERIFY_UNAVAILABLE`: Internal error - the GPG verification + service is unavailable at the moment + * `INVALID`: Invalid signature + * `MALFORMED_SIG`: Malformed signature + * `NOT_SIGNING_KEY`: The usage flags for the key that signed this + don't allow signing + * `NO_USER`: Email used for signing not known to GitHub + * `OCSP_ERROR`: Valid signature, though certificate revocation + check failed + * `OCSP_PENDING`: Valid signature, pending certificate revocation + checking + * `OCSP_REVOKED`: One or more certificates in chain has been + revoked + * `UNKNOWN_KEY`: Key used for signing not known to GitHub + * `UNKNOWN_SIG_TYPE`: Unknown signature type + * `UNSIGNED`: Unsigned + * `UNVERIFIED_EMAIL`: Email used for signing unverified on GitHub + * `VALID`: Valid signature and verified by GitHub + """ + + __schema__ = github_schema + __choices__ = ( + "BAD_CERT", + "BAD_EMAIL", + "EXPIRED_KEY", + 
"GPGVERIFY_ERROR", + "GPGVERIFY_UNAVAILABLE", + "INVALID", + "MALFORMED_SIG", + "NOT_SIGNING_KEY", + "NO_USER", + "OCSP_ERROR", + "OCSP_PENDING", + "OCSP_REVOKED", + "UNKNOWN_KEY", + "UNKNOWN_SIG_TYPE", + "UNSIGNED", + "UNVERIFIED_EMAIL", + "VALID", + ) + + +class GitTimestamp(sgqlc.types.Scalar): + """An ISO-8601 encoded date string. Unlike the DateTime type, + GitTimestamp is not converted in UTC. + """ + + __schema__ = github_schema + + +class HTML(sgqlc.types.Scalar): + """A string containing HTML code.""" + + __schema__ = github_schema + + +ID = sgqlc.types.ID + + +class IdentityProviderConfigurationState(sgqlc.types.Enum): + """The possible states in which authentication can be configured with + an identity provider. + + Enumeration Choices: + + * `CONFIGURED`: Authentication with an identity provider is + configured but not enforced. + * `ENFORCED`: Authentication with an identity provider is + configured and enforced. + * `UNCONFIGURED`: Authentication with an identity provider is not + configured. + """ + + __schema__ = github_schema + __choices__ = ("CONFIGURED", "ENFORCED", "UNCONFIGURED") + + +Int = sgqlc.types.Int + + +class IpAllowListEnabledSettingValue(sgqlc.types.Enum): + """The possible values for the IP allow list enabled setting. + + Enumeration Choices: + + * `DISABLED`: The setting is disabled for the owner. + * `ENABLED`: The setting is enabled for the owner. + """ + + __schema__ = github_schema + __choices__ = ("DISABLED", "ENABLED") + + +class IpAllowListEntryOrderField(sgqlc.types.Enum): + """Properties by which IP allow list entry connections can be + ordered. + + Enumeration Choices: + + * `ALLOW_LIST_VALUE`: Order IP allow list entries by the allow + list value. + * `CREATED_AT`: Order IP allow list entries by creation time. 
+ """ + + __schema__ = github_schema + __choices__ = ("ALLOW_LIST_VALUE", "CREATED_AT") + + +class IpAllowListForInstalledAppsEnabledSettingValue(sgqlc.types.Enum): + """The possible values for the IP allow list configuration for + installed GitHub Apps setting. + + Enumeration Choices: + + * `DISABLED`: The setting is disabled for the owner. + * `ENABLED`: The setting is enabled for the owner. + """ + + __schema__ = github_schema + __choices__ = ("DISABLED", "ENABLED") + + +class IssueClosedStateReason(sgqlc.types.Enum): + """The possible state reasons of a closed issue. + + Enumeration Choices: + + * `COMPLETED`: An issue that has been closed as completed + * `NOT_PLANNED`: An issue that has been closed as not planned + """ + + __schema__ = github_schema + __choices__ = ("COMPLETED", "NOT_PLANNED") + + +class IssueCommentOrderField(sgqlc.types.Enum): + """Properties by which issue comment connections can be ordered. + + Enumeration Choices: + + * `UPDATED_AT`: Order issue comments by update time + """ + + __schema__ = github_schema + __choices__ = ("UPDATED_AT",) + + +class IssueOrderField(sgqlc.types.Enum): + """Properties by which issue connections can be ordered. + + Enumeration Choices: + + * `COMMENTS`: Order issues by comment count + * `CREATED_AT`: Order issues by creation time + * `UPDATED_AT`: Order issues by update time + """ + + __schema__ = github_schema + __choices__ = ("COMMENTS", "CREATED_AT", "UPDATED_AT") + + +class IssueState(sgqlc.types.Enum): + """The possible states of an issue. + + Enumeration Choices: + + * `CLOSED`: An issue that has been closed + * `OPEN`: An issue that is still open + """ + + __schema__ = github_schema + __choices__ = ("CLOSED", "OPEN") + + +class IssueStateReason(sgqlc.types.Enum): + """The possible state reasons of an issue. 
+ + Enumeration Choices: + + * `COMPLETED`: An issue that has been closed as completed + * `NOT_PLANNED`: An issue that has been closed as not planned + * `REOPENED`: An issue that has been reopened + """ + + __schema__ = github_schema + __choices__ = ("COMPLETED", "NOT_PLANNED", "REOPENED") + + +class IssueTimelineItemsItemType(sgqlc.types.Enum): + """The possible item types found in a timeline. + + Enumeration Choices: + + * `ADDED_TO_PROJECT_EVENT`: Represents a 'added_to_project' event + on a given issue or pull request. + * `ASSIGNED_EVENT`: Represents an 'assigned' event on any + assignable object. + * `CLOSED_EVENT`: Represents a 'closed' event on any `Closable`. + * `COMMENT_DELETED_EVENT`: Represents a 'comment_deleted' event on + a given issue or pull request. + * `CONNECTED_EVENT`: Represents a 'connected' event on a given + issue or pull request. + * `CONVERTED_NOTE_TO_ISSUE_EVENT`: Represents a + 'converted_note_to_issue' event on a given issue or pull + request. + * `CONVERTED_TO_DISCUSSION_EVENT`: Represents a + 'converted_to_discussion' event on a given issue. + * `CROSS_REFERENCED_EVENT`: Represents a mention made by one issue + or pull request to another. + * `DEMILESTONED_EVENT`: Represents a 'demilestoned' event on a + given issue or pull request. + * `DISCONNECTED_EVENT`: Represents a 'disconnected' event on a + given issue or pull request. + * `ISSUE_COMMENT`: Represents a comment on an Issue. + * `LABELED_EVENT`: Represents a 'labeled' event on a given issue + or pull request. + * `LOCKED_EVENT`: Represents a 'locked' event on a given issue or + pull request. + * `MARKED_AS_DUPLICATE_EVENT`: Represents a 'marked_as_duplicate' + event on a given issue or pull request. + * `MENTIONED_EVENT`: Represents a 'mentioned' event on a given + issue or pull request. + * `MILESTONED_EVENT`: Represents a 'milestoned' event on a given + issue or pull request. 
+ * `MOVED_COLUMNS_IN_PROJECT_EVENT`: Represents a + 'moved_columns_in_project' event on a given issue or pull + request. + * `PINNED_EVENT`: Represents a 'pinned' event on a given issue or + pull request. + * `REFERENCED_EVENT`: Represents a 'referenced' event on a given + `ReferencedSubject`. + * `REMOVED_FROM_PROJECT_EVENT`: Represents a + 'removed_from_project' event on a given issue or pull request. + * `RENAMED_TITLE_EVENT`: Represents a 'renamed' event on a given + issue or pull request + * `REOPENED_EVENT`: Represents a 'reopened' event on any + `Closable`. + * `SUBSCRIBED_EVENT`: Represents a 'subscribed' event on a given + `Subscribable`. + * `TRANSFERRED_EVENT`: Represents a 'transferred' event on a given + issue or pull request. + * `UNASSIGNED_EVENT`: Represents an 'unassigned' event on any + assignable object. + * `UNLABELED_EVENT`: Represents an 'unlabeled' event on a given + issue or pull request. + * `UNLOCKED_EVENT`: Represents an 'unlocked' event on a given + issue or pull request. + * `UNMARKED_AS_DUPLICATE_EVENT`: Represents an + 'unmarked_as_duplicate' event on a given issue or pull request. + * `UNPINNED_EVENT`: Represents an 'unpinned' event on a given + issue or pull request. + * `UNSUBSCRIBED_EVENT`: Represents an 'unsubscribed' event on a + given `Subscribable`. + * `USER_BLOCKED_EVENT`: Represents a 'user_blocked' event on a + given user. 
+ """ + + __schema__ = github_schema + __choices__ = ( + "ADDED_TO_PROJECT_EVENT", + "ASSIGNED_EVENT", + "CLOSED_EVENT", + "COMMENT_DELETED_EVENT", + "CONNECTED_EVENT", + "CONVERTED_NOTE_TO_ISSUE_EVENT", + "CONVERTED_TO_DISCUSSION_EVENT", + "CROSS_REFERENCED_EVENT", + "DEMILESTONED_EVENT", + "DISCONNECTED_EVENT", + "ISSUE_COMMENT", + "LABELED_EVENT", + "LOCKED_EVENT", + "MARKED_AS_DUPLICATE_EVENT", + "MENTIONED_EVENT", + "MILESTONED_EVENT", + "MOVED_COLUMNS_IN_PROJECT_EVENT", + "PINNED_EVENT", + "REFERENCED_EVENT", + "REMOVED_FROM_PROJECT_EVENT", + "RENAMED_TITLE_EVENT", + "REOPENED_EVENT", + "SUBSCRIBED_EVENT", + "TRANSFERRED_EVENT", + "UNASSIGNED_EVENT", + "UNLABELED_EVENT", + "UNLOCKED_EVENT", + "UNMARKED_AS_DUPLICATE_EVENT", + "UNPINNED_EVENT", + "UNSUBSCRIBED_EVENT", + "USER_BLOCKED_EVENT", + ) + + +class LabelOrderField(sgqlc.types.Enum): + """Properties by which label connections can be ordered. + + Enumeration Choices: + + * `CREATED_AT`: Order labels by creation time + * `NAME`: Order labels by name + """ + + __schema__ = github_schema + __choices__ = ("CREATED_AT", "NAME") + + +class LanguageOrderField(sgqlc.types.Enum): + """Properties by which language connections can be ordered. + + Enumeration Choices: + + * `SIZE`: Order languages by the size of all files containing the + language + """ + + __schema__ = github_schema + __choices__ = ("SIZE",) + + +class LockReason(sgqlc.types.Enum): + """The possible reasons that an issue or pull request was locked. + + Enumeration Choices: + + * `OFF_TOPIC`: The issue or pull request was locked because the + conversation was off-topic. + * `RESOLVED`: The issue or pull request was locked because the + conversation was resolved. + * `SPAM`: The issue or pull request was locked because the + conversation was spam. + * `TOO_HEATED`: The issue or pull request was locked because the + conversation was too heated. 
+ """ + + __schema__ = github_schema + __choices__ = ("OFF_TOPIC", "RESOLVED", "SPAM", "TOO_HEATED") + + +class MergeStateStatus(sgqlc.types.Enum): + """Detailed status information about a pull request merge. + + Enumeration Choices: + + * `BEHIND`: The head ref is out of date. + * `BLOCKED`: The merge is blocked. + * `CLEAN`: Mergeable and passing commit status. + * `DIRTY`: The merge commit cannot be cleanly created. + * `HAS_HOOKS`: Mergeable with passing commit status and pre- + receive hooks. + * `UNKNOWN`: The state cannot currently be determined. + * `UNSTABLE`: Mergeable with non-passing commit status. + """ + + __schema__ = github_schema + __choices__ = ("BEHIND", "BLOCKED", "CLEAN", "DIRTY", "HAS_HOOKS", "UNKNOWN", "UNSTABLE") + + +class MergeableState(sgqlc.types.Enum): + """Whether or not a PullRequest can be merged. + + Enumeration Choices: + + * `CONFLICTING`: The pull request cannot be merged due to merge + conflicts. + * `MERGEABLE`: The pull request can be merged. + * `UNKNOWN`: The mergeability of the pull request is still being + calculated. + """ + + __schema__ = github_schema + __choices__ = ("CONFLICTING", "MERGEABLE", "UNKNOWN") + + +class MigrationSourceType(sgqlc.types.Enum): + """Represents the different Octoshift migration sources. + + Enumeration Choices: + + * `AZURE_DEVOPS`: An Azure DevOps migration source. + * `BITBUCKET_SERVER`: A Bitbucket Server migration source. + * `GITHUB`: A GitHub migration source. + * `GITHUB_ARCHIVE`: A GitHub Migration API source. + * `GITLAB`: A GitLab migration source. + """ + + __schema__ = github_schema + __choices__ = ("AZURE_DEVOPS", "BITBUCKET_SERVER", "GITHUB", "GITHUB_ARCHIVE", "GITLAB") + + +class MigrationState(sgqlc.types.Enum): + """The Octoshift migration state. + + Enumeration Choices: + + * `FAILED`: The Octoshift migration has failed. + * `FAILED_VALIDATION`: The Octoshift migration has invalid + credentials. + * `IN_PROGRESS`: The Octoshift migration is in progress. 
+ * `NOT_STARTED`: The Octoshift migration has not started. + * `PENDING_VALIDATION`: The Octoshift migration needs to have its + credentials validated. + * `QUEUED`: The Octoshift migration has been queued. + * `SUCCEEDED`: The Octoshift migration has succeeded. + """ + + __schema__ = github_schema + __choices__ = ("FAILED", "FAILED_VALIDATION", "IN_PROGRESS", "NOT_STARTED", "PENDING_VALIDATION", "QUEUED", "SUCCEEDED") + + +class MilestoneOrderField(sgqlc.types.Enum): + """Properties by which milestone connections can be ordered. + + Enumeration Choices: + + * `CREATED_AT`: Order milestones by when they were created. + * `DUE_DATE`: Order milestones by when they are due. + * `NUMBER`: Order milestones by their number. + * `UPDATED_AT`: Order milestones by when they were last updated. + """ + + __schema__ = github_schema + __choices__ = ("CREATED_AT", "DUE_DATE", "NUMBER", "UPDATED_AT") + + +class MilestoneState(sgqlc.types.Enum): + """The possible states of a milestone. + + Enumeration Choices: + + * `CLOSED`: A milestone that has been closed. + * `OPEN`: A milestone that is still open. + """ + + __schema__ = github_schema + __choices__ = ("CLOSED", "OPEN") + + +class NotificationRestrictionSettingValue(sgqlc.types.Enum): + """The possible values for the notification restriction setting. + + Enumeration Choices: + + * `DISABLED`: The setting is disabled for the owner. + * `ENABLED`: The setting is enabled for the owner. + """ + + __schema__ = github_schema + __choices__ = ("DISABLED", "ENABLED") + + +class OIDCProviderType(sgqlc.types.Enum): + """The OIDC identity provider type + + Enumeration Choices: + + * `AAD`: Azure Active Directory + """ + + __schema__ = github_schema + __choices__ = ("AAD",) + + +class OauthApplicationCreateAuditEntryState(sgqlc.types.Enum): + """The state of an OAuth Application when it was created. + + Enumeration Choices: + + * `ACTIVE`: The OAuth Application was active and allowed to have + OAuth Accesses. 
+ * `PENDING_DELETION`: The OAuth Application was in the process of + being deleted. + * `SUSPENDED`: The OAuth Application was suspended from generating + OAuth Accesses due to abuse or security concerns. + """ + + __schema__ = github_schema + __choices__ = ("ACTIVE", "PENDING_DELETION", "SUSPENDED") + + +class OperationType(sgqlc.types.Enum): + """The corresponding operation type for the action + + Enumeration Choices: + + * `ACCESS`: An existing resource was accessed + * `AUTHENTICATION`: A resource performed an authentication event + * `CREATE`: A new resource was created + * `MODIFY`: An existing resource was modified + * `REMOVE`: An existing resource was removed + * `RESTORE`: An existing resource was restored + * `TRANSFER`: An existing resource was transferred between + multiple resources + """ + + __schema__ = github_schema + __choices__ = ("ACCESS", "AUTHENTICATION", "CREATE", "MODIFY", "REMOVE", "RESTORE", "TRANSFER") + + +class OrderDirection(sgqlc.types.Enum): + """Possible directions in which to order a list of items when + provided an `orderBy` argument. + + Enumeration Choices: + + * `ASC`: Specifies an ascending order for a given `orderBy` + argument. + * `DESC`: Specifies a descending order for a given `orderBy` + argument. + """ + + __schema__ = github_schema + __choices__ = ("ASC", "DESC") + + +class OrgAddMemberAuditEntryPermission(sgqlc.types.Enum): + """The permissions available to members on an Organization. + + Enumeration Choices: + + * `ADMIN`: Can read, clone, push, and add collaborators to + repositories. + * `READ`: Can read and clone repositories. + """ + + __schema__ = github_schema + __choices__ = ("ADMIN", "READ") + + +class OrgCreateAuditEntryBillingPlan(sgqlc.types.Enum): + """The billing plans available for organizations. 
+ + Enumeration Choices: + + * `BUSINESS`: Team Plan + * `BUSINESS_PLUS`: Enterprise Cloud Plan + * `FREE`: Free Plan + * `TIERED_PER_SEAT`: Tiered Per Seat Plan + * `UNLIMITED`: Legacy Unlimited Plan + """ + + __schema__ = github_schema + __choices__ = ("BUSINESS", "BUSINESS_PLUS", "FREE", "TIERED_PER_SEAT", "UNLIMITED") + + +class OrgEnterpriseOwnerOrderField(sgqlc.types.Enum): + """Properties by which enterprise owners can be ordered. + + Enumeration Choices: + + * `LOGIN`: Order enterprise owners by login. + """ + + __schema__ = github_schema + __choices__ = ("LOGIN",) + + +class OrgRemoveBillingManagerAuditEntryReason(sgqlc.types.Enum): + """The reason a billing manager was removed from an Organization. + + Enumeration Choices: + + * `SAML_EXTERNAL_IDENTITY_MISSING`: SAML external identity missing + * `SAML_SSO_ENFORCEMENT_REQUIRES_EXTERNAL_IDENTITY`: SAML SSO + enforcement requires an external identity + * `TWO_FACTOR_REQUIREMENT_NON_COMPLIANCE`: The organization + required 2FA of its billing managers and this user did not have + 2FA enabled. + """ + + __schema__ = github_schema + __choices__ = ( + "SAML_EXTERNAL_IDENTITY_MISSING", + "SAML_SSO_ENFORCEMENT_REQUIRES_EXTERNAL_IDENTITY", + "TWO_FACTOR_REQUIREMENT_NON_COMPLIANCE", + ) + + +class OrgRemoveMemberAuditEntryMembershipType(sgqlc.types.Enum): + """The type of membership a user has with an Organization. + + Enumeration Choices: + + * `ADMIN`: Organization administrators have full access and can + change several settings, including the names of repositories + that belong to the Organization and Owners team membership. In + addition, organization admins can delete the organization and + all of its repositories. + * `BILLING_MANAGER`: A billing manager is a user who manages the + billing settings for the Organization, such as updating payment + information. + * `DIRECT_MEMBER`: A direct member is a user that is a member of + the Organization. 
+ * `OUTSIDE_COLLABORATOR`: An outside collaborator is a person who + isn't explicitly a member of the Organization, but who has Read, + Write, or Admin permissions to one or more repositories in the + organization. + * `SUSPENDED`: A suspended member. + * `UNAFFILIATED`: An unaffiliated collaborator is a person who is + not a member of the Organization and does not have access to any + repositories in the Organization. + """ + + __schema__ = github_schema + __choices__ = ("ADMIN", "BILLING_MANAGER", "DIRECT_MEMBER", "OUTSIDE_COLLABORATOR", "SUSPENDED", "UNAFFILIATED") + + +class OrgRemoveMemberAuditEntryReason(sgqlc.types.Enum): + """The reason a member was removed from an Organization. + + Enumeration Choices: + + * `SAML_EXTERNAL_IDENTITY_MISSING`: SAML external identity missing + * `SAML_SSO_ENFORCEMENT_REQUIRES_EXTERNAL_IDENTITY`: SAML SSO + enforcement requires an external identity + * `TWO_FACTOR_ACCOUNT_RECOVERY`: User was removed from + organization during account recovery + * `TWO_FACTOR_REQUIREMENT_NON_COMPLIANCE`: The organization + required 2FA of its billing managers and this user did not have + 2FA enabled. + * `USER_ACCOUNT_DELETED`: User account has been deleted + """ + + __schema__ = github_schema + __choices__ = ( + "SAML_EXTERNAL_IDENTITY_MISSING", + "SAML_SSO_ENFORCEMENT_REQUIRES_EXTERNAL_IDENTITY", + "TWO_FACTOR_ACCOUNT_RECOVERY", + "TWO_FACTOR_REQUIREMENT_NON_COMPLIANCE", + "USER_ACCOUNT_DELETED", + ) + + +class OrgRemoveOutsideCollaboratorAuditEntryMembershipType(sgqlc.types.Enum): + """The type of membership a user has with an Organization. + + Enumeration Choices: + + * `BILLING_MANAGER`: A billing manager is a user who manages the + billing settings for the Organization, such as updating payment + information. + * `OUTSIDE_COLLABORATOR`: An outside collaborator is a person who + isn't explicitly a member of the Organization, but who has Read, + Write, or Admin permissions to one or more repositories in the + organization. 
+ * `UNAFFILIATED`: An unaffiliated collaborator is a person who is + not a member of the Organization and does not have access to any + repositories in the organization. + """ + + __schema__ = github_schema + __choices__ = ("BILLING_MANAGER", "OUTSIDE_COLLABORATOR", "UNAFFILIATED") + + +class OrgRemoveOutsideCollaboratorAuditEntryReason(sgqlc.types.Enum): + """The reason an outside collaborator was removed from an + Organization. + + Enumeration Choices: + + * `SAML_EXTERNAL_IDENTITY_MISSING`: SAML external identity missing + * `TWO_FACTOR_REQUIREMENT_NON_COMPLIANCE`: The organization + required 2FA of its billing managers and this user did not have + 2FA enabled. + """ + + __schema__ = github_schema + __choices__ = ("SAML_EXTERNAL_IDENTITY_MISSING", "TWO_FACTOR_REQUIREMENT_NON_COMPLIANCE") + + +class OrgUpdateDefaultRepositoryPermissionAuditEntryPermission(sgqlc.types.Enum): + """The default permission a repository can have in an Organization. + + Enumeration Choices: + + * `ADMIN`: Can read, clone, push, and add collaborators to + repositories. + * `NONE`: No default permission value. + * `READ`: Can read and clone repositories. + * `WRITE`: Can read, clone and push to repositories. + """ + + __schema__ = github_schema + __choices__ = ("ADMIN", "NONE", "READ", "WRITE") + + +class OrgUpdateMemberAuditEntryPermission(sgqlc.types.Enum): + """The permissions available to members on an Organization. + + Enumeration Choices: + + * `ADMIN`: Can read, clone, push, and add collaborators to + repositories. + * `READ`: Can read and clone repositories. + """ + + __schema__ = github_schema + __choices__ = ("ADMIN", "READ") + + +class OrgUpdateMemberRepositoryCreationPermissionAuditEntryVisibility(sgqlc.types.Enum): + """The permissions available for repository creation on an + Organization. + + Enumeration Choices: + + * `ALL`: All organization members are restricted from creating any + repositories. 
+ * `INTERNAL`: All organization members are restricted from + creating internal repositories. + * `NONE`: All organization members are allowed to create any + repositories. + * `PRIVATE`: All organization members are restricted from creating + private repositories. + * `PRIVATE_INTERNAL`: All organization members are restricted from + creating private or internal repositories. + * `PUBLIC`: All organization members are restricted from creating + public repositories. + * `PUBLIC_INTERNAL`: All organization members are restricted from + creating public or internal repositories. + * `PUBLIC_PRIVATE`: All organization members are restricted from + creating public or private repositories. + """ + + __schema__ = github_schema + __choices__ = ("ALL", "INTERNAL", "NONE", "PRIVATE", "PRIVATE_INTERNAL", "PUBLIC", "PUBLIC_INTERNAL", "PUBLIC_PRIVATE") + + +class OrganizationInvitationRole(sgqlc.types.Enum): + """The possible organization invitation roles. + + Enumeration Choices: + + * `ADMIN`: The user is invited to be an admin of the organization. + * `BILLING_MANAGER`: The user is invited to be a billing manager + of the organization. + * `DIRECT_MEMBER`: The user is invited to be a direct member of + the organization. + * `REINSTATE`: The user's previous role will be reinstated. + """ + + __schema__ = github_schema + __choices__ = ("ADMIN", "BILLING_MANAGER", "DIRECT_MEMBER", "REINSTATE") + + +class OrganizationInvitationType(sgqlc.types.Enum): + """The possible organization invitation types. + + Enumeration Choices: + + * `EMAIL`: The invitation was to an email address. + * `USER`: The invitation was to an existing user. + """ + + __schema__ = github_schema + __choices__ = ("EMAIL", "USER") + + +class OrganizationMemberRole(sgqlc.types.Enum): + """The possible roles within an organization for its members. + + Enumeration Choices: + + * `ADMIN`: The user is an administrator of the organization. + * `MEMBER`: The user is a member of the organization. 
+ """ + + __schema__ = github_schema + __choices__ = ("ADMIN", "MEMBER") + + +class OrganizationMembersCanCreateRepositoriesSettingValue(sgqlc.types.Enum): + """The possible values for the members can create repositories + setting on an organization. + + Enumeration Choices: + + * `ALL`: Members will be able to create public and private + repositories. + * `DISABLED`: Members will not be able to create public or private + repositories. + * `INTERNAL`: Members will be able to create only internal + repositories. + * `PRIVATE`: Members will be able to create only private + repositories. + """ + + __schema__ = github_schema + __choices__ = ("ALL", "DISABLED", "INTERNAL", "PRIVATE") + + +class OrganizationOrderField(sgqlc.types.Enum): + """Properties by which organization connections can be ordered. + + Enumeration Choices: + + * `CREATED_AT`: Order organizations by creation time + * `LOGIN`: Order organizations by login + """ + + __schema__ = github_schema + __choices__ = ("CREATED_AT", "LOGIN") + + +class PackageFileOrderField(sgqlc.types.Enum): + """Properties by which package file connections can be ordered. + + Enumeration Choices: + + * `CREATED_AT`: Order package files by creation time + """ + + __schema__ = github_schema + __choices__ = ("CREATED_AT",) + + +class PackageOrderField(sgqlc.types.Enum): + """Properties by which package connections can be ordered. + + Enumeration Choices: + + * `CREATED_AT`: Order packages by creation time + """ + + __schema__ = github_schema + __choices__ = ("CREATED_AT",) + + +class PackageType(sgqlc.types.Enum): + """The possible types of a package. + + Enumeration Choices: + + * `DEBIAN`: A debian package. + * `MAVEN`: A maven package. + * `NPM`: An npm package. + * `NUGET`: A nuget package. + * `PYPI`: A python package. + * `RUBYGEMS`: A rubygems package. 
+ """ + + __schema__ = github_schema + __choices__ = ("DEBIAN", "MAVEN", "NPM", "NUGET", "PYPI", "RUBYGEMS") + + +class PackageVersionOrderField(sgqlc.types.Enum): + """Properties by which package version connections can be ordered. + + Enumeration Choices: + + * `CREATED_AT`: Order package versions by creation time + """ + + __schema__ = github_schema + __choices__ = ("CREATED_AT",) + + +class PatchStatus(sgqlc.types.Enum): + """The possible types of patch statuses. + + Enumeration Choices: + + * `ADDED`: The file was added. Git status 'A'. + * `CHANGED`: The file's type was changed. Git status 'T'. + * `COPIED`: The file was copied. Git status 'C'. + * `DELETED`: The file was deleted. Git status 'D'. + * `MODIFIED`: The file's contents were changed. Git status 'M'. + * `RENAMED`: The file was renamed. Git status 'R'. + """ + + __schema__ = github_schema + __choices__ = ("ADDED", "CHANGED", "COPIED", "DELETED", "MODIFIED", "RENAMED") + + +class PinnableItemType(sgqlc.types.Enum): + """Represents items that can be pinned to a profile page or + dashboard. + + Enumeration Choices: + + * `GIST`: A gist. + * `ISSUE`: An issue. + * `ORGANIZATION`: An organization. + * `PROJECT`: A project. + * `PULL_REQUEST`: A pull request. + * `REPOSITORY`: A repository. + * `TEAM`: A team. + * `USER`: A user. + """ + + __schema__ = github_schema + __choices__ = ("GIST", "ISSUE", "ORGANIZATION", "PROJECT", "PULL_REQUEST", "REPOSITORY", "TEAM", "USER") + + +class PinnedDiscussionGradient(sgqlc.types.Enum): + """Preconfigured gradients that may be used to style discussions + pinned within a repository. 
+ + Enumeration Choices: + + * `BLUE_MINT`: A gradient of blue to mint + * `BLUE_PURPLE`: A gradient of blue to purple + * `PINK_BLUE`: A gradient of pink to blue + * `PURPLE_CORAL`: A gradient of purple to coral + * `RED_ORANGE`: A gradient of red to orange + """ + + __schema__ = github_schema + __choices__ = ("BLUE_MINT", "BLUE_PURPLE", "PINK_BLUE", "PURPLE_CORAL", "RED_ORANGE") + + +class PinnedDiscussionPattern(sgqlc.types.Enum): + """Preconfigured background patterns that may be used to style + discussions pinned within a repository. + + Enumeration Choices: + + * `CHEVRON_UP`: An upward-facing chevron pattern + * `DOT`: A hollow dot pattern + * `DOT_FILL`: A solid dot pattern + * `HEART_FILL`: A heart pattern + * `PLUS`: A plus sign pattern + * `ZAP`: A lightning bolt pattern + """ + + __schema__ = github_schema + __choices__ = ("CHEVRON_UP", "DOT", "DOT_FILL", "HEART_FILL", "PLUS", "ZAP") + + +class PreciseDateTime(sgqlc.types.Scalar): + """An ISO-8601 encoded UTC date string with millisecond precision.""" + + __schema__ = github_schema + + +class ProjectCardArchivedState(sgqlc.types.Enum): + """The possible archived states of a project card. + + Enumeration Choices: + + * `ARCHIVED`: A project card that is archived + * `NOT_ARCHIVED`: A project card that is not archived + """ + + __schema__ = github_schema + __choices__ = ("ARCHIVED", "NOT_ARCHIVED") + + +class ProjectCardState(sgqlc.types.Enum): + """Various content states of a ProjectCard + + Enumeration Choices: + + * `CONTENT_ONLY`: The card has content only. + * `NOTE_ONLY`: The card has a note only. + * `REDACTED`: The card is redacted. + """ + + __schema__ = github_schema + __choices__ = ("CONTENT_ONLY", "NOTE_ONLY", "REDACTED") + + +class ProjectColumnPurpose(sgqlc.types.Enum): + """The semantic purpose of the column - todo, in progress, or done. 
+ + Enumeration Choices: + + * `DONE`: The column contains cards which are complete + * `IN_PROGRESS`: The column contains cards which are currently + being worked on + * `TODO`: The column contains cards still to be worked on + """ + + __schema__ = github_schema + __choices__ = ("DONE", "IN_PROGRESS", "TODO") + + +class ProjectItemType(sgqlc.types.Enum): + """The type of a project item. + + Enumeration Choices: + + * `DRAFT_ISSUE`: Draft Issue + * `ISSUE`: Issue + * `PULL_REQUEST`: Pull Request + * `REDACTED`: Redacted Item + """ + + __schema__ = github_schema + __choices__ = ("DRAFT_ISSUE", "ISSUE", "PULL_REQUEST", "REDACTED") + + +class ProjectNextFieldType(sgqlc.types.Enum): + """The type of a project next field. + + Enumeration Choices: + + * `ASSIGNEES`: Assignees + * `DATE`: Date + * `ITERATION`: Iteration + * `LABELS`: Labels + * `LINKED_PULL_REQUESTS`: Linked Pull Requests + * `MILESTONE`: Milestone + * `NUMBER`: Number + * `REPOSITORY`: Repository + * `REVIEWERS`: Reviewers + * `SINGLE_SELECT`: Single Select + * `TEXT`: Text + * `TITLE`: Title + * `TRACKS`: Tracks + """ + + __schema__ = github_schema + __choices__ = ( + "ASSIGNEES", + "DATE", + "ITERATION", + "LABELS", + "LINKED_PULL_REQUESTS", + "MILESTONE", + "NUMBER", + "REPOSITORY", + "REVIEWERS", + "SINGLE_SELECT", + "TEXT", + "TITLE", + "TRACKS", + ) + + +class ProjectNextOrderField(sgqlc.types.Enum): + """Properties by which the return project can be ordered. + + Enumeration Choices: + + * `CREATED_AT`: The project's date and time of creation + * `NUMBER`: The project's number + * `TITLE`: The project's title + * `UPDATED_AT`: The project's date and time of update + """ + + __schema__ = github_schema + __choices__ = ("CREATED_AT", "NUMBER", "TITLE", "UPDATED_AT") + + +class ProjectOrderField(sgqlc.types.Enum): + """Properties by which project connections can be ordered. 
+ + Enumeration Choices: + + * `CREATED_AT`: Order projects by creation time + * `NAME`: Order projects by name + * `UPDATED_AT`: Order projects by update time + """ + + __schema__ = github_schema + __choices__ = ("CREATED_AT", "NAME", "UPDATED_AT") + + +class ProjectState(sgqlc.types.Enum): + """State of the project; either 'open' or 'closed' + + Enumeration Choices: + + * `CLOSED`: The project is closed. + * `OPEN`: The project is open. + """ + + __schema__ = github_schema + __choices__ = ("CLOSED", "OPEN") + + +class ProjectTemplate(sgqlc.types.Enum): + """GitHub-provided templates for Projects + + Enumeration Choices: + + * `AUTOMATED_KANBAN_V2`: Create a board with v2 triggers to + automatically move cards across To do, In progress and Done + columns. + * `AUTOMATED_REVIEWS_KANBAN`: Create a board with triggers to + automatically move cards across columns with review automation. + * `BASIC_KANBAN`: Create a board with columns for To do, In + progress and Done. + * `BUG_TRIAGE`: Create a board to triage and prioritize bugs with + To do, priority, and Done columns. + """ + + __schema__ = github_schema + __choices__ = ("AUTOMATED_KANBAN_V2", "AUTOMATED_REVIEWS_KANBAN", "BASIC_KANBAN", "BUG_TRIAGE") + + +class ProjectViewLayout(sgqlc.types.Enum): + """The layout of a project view. + + Enumeration Choices: + + * `BOARD_LAYOUT`: Board layout + * `TABLE_LAYOUT`: Table layout + """ + + __schema__ = github_schema + __choices__ = ("BOARD_LAYOUT", "TABLE_LAYOUT") + + +class PullRequestMergeMethod(sgqlc.types.Enum): + """Represents available types of methods to use when merging a pull + request. + + Enumeration Choices: + + * `MERGE`: Add all commits from the head branch to the base branch + with a merge commit. + * `REBASE`: Add all commits from the head branch onto the base + branch individually. + * `SQUASH`: Combine all commits from the head branch into a single + commit in the base branch. 
+ """ + + __schema__ = github_schema + __choices__ = ("MERGE", "REBASE", "SQUASH") + + +class PullRequestOrderField(sgqlc.types.Enum): + """Properties by which pull_requests connections can be ordered. + + Enumeration Choices: + + * `CREATED_AT`: Order pull_requests by creation time + * `UPDATED_AT`: Order pull_requests by update time + """ + + __schema__ = github_schema + __choices__ = ("CREATED_AT", "UPDATED_AT") + + +class PullRequestReviewCommentState(sgqlc.types.Enum): + """The possible states of a pull request review comment. + + Enumeration Choices: + + * `PENDING`: A comment that is part of a pending review + * `SUBMITTED`: A comment that is part of a submitted review + """ + + __schema__ = github_schema + __choices__ = ("PENDING", "SUBMITTED") + + +class PullRequestReviewDecision(sgqlc.types.Enum): + """The review status of a pull request. + + Enumeration Choices: + + * `APPROVED`: The pull request has received an approving review. + * `CHANGES_REQUESTED`: Changes have been requested on the pull + request. + * `REVIEW_REQUIRED`: A review is required before the pull request + can be merged. + """ + + __schema__ = github_schema + __choices__ = ("APPROVED", "CHANGES_REQUESTED", "REVIEW_REQUIRED") + + +class PullRequestReviewEvent(sgqlc.types.Enum): + """The possible events to perform on a pull request review. + + Enumeration Choices: + + * `APPROVE`: Submit feedback and approve merging these changes. + * `COMMENT`: Submit general feedback without explicit approval. + * `DISMISS`: Dismiss review so it now longer effects merging. + * `REQUEST_CHANGES`: Submit feedback that must be addressed before + merging. + """ + + __schema__ = github_schema + __choices__ = ("APPROVE", "COMMENT", "DISMISS", "REQUEST_CHANGES") + + +class PullRequestReviewState(sgqlc.types.Enum): + """The possible states of a pull request review. + + Enumeration Choices: + + * `APPROVED`: A review allowing the pull request to merge. 
+ * `CHANGES_REQUESTED`: A review blocking the pull request from + merging. + * `COMMENTED`: An informational review. + * `DISMISSED`: A review that has been dismissed. + * `PENDING`: A review that has not yet been submitted. + """ + + __schema__ = github_schema + __choices__ = ("APPROVED", "CHANGES_REQUESTED", "COMMENTED", "DISMISSED", "PENDING") + + +class PullRequestState(sgqlc.types.Enum): + """The possible states of a pull request. + + Enumeration Choices: + + * `CLOSED`: A pull request that has been closed without being + merged. + * `MERGED`: A pull request that has been closed by being merged. + * `OPEN`: A pull request that is still open. + """ + + __schema__ = github_schema + __choices__ = ("CLOSED", "MERGED", "OPEN") + + +class PullRequestTimelineItemsItemType(sgqlc.types.Enum): + """The possible item types found in a timeline. + + Enumeration Choices: + + * `ADDED_TO_MERGE_QUEUE_EVENT`: Represents an + 'added_to_merge_queue' event on a given pull request. + * `ADDED_TO_PROJECT_EVENT`: Represents a 'added_to_project' event + on a given issue or pull request. + * `ASSIGNED_EVENT`: Represents an 'assigned' event on any + assignable object. + * `AUTOMATIC_BASE_CHANGE_FAILED_EVENT`: Represents a + 'automatic_base_change_failed' event on a given pull request. + * `AUTOMATIC_BASE_CHANGE_SUCCEEDED_EVENT`: Represents a + 'automatic_base_change_succeeded' event on a given pull request. + * `AUTO_MERGE_DISABLED_EVENT`: Represents a 'auto_merge_disabled' + event on a given pull request. + * `AUTO_MERGE_ENABLED_EVENT`: Represents a 'auto_merge_enabled' + event on a given pull request. + * `AUTO_REBASE_ENABLED_EVENT`: Represents a 'auto_rebase_enabled' + event on a given pull request. + * `AUTO_SQUASH_ENABLED_EVENT`: Represents a 'auto_squash_enabled' + event on a given pull request. + * `BASE_REF_CHANGED_EVENT`: Represents a 'base_ref_changed' event + on a given issue or pull request. 
+ * `BASE_REF_DELETED_EVENT`: Represents a 'base_ref_deleted' event + on a given pull request. + * `BASE_REF_FORCE_PUSHED_EVENT`: Represents a + 'base_ref_force_pushed' event on a given pull request. + * `CLOSED_EVENT`: Represents a 'closed' event on any `Closable`. + * `COMMENT_DELETED_EVENT`: Represents a 'comment_deleted' event on + a given issue or pull request. + * `CONNECTED_EVENT`: Represents a 'connected' event on a given + issue or pull request. + * `CONVERTED_NOTE_TO_ISSUE_EVENT`: Represents a + 'converted_note_to_issue' event on a given issue or pull + request. + * `CONVERTED_TO_DISCUSSION_EVENT`: Represents a + 'converted_to_discussion' event on a given issue. + * `CONVERT_TO_DRAFT_EVENT`: Represents a 'convert_to_draft' event + on a given pull request. + * `CROSS_REFERENCED_EVENT`: Represents a mention made by one issue + or pull request to another. + * `DEMILESTONED_EVENT`: Represents a 'demilestoned' event on a + given issue or pull request. + * `DEPLOYED_EVENT`: Represents a 'deployed' event on a given pull + request. + * `DEPLOYMENT_ENVIRONMENT_CHANGED_EVENT`: Represents a + 'deployment_environment_changed' event on a given pull request. + * `DISCONNECTED_EVENT`: Represents a 'disconnected' event on a + given issue or pull request. + * `HEAD_REF_DELETED_EVENT`: Represents a 'head_ref_deleted' event + on a given pull request. + * `HEAD_REF_FORCE_PUSHED_EVENT`: Represents a + 'head_ref_force_pushed' event on a given pull request. + * `HEAD_REF_RESTORED_EVENT`: Represents a 'head_ref_restored' + event on a given pull request. + * `ISSUE_COMMENT`: Represents a comment on an Issue. + * `LABELED_EVENT`: Represents a 'labeled' event on a given issue + or pull request. + * `LOCKED_EVENT`: Represents a 'locked' event on a given issue or + pull request. + * `MARKED_AS_DUPLICATE_EVENT`: Represents a 'marked_as_duplicate' + event on a given issue or pull request. + * `MENTIONED_EVENT`: Represents a 'mentioned' event on a given + issue or pull request. 
+ * `MERGED_EVENT`: Represents a 'merged' event on a given pull + request. + * `MILESTONED_EVENT`: Represents a 'milestoned' event on a given + issue or pull request. + * `MOVED_COLUMNS_IN_PROJECT_EVENT`: Represents a + 'moved_columns_in_project' event on a given issue or pull + request. + * `PINNED_EVENT`: Represents a 'pinned' event on a given issue or + pull request. + * `PULL_REQUEST_COMMIT`: Represents a Git commit part of a pull + request. + * `PULL_REQUEST_COMMIT_COMMENT_THREAD`: Represents a commit + comment thread part of a pull request. + * `PULL_REQUEST_REVIEW`: A review object for a given pull request. + * `PULL_REQUEST_REVIEW_THREAD`: A threaded list of comments for a + given pull request. + * `PULL_REQUEST_REVISION_MARKER`: Represents the latest point in + the pull request timeline for which the viewer has seen the pull + request's commits. + * `READY_FOR_REVIEW_EVENT`: Represents a 'ready_for_review' event + on a given pull request. + * `REFERENCED_EVENT`: Represents a 'referenced' event on a given + `ReferencedSubject`. + * `REMOVED_FROM_MERGE_QUEUE_EVENT`: Represents a + 'removed_from_merge_queue' event on a given pull request. + * `REMOVED_FROM_PROJECT_EVENT`: Represents a + 'removed_from_project' event on a given issue or pull request. + * `RENAMED_TITLE_EVENT`: Represents a 'renamed' event on a given + issue or pull request + * `REOPENED_EVENT`: Represents a 'reopened' event on any + `Closable`. + * `REVIEW_DISMISSED_EVENT`: Represents a 'review_dismissed' event + on a given issue or pull request. + * `REVIEW_REQUESTED_EVENT`: Represents an 'review_requested' event + on a given pull request. + * `REVIEW_REQUEST_REMOVED_EVENT`: Represents an + 'review_request_removed' event on a given pull request. + * `SUBSCRIBED_EVENT`: Represents a 'subscribed' event on a given + `Subscribable`. + * `TRANSFERRED_EVENT`: Represents a 'transferred' event on a given + issue or pull request. 
+ * `UNASSIGNED_EVENT`: Represents an 'unassigned' event on any + assignable object. + * `UNLABELED_EVENT`: Represents an 'unlabeled' event on a given + issue or pull request. + * `UNLOCKED_EVENT`: Represents an 'unlocked' event on a given + issue or pull request. + * `UNMARKED_AS_DUPLICATE_EVENT`: Represents an + 'unmarked_as_duplicate' event on a given issue or pull request. + * `UNPINNED_EVENT`: Represents an 'unpinned' event on a given + issue or pull request. + * `UNSUBSCRIBED_EVENT`: Represents an 'unsubscribed' event on a + given `Subscribable`. + * `USER_BLOCKED_EVENT`: Represents a 'user_blocked' event on a + given user. + """ + + __schema__ = github_schema + __choices__ = ( + "ADDED_TO_MERGE_QUEUE_EVENT", + "ADDED_TO_PROJECT_EVENT", + "ASSIGNED_EVENT", + "AUTOMATIC_BASE_CHANGE_FAILED_EVENT", + "AUTOMATIC_BASE_CHANGE_SUCCEEDED_EVENT", + "AUTO_MERGE_DISABLED_EVENT", + "AUTO_MERGE_ENABLED_EVENT", + "AUTO_REBASE_ENABLED_EVENT", + "AUTO_SQUASH_ENABLED_EVENT", + "BASE_REF_CHANGED_EVENT", + "BASE_REF_DELETED_EVENT", + "BASE_REF_FORCE_PUSHED_EVENT", + "CLOSED_EVENT", + "COMMENT_DELETED_EVENT", + "CONNECTED_EVENT", + "CONVERTED_NOTE_TO_ISSUE_EVENT", + "CONVERTED_TO_DISCUSSION_EVENT", + "CONVERT_TO_DRAFT_EVENT", + "CROSS_REFERENCED_EVENT", + "DEMILESTONED_EVENT", + "DEPLOYED_EVENT", + "DEPLOYMENT_ENVIRONMENT_CHANGED_EVENT", + "DISCONNECTED_EVENT", + "HEAD_REF_DELETED_EVENT", + "HEAD_REF_FORCE_PUSHED_EVENT", + "HEAD_REF_RESTORED_EVENT", + "ISSUE_COMMENT", + "LABELED_EVENT", + "LOCKED_EVENT", + "MARKED_AS_DUPLICATE_EVENT", + "MENTIONED_EVENT", + "MERGED_EVENT", + "MILESTONED_EVENT", + "MOVED_COLUMNS_IN_PROJECT_EVENT", + "PINNED_EVENT", + "PULL_REQUEST_COMMIT", + "PULL_REQUEST_COMMIT_COMMENT_THREAD", + "PULL_REQUEST_REVIEW", + "PULL_REQUEST_REVIEW_THREAD", + "PULL_REQUEST_REVISION_MARKER", + "READY_FOR_REVIEW_EVENT", + "REFERENCED_EVENT", + "REMOVED_FROM_MERGE_QUEUE_EVENT", + "REMOVED_FROM_PROJECT_EVENT", + "RENAMED_TITLE_EVENT", + "REOPENED_EVENT", + 
"REVIEW_DISMISSED_EVENT", + "REVIEW_REQUESTED_EVENT", + "REVIEW_REQUEST_REMOVED_EVENT", + "SUBSCRIBED_EVENT", + "TRANSFERRED_EVENT", + "UNASSIGNED_EVENT", + "UNLABELED_EVENT", + "UNLOCKED_EVENT", + "UNMARKED_AS_DUPLICATE_EVENT", + "UNPINNED_EVENT", + "UNSUBSCRIBED_EVENT", + "USER_BLOCKED_EVENT", + ) + + +class PullRequestUpdateState(sgqlc.types.Enum): + """The possible target states when updating a pull request. + + Enumeration Choices: + + * `CLOSED`: A pull request that has been closed without being + merged. + * `OPEN`: A pull request that is still open. + """ + + __schema__ = github_schema + __choices__ = ("CLOSED", "OPEN") + + +class ReactionContent(sgqlc.types.Enum): + """Emojis that can be attached to Issues, Pull Requests and Comments. + + Enumeration Choices: + + * `CONFUSED`: Represents the `:confused:` emoji. + * `EYES`: Represents the `:eyes:` emoji. + * `HEART`: Represents the `:heart:` emoji. + * `HOORAY`: Represents the `:hooray:` emoji. + * `LAUGH`: Represents the `:laugh:` emoji. + * `ROCKET`: Represents the `:rocket:` emoji. + * `THUMBS_DOWN`: Represents the `:-1:` emoji. + * `THUMBS_UP`: Represents the `:+1:` emoji. + """ + + __schema__ = github_schema + __choices__ = ("CONFUSED", "EYES", "HEART", "HOORAY", "LAUGH", "ROCKET", "THUMBS_DOWN", "THUMBS_UP") + + +class ReactionOrderField(sgqlc.types.Enum): + """A list of fields that reactions can be ordered by. + + Enumeration Choices: + + * `CREATED_AT`: Allows ordering a list of reactions by when they + were created. + """ + + __schema__ = github_schema + __choices__ = ("CREATED_AT",) + + +class RefOrderField(sgqlc.types.Enum): + """Properties by which ref connections can be ordered. 
+ + Enumeration Choices: + + * `ALPHABETICAL`: Order refs by their alphanumeric name + * `TAG_COMMIT_DATE`: Order refs by underlying commit date if the + ref prefix is refs/tags/ + """ + + __schema__ = github_schema + __choices__ = ("ALPHABETICAL", "TAG_COMMIT_DATE") + + +class ReleaseOrderField(sgqlc.types.Enum): + """Properties by which release connections can be ordered. + + Enumeration Choices: + + * `CREATED_AT`: Order releases by creation time + * `NAME`: Order releases alphabetically by name + """ + + __schema__ = github_schema + __choices__ = ("CREATED_AT", "NAME") + + +class RepoAccessAuditEntryVisibility(sgqlc.types.Enum): + """The privacy of a repository + + Enumeration Choices: + + * `INTERNAL`: The repository is visible only to users in the same + business. + * `PRIVATE`: The repository is visible only to those with explicit + access. + * `PUBLIC`: The repository is visible to everyone. + """ + + __schema__ = github_schema + __choices__ = ("INTERNAL", "PRIVATE", "PUBLIC") + + +class RepoAddMemberAuditEntryVisibility(sgqlc.types.Enum): + """The privacy of a repository + + Enumeration Choices: + + * `INTERNAL`: The repository is visible only to users in the same + business. + * `PRIVATE`: The repository is visible only to those with explicit + access. + * `PUBLIC`: The repository is visible to everyone. + """ + + __schema__ = github_schema + __choices__ = ("INTERNAL", "PRIVATE", "PUBLIC") + + +class RepoArchivedAuditEntryVisibility(sgqlc.types.Enum): + """The privacy of a repository + + Enumeration Choices: + + * `INTERNAL`: The repository is visible only to users in the same + business. + * `PRIVATE`: The repository is visible only to those with explicit + access. + * `PUBLIC`: The repository is visible to everyone. + """ + + __schema__ = github_schema + __choices__ = ("INTERNAL", "PRIVATE", "PUBLIC") + + +class RepoChangeMergeSettingAuditEntryMergeType(sgqlc.types.Enum): + """The merge options available for pull requests to this repository. 
+ + Enumeration Choices: + + * `MERGE`: The pull request is added to the base branch in a merge + commit. + * `REBASE`: Commits from the pull request are added onto the base + branch individually without a merge commit. + * `SQUASH`: The pull request's commits are squashed into a single + commit before they are merged to the base branch. + """ + + __schema__ = github_schema + __choices__ = ("MERGE", "REBASE", "SQUASH") + + +class RepoCreateAuditEntryVisibility(sgqlc.types.Enum): + """The privacy of a repository + + Enumeration Choices: + + * `INTERNAL`: The repository is visible only to users in the same + business. + * `PRIVATE`: The repository is visible only to those with explicit + access. + * `PUBLIC`: The repository is visible to everyone. + """ + + __schema__ = github_schema + __choices__ = ("INTERNAL", "PRIVATE", "PUBLIC") + + +class RepoDestroyAuditEntryVisibility(sgqlc.types.Enum): + """The privacy of a repository + + Enumeration Choices: + + * `INTERNAL`: The repository is visible only to users in the same + business. + * `PRIVATE`: The repository is visible only to those with explicit + access. + * `PUBLIC`: The repository is visible to everyone. + """ + + __schema__ = github_schema + __choices__ = ("INTERNAL", "PRIVATE", "PUBLIC") + + +class RepoRemoveMemberAuditEntryVisibility(sgqlc.types.Enum): + """The privacy of a repository + + Enumeration Choices: + + * `INTERNAL`: The repository is visible only to users in the same + business. + * `PRIVATE`: The repository is visible only to those with explicit + access. + * `PUBLIC`: The repository is visible to everyone. + """ + + __schema__ = github_schema + __choices__ = ("INTERNAL", "PRIVATE", "PUBLIC") + + +class ReportedContentClassifiers(sgqlc.types.Enum): + """The reasons a piece of content can be reported or minimized. 
+
+    Enumeration Choices:
+
+    * `ABUSE`: An abusive or harassing piece of content
+    * `DUPLICATE`: A duplicated piece of content
+    * `OFF_TOPIC`: An irrelevant piece of content
+    * `OUTDATED`: An outdated piece of content
+    * `RESOLVED`: The content has been resolved
+    * `SPAM`: A spammy piece of content
+    """
+
+    __schema__ = github_schema
+    __choices__ = ("ABUSE", "DUPLICATE", "OFF_TOPIC", "OUTDATED", "RESOLVED", "SPAM")
+
+
+class RepositoryAffiliation(sgqlc.types.Enum):
+    """The affiliation of a user to a repository
+
+    Enumeration Choices:
+
+    * `COLLABORATOR`: Repositories that the user has been added to as
+      a collaborator.
+    * `ORGANIZATION_MEMBER`: Repositories that the user has access to
+      through being a member of an organization. This includes every
+      repository on every team that the user is on.
+    * `OWNER`: Repositories that are owned by the authenticated user.
+    """
+
+    __schema__ = github_schema
+    __choices__ = ("COLLABORATOR", "ORGANIZATION_MEMBER", "OWNER")
+
+
+class RepositoryContributionType(sgqlc.types.Enum):
+    """The reason a repository is listed as 'contributed'.
+
+    Enumeration Choices:
+
+    * `COMMIT`: Created a commit
+    * `ISSUE`: Created an issue
+    * `PULL_REQUEST`: Created a pull request
+    * `PULL_REQUEST_REVIEW`: Reviewed a pull request
+    * `REPOSITORY`: Created the repository
+    """
+
+    __schema__ = github_schema
+    __choices__ = ("COMMIT", "ISSUE", "PULL_REQUEST", "PULL_REQUEST_REVIEW", "REPOSITORY")
+
+
+class RepositoryInteractionLimit(sgqlc.types.Enum):
+    """A repository interaction limit.
+
+    Enumeration Choices:
+
+    * `COLLABORATORS_ONLY`: Users that are not collaborators will not
+      be able to interact with the repository.
+    * `CONTRIBUTORS_ONLY`: Users that have not previously committed to
+      a repository’s default branch will be unable to interact with
+      the repository.
+    * `EXISTING_USERS`: Users that have recently created their account
+      will be unable to interact with the repository.
+ * `NO_LIMIT`: No interaction limits are enabled. + """ + + __schema__ = github_schema + __choices__ = ("COLLABORATORS_ONLY", "CONTRIBUTORS_ONLY", "EXISTING_USERS", "NO_LIMIT") + + +class RepositoryInteractionLimitExpiry(sgqlc.types.Enum): + """The length for a repository interaction limit to be enabled for. + + Enumeration Choices: + + * `ONE_DAY`: The interaction limit will expire after 1 day. + * `ONE_MONTH`: The interaction limit will expire after 1 month. + * `ONE_WEEK`: The interaction limit will expire after 1 week. + * `SIX_MONTHS`: The interaction limit will expire after 6 months. + * `THREE_DAYS`: The interaction limit will expire after 3 days. + """ + + __schema__ = github_schema + __choices__ = ("ONE_DAY", "ONE_MONTH", "ONE_WEEK", "SIX_MONTHS", "THREE_DAYS") + + +class RepositoryInteractionLimitOrigin(sgqlc.types.Enum): + """Indicates where an interaction limit is configured. + + Enumeration Choices: + + * `ORGANIZATION`: A limit that is configured at the organization + level. + * `REPOSITORY`: A limit that is configured at the repository + level. + * `USER`: A limit that is configured at the user-wide level. + """ + + __schema__ = github_schema + __choices__ = ("ORGANIZATION", "REPOSITORY", "USER") + + +class RepositoryInvitationOrderField(sgqlc.types.Enum): + """Properties by which repository invitation connections can be + ordered. + + Enumeration Choices: + + * `CREATED_AT`: Order repository invitations by creation time + """ + + __schema__ = github_schema + __choices__ = ("CREATED_AT",) + + +class RepositoryLockReason(sgqlc.types.Enum): + """The possible reasons a given repository could be in a locked + state. + + Enumeration Choices: + + * `BILLING`: The repository is locked due to a billing related + reason. + * `MIGRATING`: The repository is locked due to a migration. + * `MOVING`: The repository is locked due to a move. + * `RENAME`: The repository is locked due to a rename. 
+ """ + + __schema__ = github_schema + __choices__ = ("BILLING", "MIGRATING", "MOVING", "RENAME") + + +class RepositoryMigrationOrderDirection(sgqlc.types.Enum): + """Possible directions in which to order a list of repository + migrations when provided an `orderBy` argument. + + Enumeration Choices: + + * `ASC`: Specifies an ascending order for a given `orderBy` + argument. + * `DESC`: Specifies a descending order for a given `orderBy` + argument. + """ + + __schema__ = github_schema + __choices__ = ("ASC", "DESC") + + +class RepositoryMigrationOrderField(sgqlc.types.Enum): + """Properties by which repository migrations can be ordered. + + Enumeration Choices: + + * `CREATED_AT`: Order mannequins why when they were created. + """ + + __schema__ = github_schema + __choices__ = ("CREATED_AT",) + + +class RepositoryOrderField(sgqlc.types.Enum): + """Properties by which repository connections can be ordered. + + Enumeration Choices: + + * `CREATED_AT`: Order repositories by creation time + * `NAME`: Order repositories by name + * `PUSHED_AT`: Order repositories by push time + * `STARGAZERS`: Order repositories by number of stargazers + * `UPDATED_AT`: Order repositories by update time + """ + + __schema__ = github_schema + __choices__ = ("CREATED_AT", "NAME", "PUSHED_AT", "STARGAZERS", "UPDATED_AT") + + +class RepositoryPermission(sgqlc.types.Enum): + """The access level to a repository + + Enumeration Choices: + + * `ADMIN`: Can read, clone, and push to this repository. Can also + manage issues, pull requests, and repository settings, including + adding collaborators + * `MAINTAIN`: Can read, clone, and push to this repository. They + can also manage issues, pull requests, and some repository + settings + * `READ`: Can read and clone this repository. Can also open and + comment on issues and pull requests + * `TRIAGE`: Can read and clone this repository. Can also manage + issues and pull requests + * `WRITE`: Can read, clone, and push to this repository. 
Can also + manage issues and pull requests + """ + + __schema__ = github_schema + __choices__ = ("ADMIN", "MAINTAIN", "READ", "TRIAGE", "WRITE") + + +class RepositoryPrivacy(sgqlc.types.Enum): + """The privacy of a repository + + Enumeration Choices: + + * `PRIVATE`: Private + * `PUBLIC`: Public + """ + + __schema__ = github_schema + __choices__ = ("PRIVATE", "PUBLIC") + + +class RepositoryVisibility(sgqlc.types.Enum): + """The repository's visibility level. + + Enumeration Choices: + + * `INTERNAL`: The repository is visible only to users in the same + business. + * `PRIVATE`: The repository is visible only to those with explicit + access. + * `PUBLIC`: The repository is visible to everyone. + """ + + __schema__ = github_schema + __choices__ = ("INTERNAL", "PRIVATE", "PUBLIC") + + +class RepositoryVulnerabilityAlertState(sgqlc.types.Enum): + """The possible states of an alert + + Enumeration Choices: + + * `DISMISSED`: An alert that has been manually closed by a user. + * `FIXED`: An alert that has been resolved by a code change. + * `OPEN`: An alert that is still open. + """ + + __schema__ = github_schema + __choices__ = ("DISMISSED", "FIXED", "OPEN") + + +class RequestableCheckStatusState(sgqlc.types.Enum): + """The possible states that can be requested when creating a check + run. + + Enumeration Choices: + + * `COMPLETED`: The check suite or run has been completed. + * `IN_PROGRESS`: The check suite or run is in progress. + * `PENDING`: The check suite or run is in pending state. + * `QUEUED`: The check suite or run has been queued. + * `WAITING`: The check suite or run is in waiting state. + """ + + __schema__ = github_schema + __choices__ = ("COMPLETED", "IN_PROGRESS", "PENDING", "QUEUED", "WAITING") + + +class RoleInOrganization(sgqlc.types.Enum): + """Possible roles a user may have in relation to an organization. + + Enumeration Choices: + + * `DIRECT_MEMBER`: A user who is a direct member of the + organization. 
+ * `OWNER`: A user with full administrative access to the + organization. + * `UNAFFILIATED`: A user who is unaffiliated with the + organization. + """ + + __schema__ = github_schema + __choices__ = ("DIRECT_MEMBER", "OWNER", "UNAFFILIATED") + + +class SamlDigestAlgorithm(sgqlc.types.Enum): + """The possible digest algorithms used to sign SAML requests for an + identity provider. + + Enumeration Choices: + + * `SHA1`: SHA1 + * `SHA256`: SHA256 + * `SHA384`: SHA384 + * `SHA512`: SHA512 + """ + + __schema__ = github_schema + __choices__ = ("SHA1", "SHA256", "SHA384", "SHA512") + + +class SamlSignatureAlgorithm(sgqlc.types.Enum): + """The possible signature algorithms used to sign SAML requests for a + Identity Provider. + + Enumeration Choices: + + * `RSA_SHA1`: RSA-SHA1 + * `RSA_SHA256`: RSA-SHA256 + * `RSA_SHA384`: RSA-SHA384 + * `RSA_SHA512`: RSA-SHA512 + """ + + __schema__ = github_schema + __choices__ = ("RSA_SHA1", "RSA_SHA256", "RSA_SHA384", "RSA_SHA512") + + +class SavedReplyOrderField(sgqlc.types.Enum): + """Properties by which saved reply connections can be ordered. + + Enumeration Choices: + + * `UPDATED_AT`: Order saved reply by when they were updated. + """ + + __schema__ = github_schema + __choices__ = ("UPDATED_AT",) + + +class SearchType(sgqlc.types.Enum): + """Represents the individual results of a search. + + Enumeration Choices: + + * `DISCUSSION`: Returns matching discussions in repositories. + * `ISSUE`: Returns results matching issues in repositories. + * `REPOSITORY`: Returns results matching repositories. + * `USER`: Returns results matching users and organizations on + GitHub. + """ + + __schema__ = github_schema + __choices__ = ("DISCUSSION", "ISSUE", "REPOSITORY", "USER") + + +class SecurityAdvisoryClassification(sgqlc.types.Enum): + """Classification of the advisory. + + Enumeration Choices: + + * `GENERAL`: Classification of general advisories. + * `MALWARE`: Classification of malware advisories. 
+ """ + + __schema__ = github_schema + __choices__ = ("GENERAL", "MALWARE") + + +class SecurityAdvisoryEcosystem(sgqlc.types.Enum): + """The possible ecosystems of a security vulnerability's package. + + Enumeration Choices: + + * `COMPOSER`: PHP packages hosted at packagist.org + * `GO`: Go modules + * `MAVEN`: Java artifacts hosted at the Maven central repository + * `NPM`: JavaScript packages hosted at npmjs.com + * `NUGET`: .NET packages hosted at the NuGet Gallery + * `PIP`: Python packages hosted at PyPI.org + * `RUBYGEMS`: Ruby gems hosted at RubyGems.org + * `RUST`: Rust crates + """ + + __schema__ = github_schema + __choices__ = ("COMPOSER", "GO", "MAVEN", "NPM", "NUGET", "PIP", "RUBYGEMS", "RUST") + + +class SecurityAdvisoryIdentifierType(sgqlc.types.Enum): + """Identifier formats available for advisories. + + Enumeration Choices: + + * `CVE`: Common Vulnerabilities and Exposures Identifier. + * `GHSA`: GitHub Security Advisory ID. + """ + + __schema__ = github_schema + __choices__ = ("CVE", "GHSA") + + +class SecurityAdvisoryOrderField(sgqlc.types.Enum): + """Properties by which security advisory connections can be ordered. + + Enumeration Choices: + + * `PUBLISHED_AT`: Order advisories by publication time + * `UPDATED_AT`: Order advisories by update time + """ + + __schema__ = github_schema + __choices__ = ("PUBLISHED_AT", "UPDATED_AT") + + +class SecurityAdvisorySeverity(sgqlc.types.Enum): + """Severity of the vulnerability. + + Enumeration Choices: + + * `CRITICAL`: Critical. + * `HIGH`: High. + * `LOW`: Low. + * `MODERATE`: Moderate. + """ + + __schema__ = github_schema + __choices__ = ("CRITICAL", "HIGH", "LOW", "MODERATE") + + +class SecurityVulnerabilityOrderField(sgqlc.types.Enum): + """Properties by which security vulnerability connections can be + ordered. 
+ + Enumeration Choices: + + * `UPDATED_AT`: Order vulnerability by update time + """ + + __schema__ = github_schema + __choices__ = ("UPDATED_AT",) + + +class SponsorOrderField(sgqlc.types.Enum): + """Properties by which sponsor connections can be ordered. + + Enumeration Choices: + + * `LOGIN`: Order sponsorable entities by login (username). + * `RELEVANCE`: Order sponsors by their relevance to the viewer. + """ + + __schema__ = github_schema + __choices__ = ("LOGIN", "RELEVANCE") + + +class SponsorableOrderField(sgqlc.types.Enum): + """Properties by which sponsorable connections can be ordered. + + Enumeration Choices: + + * `LOGIN`: Order sponsorable entities by login (username). + """ + + __schema__ = github_schema + __choices__ = ("LOGIN",) + + +class SponsorsActivityAction(sgqlc.types.Enum): + """The possible actions that GitHub Sponsors activities can + represent. + + Enumeration Choices: + + * `CANCELLED_SPONSORSHIP`: The activity was cancelling a + sponsorship. + * `NEW_SPONSORSHIP`: The activity was starting a sponsorship. + * `PENDING_CHANGE`: The activity was scheduling a downgrade or + cancellation. + * `REFUND`: The activity was funds being refunded to the sponsor + or GitHub. + * `SPONSOR_MATCH_DISABLED`: The activity was disabling matching + for a previously matched sponsorship. + * `TIER_CHANGE`: The activity was changing the sponsorship tier, + either directly by the sponsor or by a scheduled/pending change. + """ + + __schema__ = github_schema + __choices__ = ("CANCELLED_SPONSORSHIP", "NEW_SPONSORSHIP", "PENDING_CHANGE", "REFUND", "SPONSOR_MATCH_DISABLED", "TIER_CHANGE") + + +class SponsorsActivityOrderField(sgqlc.types.Enum): + """Properties by which GitHub Sponsors activity connections can be + ordered. + + Enumeration Choices: + + * `TIMESTAMP`: Order activities by when they happened. 
+ """ + + __schema__ = github_schema + __choices__ = ("TIMESTAMP",) + + +class SponsorsActivityPeriod(sgqlc.types.Enum): + """The possible time periods for which Sponsors activities can be + requested. + + Enumeration Choices: + + * `ALL`: Don't restrict the activity to any date range, include + all activity. + * `DAY`: The previous calendar day. + * `MONTH`: The previous thirty days. + * `WEEK`: The previous seven days. + """ + + __schema__ = github_schema + __choices__ = ("ALL", "DAY", "MONTH", "WEEK") + + +class SponsorsGoalKind(sgqlc.types.Enum): + """The different kinds of goals a GitHub Sponsors member can have. + + Enumeration Choices: + + * `MONTHLY_SPONSORSHIP_AMOUNT`: The goal is about getting a + certain amount in USD from sponsorships each month. + * `TOTAL_SPONSORS_COUNT`: The goal is about reaching a certain + number of sponsors. + """ + + __schema__ = github_schema + __choices__ = ("MONTHLY_SPONSORSHIP_AMOUNT", "TOTAL_SPONSORS_COUNT") + + +class SponsorsTierOrderField(sgqlc.types.Enum): + """Properties by which Sponsors tiers connections can be ordered. + + Enumeration Choices: + + * `CREATED_AT`: Order tiers by creation time. + * `MONTHLY_PRICE_IN_CENTS`: Order tiers by their monthly price in + cents + """ + + __schema__ = github_schema + __choices__ = ("CREATED_AT", "MONTHLY_PRICE_IN_CENTS") + + +class SponsorshipNewsletterOrderField(sgqlc.types.Enum): + """Properties by which sponsorship update connections can be ordered. + + Enumeration Choices: + + * `CREATED_AT`: Order sponsorship newsletters by when they were + created. + """ + + __schema__ = github_schema + __choices__ = ("CREATED_AT",) + + +class SponsorshipOrderField(sgqlc.types.Enum): + """Properties by which sponsorship connections can be ordered. + + Enumeration Choices: + + * `CREATED_AT`: Order sponsorship by creation time. 
+ """ + + __schema__ = github_schema + __choices__ = ("CREATED_AT",) + + +class SponsorshipPrivacy(sgqlc.types.Enum): + """The privacy of a sponsorship + + Enumeration Choices: + + * `PRIVATE`: Private + * `PUBLIC`: Public + """ + + __schema__ = github_schema + __choices__ = ("PRIVATE", "PUBLIC") + + +class StarOrderField(sgqlc.types.Enum): + """Properties by which star connections can be ordered. + + Enumeration Choices: + + * `STARRED_AT`: Allows ordering a list of stars by when they were + created. + """ + + __schema__ = github_schema + __choices__ = ("STARRED_AT",) + + +class StatusState(sgqlc.types.Enum): + """The possible commit status states. + + Enumeration Choices: + + * `ERROR`: Status is errored. + * `EXPECTED`: Status is expected. + * `FAILURE`: Status is failing. + * `PENDING`: Status is pending. + * `SUCCESS`: Status is successful. + """ + + __schema__ = github_schema + __choices__ = ("ERROR", "EXPECTED", "FAILURE", "PENDING", "SUCCESS") + + +String = sgqlc.types.String + + +class SubscriptionState(sgqlc.types.Enum): + """The possible states of a subscription. + + Enumeration Choices: + + * `IGNORED`: The User is never notified. + * `SUBSCRIBED`: The User is notified of all conversations. + * `UNSUBSCRIBED`: The User is only notified when participating or + @mentioned. + """ + + __schema__ = github_schema + __choices__ = ("IGNORED", "SUBSCRIBED", "UNSUBSCRIBED") + + +class TeamDiscussionCommentOrderField(sgqlc.types.Enum): + """Properties by which team discussion comment connections can be + ordered. + + Enumeration Choices: + + * `NUMBER`: Allows sequential ordering of team discussion comments + (which is equivalent to chronological ordering). + """ + + __schema__ = github_schema + __choices__ = ("NUMBER",) + + +class TeamDiscussionOrderField(sgqlc.types.Enum): + """Properties by which team discussion connections can be ordered. + + Enumeration Choices: + + * `CREATED_AT`: Allows chronological ordering of team discussions. 
+ """ + + __schema__ = github_schema + __choices__ = ("CREATED_AT",) + + +class TeamMemberOrderField(sgqlc.types.Enum): + """Properties by which team member connections can be ordered. + + Enumeration Choices: + + * `CREATED_AT`: Order team members by creation time + * `LOGIN`: Order team members by login + """ + + __schema__ = github_schema + __choices__ = ("CREATED_AT", "LOGIN") + + +class TeamMemberRole(sgqlc.types.Enum): + """The possible team member roles; either 'maintainer' or 'member'. + + Enumeration Choices: + + * `MAINTAINER`: A team maintainer has permission to add and remove + team members. + * `MEMBER`: A team member has no administrative permissions on the + team. + """ + + __schema__ = github_schema + __choices__ = ("MAINTAINER", "MEMBER") + + +class TeamMembershipType(sgqlc.types.Enum): + """Defines which types of team members are included in the returned + list. Can be one of IMMEDIATE, CHILD_TEAM or ALL. + + Enumeration Choices: + + * `ALL`: Includes immediate and child team members for the team. + * `CHILD_TEAM`: Includes only child team members for the team. + * `IMMEDIATE`: Includes only immediate members of the team. + """ + + __schema__ = github_schema + __choices__ = ("ALL", "CHILD_TEAM", "IMMEDIATE") + + +class TeamOrderField(sgqlc.types.Enum): + """Properties by which team connections can be ordered. + + Enumeration Choices: + + * `NAME`: Allows ordering a list of teams by name. + """ + + __schema__ = github_schema + __choices__ = ("NAME",) + + +class TeamPrivacy(sgqlc.types.Enum): + """The possible team privacy values. + + Enumeration Choices: + + * `SECRET`: A secret team can only be seen by its members. + * `VISIBLE`: A visible team can be seen and @mentioned by every + member of the organization. + """ + + __schema__ = github_schema + __choices__ = ("SECRET", "VISIBLE") + + +class TeamRepositoryOrderField(sgqlc.types.Enum): + """Properties by which team repository connections can be ordered. 
+ + Enumeration Choices: + + * `CREATED_AT`: Order repositories by creation time + * `NAME`: Order repositories by name + * `PERMISSION`: Order repositories by permission + * `PUSHED_AT`: Order repositories by push time + * `STARGAZERS`: Order repositories by number of stargazers + * `UPDATED_AT`: Order repositories by update time + """ + + __schema__ = github_schema + __choices__ = ("CREATED_AT", "NAME", "PERMISSION", "PUSHED_AT", "STARGAZERS", "UPDATED_AT") + + +class TeamRole(sgqlc.types.Enum): + """The role of a user on a team. + + Enumeration Choices: + + * `ADMIN`: User has admin rights on the team. + * `MEMBER`: User is a member of the team. + """ + + __schema__ = github_schema + __choices__ = ("ADMIN", "MEMBER") + + +class TopicSuggestionDeclineReason(sgqlc.types.Enum): + """Reason that the suggested topic is declined. + + Enumeration Choices: + + * `NOT_RELEVANT`: The suggested topic is not relevant to the + repository. + * `PERSONAL_PREFERENCE`: The viewer does not like the suggested + topic. + * `TOO_GENERAL`: The suggested topic is too general for the + repository. + * `TOO_SPECIFIC`: The suggested topic is too specific for the + repository (e.g. #ruby-on-rails-version-4-2-1). + """ + + __schema__ = github_schema + __choices__ = ("NOT_RELEVANT", "PERSONAL_PREFERENCE", "TOO_GENERAL", "TOO_SPECIFIC") + + +class TrackedIssueStates(sgqlc.types.Enum): + """The possible states of a tracked issue. + + Enumeration Choices: + + * `CLOSED`: The tracked issue is closed + * `OPEN`: The tracked issue is open + """ + + __schema__ = github_schema + __choices__ = ("CLOSED", "OPEN") + + +class URI(sgqlc.types.Scalar): + """An RFC 3986, RFC 3987, and RFC 6570 (level 4) compliant URI + string. + """ + + __schema__ = github_schema + + +class UserBlockDuration(sgqlc.types.Enum): + """The possible durations that a user can be blocked for. 
+ + Enumeration Choices: + + * `ONE_DAY`: The user was blocked for 1 day + * `ONE_MONTH`: The user was blocked for 30 days + * `ONE_WEEK`: The user was blocked for 7 days + * `PERMANENT`: The user was blocked permanently + * `THREE_DAYS`: The user was blocked for 3 days + """ + + __schema__ = github_schema + __choices__ = ("ONE_DAY", "ONE_MONTH", "ONE_WEEK", "PERMANENT", "THREE_DAYS") + + +class UserStatusOrderField(sgqlc.types.Enum): + """Properties by which user status connections can be ordered. + + Enumeration Choices: + + * `UPDATED_AT`: Order user statuses by when they were updated. + """ + + __schema__ = github_schema + __choices__ = ("UPDATED_AT",) + + +class VerifiableDomainOrderField(sgqlc.types.Enum): + """Properties by which verifiable domain connections can be ordered. + + Enumeration Choices: + + * `CREATED_AT`: Order verifiable domains by their creation date. + * `DOMAIN`: Order verifiable domains by the domain name. + """ + + __schema__ = github_schema + __choices__ = ("CREATED_AT", "DOMAIN") + + +class X509Certificate(sgqlc.types.Scalar): + """A valid x509 certificate string""" + + __schema__ = github_schema + + +######################################################################## +# Input Objects +######################################################################## +class AbortQueuedMigrationsInput(sgqlc.types.Input): + """Autogenerated input type of AbortQueuedMigrations""" + + __schema__ = github_schema + __field_names__ = ("owner_id", "client_mutation_id") + owner_id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="ownerId") + """The ID of the organization that is running the migrations.""" + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class AcceptEnterpriseAdministratorInvitationInput(sgqlc.types.Input): + """Autogenerated input type of + AcceptEnterpriseAdministratorInvitation + """ + + __schema__ = github_schema + 
__field_names__ = ("invitation_id", "client_mutation_id") + invitation_id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="invitationId") + """The id of the invitation being accepted""" + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class AcceptTopicSuggestionInput(sgqlc.types.Input): + """Autogenerated input type of AcceptTopicSuggestion""" + + __schema__ = github_schema + __field_names__ = ("repository_id", "name", "client_mutation_id") + repository_id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="repositoryId") + """The Node ID of the repository.""" + + name = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="name") + """The name of the suggested topic.""" + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class AddAssigneesToAssignableInput(sgqlc.types.Input): + """Autogenerated input type of AddAssigneesToAssignable""" + + __schema__ = github_schema + __field_names__ = ("assignable_id", "assignee_ids", "client_mutation_id") + assignable_id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="assignableId") + """The id of the assignable object to add assignees to.""" + + assignee_ids = sgqlc.types.Field(sgqlc.types.non_null(sgqlc.types.list_of(sgqlc.types.non_null(ID))), graphql_name="assigneeIds") + """The id of users to add as assignees.""" + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class AddCommentInput(sgqlc.types.Input): + """Autogenerated input type of AddComment""" + + __schema__ = github_schema + __field_names__ = ("subject_id", "body", "client_mutation_id") + subject_id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="subjectId") + """The Node ID of the subject to modify.""" 
+ + body = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="body") + """The contents of the comment.""" + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class AddDiscussionCommentInput(sgqlc.types.Input): + """Autogenerated input type of AddDiscussionComment""" + + __schema__ = github_schema + __field_names__ = ("discussion_id", "reply_to_id", "body", "client_mutation_id") + discussion_id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="discussionId") + """The Node ID of the discussion to comment on.""" + + reply_to_id = sgqlc.types.Field(ID, graphql_name="replyToId") + """The Node ID of the discussion comment within this discussion to + reply to. + """ + + body = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="body") + """The contents of the comment.""" + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class AddDiscussionPollVoteInput(sgqlc.types.Input): + """Autogenerated input type of AddDiscussionPollVote""" + + __schema__ = github_schema + __field_names__ = ("poll_option_id", "client_mutation_id") + poll_option_id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="pollOptionId") + """The Node ID of the discussion poll option to vote for.""" + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class AddEnterpriseSupportEntitlementInput(sgqlc.types.Input): + """Autogenerated input type of AddEnterpriseSupportEntitlement""" + + __schema__ = github_schema + __field_names__ = ("enterprise_id", "login", "client_mutation_id") + enterprise_id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="enterpriseId") + """The ID of the Enterprise which the admin belongs to.""" + + login = 
sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="login") + """The login of a member who will receive the support entitlement.""" + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class AddLabelsToLabelableInput(sgqlc.types.Input): + """Autogenerated input type of AddLabelsToLabelable""" + + __schema__ = github_schema + __field_names__ = ("labelable_id", "label_ids", "client_mutation_id") + labelable_id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="labelableId") + """The id of the labelable object to add labels to.""" + + label_ids = sgqlc.types.Field(sgqlc.types.non_null(sgqlc.types.list_of(sgqlc.types.non_null(ID))), graphql_name="labelIds") + """The ids of the labels to add.""" + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class AddProjectCardInput(sgqlc.types.Input): + """Autogenerated input type of AddProjectCard""" + + __schema__ = github_schema + __field_names__ = ("project_column_id", "content_id", "note", "client_mutation_id") + project_column_id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="projectColumnId") + """The Node ID of the ProjectColumn.""" + + content_id = sgqlc.types.Field(ID, graphql_name="contentId") + """The content of the card. 
Must be a member of the ProjectCardItem + union + """ + + note = sgqlc.types.Field(String, graphql_name="note") + """The note on the card.""" + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class AddProjectColumnInput(sgqlc.types.Input): + """Autogenerated input type of AddProjectColumn""" + + __schema__ = github_schema + __field_names__ = ("project_id", "name", "client_mutation_id") + project_id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="projectId") + """The Node ID of the project.""" + + name = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="name") + """The name of the column.""" + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class AddProjectDraftIssueInput(sgqlc.types.Input): + """Autogenerated input type of AddProjectDraftIssue""" + + __schema__ = github_schema + __field_names__ = ("project_id", "title", "body", "assignee_ids", "client_mutation_id") + project_id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="projectId") + """The ID of the Project to add the draft issue to.""" + + title = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="title") + """The title of the draft issue.""" + + body = sgqlc.types.Field(String, graphql_name="body") + """The body of the draft issue.""" + + assignee_ids = sgqlc.types.Field(sgqlc.types.list_of(sgqlc.types.non_null(ID)), graphql_name="assigneeIds") + """The IDs of the assignees of the draft issue.""" + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class AddProjectNextItemInput(sgqlc.types.Input): + """Autogenerated input type of AddProjectNextItem""" + + __schema__ = github_schema + __field_names__ = ("project_id", "content_id", 
"client_mutation_id") + project_id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="projectId") + """The ID of the Project to add the item to.""" + + content_id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="contentId") + """The content id of the item (Issue or PullRequest).""" + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class AddPullRequestReviewCommentInput(sgqlc.types.Input): + """Autogenerated input type of AddPullRequestReviewComment""" + + __schema__ = github_schema + __field_names__ = ( + "pull_request_id", + "pull_request_review_id", + "commit_oid", + "body", + "path", + "position", + "in_reply_to", + "client_mutation_id", + ) + pull_request_id = sgqlc.types.Field(ID, graphql_name="pullRequestId") + """The node ID of the pull request reviewing""" + + pull_request_review_id = sgqlc.types.Field(ID, graphql_name="pullRequestReviewId") + """The Node ID of the review to modify.""" + + commit_oid = sgqlc.types.Field(GitObjectID, graphql_name="commitOID") + """The SHA of the commit to comment on.""" + + body = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="body") + """The text of the comment.""" + + path = sgqlc.types.Field(String, graphql_name="path") + """The relative path of the file to comment on.""" + + position = sgqlc.types.Field(Int, graphql_name="position") + """The line index in the diff to comment on.""" + + in_reply_to = sgqlc.types.Field(ID, graphql_name="inReplyTo") + """The comment id to reply to.""" + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class AddPullRequestReviewInput(sgqlc.types.Input): + """Autogenerated input type of AddPullRequestReview""" + + __schema__ = github_schema + __field_names__ = ("pull_request_id", "commit_oid", "body", "event", "comments", "threads", 
"client_mutation_id") + pull_request_id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="pullRequestId") + """The Node ID of the pull request to modify.""" + + commit_oid = sgqlc.types.Field(GitObjectID, graphql_name="commitOID") + """The commit OID the review pertains to.""" + + body = sgqlc.types.Field(String, graphql_name="body") + """The contents of the review body comment.""" + + event = sgqlc.types.Field(PullRequestReviewEvent, graphql_name="event") + """The event to perform on the pull request review.""" + + comments = sgqlc.types.Field(sgqlc.types.list_of("DraftPullRequestReviewComment"), graphql_name="comments") + """The review line comments.""" + + threads = sgqlc.types.Field(sgqlc.types.list_of("DraftPullRequestReviewThread"), graphql_name="threads") + """The review line comment threads.""" + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class AddPullRequestReviewThreadInput(sgqlc.types.Input): + """Autogenerated input type of AddPullRequestReviewThread""" + + __schema__ = github_schema + __field_names__ = ( + "path", + "body", + "pull_request_id", + "pull_request_review_id", + "line", + "side", + "start_line", + "start_side", + "client_mutation_id", + ) + path = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="path") + """Path to the file being commented on.""" + + body = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="body") + """Body of the thread's first comment.""" + + pull_request_id = sgqlc.types.Field(ID, graphql_name="pullRequestId") + """The node ID of the pull request reviewing""" + + pull_request_review_id = sgqlc.types.Field(ID, graphql_name="pullRequestReviewId") + """The Node ID of the review to modify.""" + + line = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="line") + """The line of the blob to which the thread refers. The end of the + line range for multi-line comments. 
+ """ + + side = sgqlc.types.Field(DiffSide, graphql_name="side") + """The side of the diff on which the line resides. For multi-line + comments, this is the side for the end of the line range. + """ + + start_line = sgqlc.types.Field(Int, graphql_name="startLine") + """The first line of the range to which the comment refers.""" + + start_side = sgqlc.types.Field(DiffSide, graphql_name="startSide") + """The side of the diff on which the start line resides.""" + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class AddReactionInput(sgqlc.types.Input): + """Autogenerated input type of AddReaction""" + + __schema__ = github_schema + __field_names__ = ("subject_id", "content", "client_mutation_id") + subject_id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="subjectId") + """The Node ID of the subject to modify.""" + + content = sgqlc.types.Field(sgqlc.types.non_null(ReactionContent), graphql_name="content") + """The name of the emoji to react with.""" + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class AddStarInput(sgqlc.types.Input): + """Autogenerated input type of AddStar""" + + __schema__ = github_schema + __field_names__ = ("starrable_id", "client_mutation_id") + starrable_id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="starrableId") + """The Starrable ID to star.""" + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class AddUpvoteInput(sgqlc.types.Input): + """Autogenerated input type of AddUpvote""" + + __schema__ = github_schema + __field_names__ = ("subject_id", "client_mutation_id") + subject_id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="subjectId") + """The Node ID of the discussion or comment 
to upvote.""" + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class AddVerifiableDomainInput(sgqlc.types.Input): + """Autogenerated input type of AddVerifiableDomain""" + + __schema__ = github_schema + __field_names__ = ("owner_id", "domain", "client_mutation_id") + owner_id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="ownerId") + """The ID of the owner to add the domain to""" + + domain = sgqlc.types.Field(sgqlc.types.non_null(URI), graphql_name="domain") + """The URL of the domain""" + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class ApproveDeploymentsInput(sgqlc.types.Input): + """Autogenerated input type of ApproveDeployments""" + + __schema__ = github_schema + __field_names__ = ("workflow_run_id", "environment_ids", "comment", "client_mutation_id") + workflow_run_id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="workflowRunId") + """The node ID of the workflow run containing the pending + deployments. 
+ """ + + environment_ids = sgqlc.types.Field(sgqlc.types.non_null(sgqlc.types.list_of(sgqlc.types.non_null(ID))), graphql_name="environmentIds") + """The ids of environments to reject deployments""" + + comment = sgqlc.types.Field(String, graphql_name="comment") + """Optional comment for approving deployments""" + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class ApproveVerifiableDomainInput(sgqlc.types.Input): + """Autogenerated input type of ApproveVerifiableDomain""" + + __schema__ = github_schema + __field_names__ = ("id", "client_mutation_id") + id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="id") + """The ID of the verifiable domain to approve.""" + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class ArchiveRepositoryInput(sgqlc.types.Input): + """Autogenerated input type of ArchiveRepository""" + + __schema__ = github_schema + __field_names__ = ("repository_id", "client_mutation_id") + repository_id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="repositoryId") + """The ID of the repository to mark as archived.""" + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class AuditLogOrder(sgqlc.types.Input): + """Ordering options for Audit Log connections.""" + + __schema__ = github_schema + __field_names__ = ("field", "direction") + field = sgqlc.types.Field(AuditLogOrderField, graphql_name="field") + """The field to order Audit Logs by.""" + + direction = sgqlc.types.Field(OrderDirection, graphql_name="direction") + """The ordering direction.""" + + +class CancelEnterpriseAdminInvitationInput(sgqlc.types.Input): + """Autogenerated input type of CancelEnterpriseAdminInvitation""" + + __schema__ = github_schema + 
__field_names__ = ("invitation_id", "client_mutation_id") + invitation_id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="invitationId") + """The Node ID of the pending enterprise administrator invitation.""" + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class CancelSponsorshipInput(sgqlc.types.Input): + """Autogenerated input type of CancelSponsorship""" + + __schema__ = github_schema + __field_names__ = ("sponsor_id", "sponsor_login", "sponsorable_id", "sponsorable_login", "client_mutation_id") + sponsor_id = sgqlc.types.Field(ID, graphql_name="sponsorId") + """The ID of the user or organization who is acting as the sponsor, + paying for the sponsorship. Required if sponsorLogin is not given. + """ + + sponsor_login = sgqlc.types.Field(String, graphql_name="sponsorLogin") + """The username of the user or organization who is acting as the + sponsor, paying for the sponsorship. Required if sponsorId is not + given. + """ + + sponsorable_id = sgqlc.types.Field(ID, graphql_name="sponsorableId") + """The ID of the user or organization who is receiving the + sponsorship. Required if sponsorableLogin is not given. + """ + + sponsorable_login = sgqlc.types.Field(String, graphql_name="sponsorableLogin") + """The username of the user or organization who is receiving the + sponsorship. Required if sponsorableId is not given. + """ + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class ChangeUserStatusInput(sgqlc.types.Input): + """Autogenerated input type of ChangeUserStatus""" + + __schema__ = github_schema + __field_names__ = ("emoji", "message", "organization_id", "limited_availability", "expires_at", "client_mutation_id") + emoji = sgqlc.types.Field(String, graphql_name="emoji") + """The emoji to represent your status. 
Can either be a native Unicode + emoji or an emoji name with colons, e.g., :grinning:. + """ + + message = sgqlc.types.Field(String, graphql_name="message") + """A short description of your current status.""" + + organization_id = sgqlc.types.Field(ID, graphql_name="organizationId") + """The ID of the organization whose members will be allowed to see + the status. If omitted, the status will be publicly visible. + """ + + limited_availability = sgqlc.types.Field(Boolean, graphql_name="limitedAvailability") + """Whether this status should indicate you are not fully available on + GitHub, e.g., you are away. + """ + + expires_at = sgqlc.types.Field(DateTime, graphql_name="expiresAt") + """If set, the user status will not be shown after this date.""" + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class CheckAnnotationData(sgqlc.types.Input): + """Information from a check run analysis to specific lines of code.""" + + __schema__ = github_schema + __field_names__ = ("path", "location", "annotation_level", "message", "title", "raw_details") + path = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="path") + """The path of the file to add an annotation to.""" + + location = sgqlc.types.Field(sgqlc.types.non_null("CheckAnnotationRange"), graphql_name="location") + """The location of the annotation""" + + annotation_level = sgqlc.types.Field(sgqlc.types.non_null(CheckAnnotationLevel), graphql_name="annotationLevel") + """Represents an annotation's information level""" + + message = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="message") + """A short description of the feedback for these lines of code.""" + + title = sgqlc.types.Field(String, graphql_name="title") + """The title that represents the annotation.""" + + raw_details = sgqlc.types.Field(String, graphql_name="rawDetails") + """Details about this annotation.""" + + +class 
CheckAnnotationRange(sgqlc.types.Input): + """Information from a check run analysis to specific lines of code.""" + + __schema__ = github_schema + __field_names__ = ("start_line", "start_column", "end_line", "end_column") + start_line = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="startLine") + """The starting line of the range.""" + + start_column = sgqlc.types.Field(Int, graphql_name="startColumn") + """The starting column of the range.""" + + end_line = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="endLine") + """The ending line of the range.""" + + end_column = sgqlc.types.Field(Int, graphql_name="endColumn") + """The ending column of the range.""" + + +class CheckRunAction(sgqlc.types.Input): + """Possible further actions the integrator can perform.""" + + __schema__ = github_schema + __field_names__ = ("label", "description", "identifier") + label = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="label") + """The text to be displayed on a button in the web UI.""" + + description = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="description") + """A short explanation of what this action would do.""" + + identifier = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="identifier") + """A reference for the action on the integrator's system.""" + + +class CheckRunFilter(sgqlc.types.Input): + """The filters that are available when fetching check runs.""" + + __schema__ = github_schema + __field_names__ = ("check_type", "app_id", "check_name", "status") + check_type = sgqlc.types.Field(CheckRunType, graphql_name="checkType") + """Filters the check runs by this type.""" + + app_id = sgqlc.types.Field(Int, graphql_name="appId") + """Filters the check runs created by this application ID.""" + + check_name = sgqlc.types.Field(String, graphql_name="checkName") + """Filters the check runs by this name.""" + + status = sgqlc.types.Field(CheckStatusState, graphql_name="status") + """Filters the check runs by 
this status.""" + + +class CheckRunOutput(sgqlc.types.Input): + """Descriptive details about the check run.""" + + __schema__ = github_schema + __field_names__ = ("title", "summary", "text", "annotations", "images") + title = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="title") + """A title to provide for this check run.""" + + summary = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="summary") + """The summary of the check run (supports Commonmark).""" + + text = sgqlc.types.Field(String, graphql_name="text") + """The details of the check run (supports Commonmark).""" + + annotations = sgqlc.types.Field(sgqlc.types.list_of(sgqlc.types.non_null(CheckAnnotationData)), graphql_name="annotations") + """The annotations that are made as part of the check run.""" + + images = sgqlc.types.Field(sgqlc.types.list_of(sgqlc.types.non_null("CheckRunOutputImage")), graphql_name="images") + """Images attached to the check run output displayed in the GitHub + pull request UI. + """ + + +class CheckRunOutputImage(sgqlc.types.Input): + """Images attached to the check run output displayed in the GitHub + pull request UI. 
+ """ + + __schema__ = github_schema + __field_names__ = ("alt", "image_url", "caption") + alt = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="alt") + """The alternative text for the image.""" + + image_url = sgqlc.types.Field(sgqlc.types.non_null(URI), graphql_name="imageUrl") + """The full URL of the image.""" + + caption = sgqlc.types.Field(String, graphql_name="caption") + """A short image description.""" + + +class CheckSuiteAutoTriggerPreference(sgqlc.types.Input): + """The auto-trigger preferences that are available for check suites.""" + + __schema__ = github_schema + __field_names__ = ("app_id", "setting") + app_id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="appId") + """The node ID of the application that owns the check suite.""" + + setting = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="setting") + """Set to `true` to enable automatic creation of CheckSuite events + upon pushes to the repository. + """ + + +class CheckSuiteFilter(sgqlc.types.Input): + """The filters that are available when fetching check suites.""" + + __schema__ = github_schema + __field_names__ = ("app_id", "check_name") + app_id = sgqlc.types.Field(Int, graphql_name="appId") + """Filters the check suites created by this application ID.""" + + check_name = sgqlc.types.Field(String, graphql_name="checkName") + """Filters the check suites by this name.""" + + +class ClearLabelsFromLabelableInput(sgqlc.types.Input): + """Autogenerated input type of ClearLabelsFromLabelable""" + + __schema__ = github_schema + __field_names__ = ("labelable_id", "client_mutation_id") + labelable_id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="labelableId") + """The id of the labelable object to clear the labels from.""" + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class CloneProjectInput(sgqlc.types.Input): + """Autogenerated input 
type of CloneProject""" + + __schema__ = github_schema + __field_names__ = ("target_owner_id", "source_id", "include_workflows", "name", "body", "public", "client_mutation_id") + target_owner_id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="targetOwnerId") + """The owner ID to create the project under.""" + + source_id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="sourceId") + """The source project to clone.""" + + include_workflows = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="includeWorkflows") + """Whether or not to clone the source project's workflows.""" + + name = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="name") + """The name of the project.""" + + body = sgqlc.types.Field(String, graphql_name="body") + """The description of the project.""" + + public = sgqlc.types.Field(Boolean, graphql_name="public") + """The visibility of the project, defaults to false (private).""" + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class CloneTemplateRepositoryInput(sgqlc.types.Input): + """Autogenerated input type of CloneTemplateRepository""" + + __schema__ = github_schema + __field_names__ = ("repository_id", "name", "owner_id", "description", "visibility", "include_all_branches", "client_mutation_id") + repository_id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="repositoryId") + """The Node ID of the template repository.""" + + name = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="name") + """The name of the new repository.""" + + owner_id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="ownerId") + """The ID of the owner for the new repository.""" + + description = sgqlc.types.Field(String, graphql_name="description") + """A short description of the new repository.""" + + visibility = sgqlc.types.Field(sgqlc.types.non_null(RepositoryVisibility), 
graphql_name="visibility") + """Indicates the repository's visibility level.""" + + include_all_branches = sgqlc.types.Field(Boolean, graphql_name="includeAllBranches") + """Whether to copy all branches from the template to the new + repository. Defaults to copying only the default branch of the + template. + """ + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class CloseIssueInput(sgqlc.types.Input): + """Autogenerated input type of CloseIssue""" + + __schema__ = github_schema + __field_names__ = ("issue_id", "state_reason", "client_mutation_id") + issue_id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="issueId") + """ID of the issue to be closed.""" + + state_reason = sgqlc.types.Field(IssueClosedStateReason, graphql_name="stateReason") + """The reason the issue is to be closed.""" + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class ClosePullRequestInput(sgqlc.types.Input): + """Autogenerated input type of ClosePullRequest""" + + __schema__ = github_schema + __field_names__ = ("pull_request_id", "client_mutation_id") + pull_request_id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="pullRequestId") + """ID of the pull request to be closed.""" + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class CommitAuthor(sgqlc.types.Input): + """Specifies an author for filtering Git commits.""" + + __schema__ = github_schema + __field_names__ = ("id", "emails") + id = sgqlc.types.Field(ID, graphql_name="id") + """ID of a User to filter by. If non-null, only commits authored by + this user will be returned. This field takes precedence over + emails. 
+ """ + + emails = sgqlc.types.Field(sgqlc.types.list_of(sgqlc.types.non_null(String)), graphql_name="emails") + """Email addresses to filter by. Commits authored by any of the + specified email addresses will be returned. + """ + + +class CommitContributionOrder(sgqlc.types.Input): + """Ordering options for commit contribution connections.""" + + __schema__ = github_schema + __field_names__ = ("field", "direction") + field = sgqlc.types.Field(sgqlc.types.non_null(CommitContributionOrderField), graphql_name="field") + """The field by which to order commit contributions.""" + + direction = sgqlc.types.Field(sgqlc.types.non_null(OrderDirection), graphql_name="direction") + """The ordering direction.""" + + +class CommitMessage(sgqlc.types.Input): + """A message to include with a new commit""" + + __schema__ = github_schema + __field_names__ = ("headline", "body") + headline = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="headline") + """The headline of the message.""" + + body = sgqlc.types.Field(String, graphql_name="body") + """The body of the message.""" + + +class CommittableBranch(sgqlc.types.Input): + """A git ref for a commit to be appended to. The ref must be a + branch, i.e. its fully qualified name must start with + `refs/heads/` (although the input is not required to be fully + qualified). The Ref may be specified by its global node ID or by + the repository nameWithOwner and branch name. 
### Examples + Specify a branch using a global node ID: { "id": + "MDM6UmVmMTpyZWZzL2hlYWRzL21haW4=" } Specify a branch using + nameWithOwner and branch name: { "nameWithOwner": + "github/graphql-client", "branchName": "main" } + """ + + __schema__ = github_schema + __field_names__ = ("id", "repository_name_with_owner", "branch_name") + id = sgqlc.types.Field(ID, graphql_name="id") + """The Node ID of the Ref to be updated.""" + + repository_name_with_owner = sgqlc.types.Field(String, graphql_name="repositoryNameWithOwner") + """The nameWithOwner of the repository to commit to.""" + + branch_name = sgqlc.types.Field(String, graphql_name="branchName") + """The unqualified name of the branch to append the commit to.""" + + +class ContributionOrder(sgqlc.types.Input): + """Ordering options for contribution connections.""" + + __schema__ = github_schema + __field_names__ = ("direction",) + direction = sgqlc.types.Field(sgqlc.types.non_null(OrderDirection), graphql_name="direction") + """The ordering direction.""" + + +class ConvertProjectCardNoteToIssueInput(sgqlc.types.Input): + """Autogenerated input type of ConvertProjectCardNoteToIssue""" + + __schema__ = github_schema + __field_names__ = ("project_card_id", "repository_id", "title", "body", "client_mutation_id") + project_card_id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="projectCardId") + """The ProjectCard ID to convert.""" + + repository_id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="repositoryId") + """The ID of the repository to create the issue in.""" + + title = sgqlc.types.Field(String, graphql_name="title") + """The title of the newly created issue. Defaults to the card's note + text. 
+ """ + + body = sgqlc.types.Field(String, graphql_name="body") + """The body of the newly created issue.""" + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class ConvertPullRequestToDraftInput(sgqlc.types.Input): + """Autogenerated input type of ConvertPullRequestToDraft""" + + __schema__ = github_schema + __field_names__ = ("pull_request_id", "client_mutation_id") + pull_request_id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="pullRequestId") + """ID of the pull request to convert to draft""" + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class CreateBranchProtectionRuleInput(sgqlc.types.Input): + """Autogenerated input type of CreateBranchProtectionRule""" + + __schema__ = github_schema + __field_names__ = ( + "repository_id", + "pattern", + "requires_approving_reviews", + "required_approving_review_count", + "requires_commit_signatures", + "requires_linear_history", + "blocks_creations", + "allows_force_pushes", + "allows_deletions", + "is_admin_enforced", + "requires_status_checks", + "requires_strict_status_checks", + "requires_code_owner_reviews", + "dismisses_stale_reviews", + "restricts_review_dismissals", + "review_dismissal_actor_ids", + "bypass_pull_request_actor_ids", + "bypass_force_push_actor_ids", + "restricts_pushes", + "push_actor_ids", + "required_status_check_contexts", + "required_status_checks", + "requires_conversation_resolution", + "client_mutation_id", + ) + repository_id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="repositoryId") + """The global relay id of the repository in which a new branch + protection rule should be created in. 
+ """ + + pattern = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="pattern") + """The glob-like pattern used to determine matching branches.""" + + requires_approving_reviews = sgqlc.types.Field(Boolean, graphql_name="requiresApprovingReviews") + """Are approving reviews required to update matching branches.""" + + required_approving_review_count = sgqlc.types.Field(Int, graphql_name="requiredApprovingReviewCount") + """Number of approving reviews required to update matching branches.""" + + requires_commit_signatures = sgqlc.types.Field(Boolean, graphql_name="requiresCommitSignatures") + """Are commits required to be signed.""" + + requires_linear_history = sgqlc.types.Field(Boolean, graphql_name="requiresLinearHistory") + """Are merge commits prohibited from being pushed to this branch.""" + + blocks_creations = sgqlc.types.Field(Boolean, graphql_name="blocksCreations") + """Is branch creation a protected operation.""" + + allows_force_pushes = sgqlc.types.Field(Boolean, graphql_name="allowsForcePushes") + """Are force pushes allowed on this branch.""" + + allows_deletions = sgqlc.types.Field(Boolean, graphql_name="allowsDeletions") + """Can this branch be deleted.""" + + is_admin_enforced = sgqlc.types.Field(Boolean, graphql_name="isAdminEnforced") + """Can admins overwrite branch protection.""" + + requires_status_checks = sgqlc.types.Field(Boolean, graphql_name="requiresStatusChecks") + """Are status checks required to update matching branches.""" + + requires_strict_status_checks = sgqlc.types.Field(Boolean, graphql_name="requiresStrictStatusChecks") + """Are branches required to be up to date before merging.""" + + requires_code_owner_reviews = sgqlc.types.Field(Boolean, graphql_name="requiresCodeOwnerReviews") + """Are reviews from code owners required to update matching branches.""" + + dismisses_stale_reviews = sgqlc.types.Field(Boolean, graphql_name="dismissesStaleReviews") + """Will new commits pushed to matching branches dismiss pull 
request + review approvals. + """ + + restricts_review_dismissals = sgqlc.types.Field(Boolean, graphql_name="restrictsReviewDismissals") + """Is dismissal of pull request reviews restricted.""" + + review_dismissal_actor_ids = sgqlc.types.Field(sgqlc.types.list_of(sgqlc.types.non_null(ID)), graphql_name="reviewDismissalActorIds") + """A list of User, Team, or App IDs allowed to dismiss reviews on + pull requests targeting matching branches. + """ + + bypass_pull_request_actor_ids = sgqlc.types.Field( + sgqlc.types.list_of(sgqlc.types.non_null(ID)), graphql_name="bypassPullRequestActorIds" + ) + """A list of User, Team, or App IDs allowed to bypass pull requests + targeting matching branches. + """ + + bypass_force_push_actor_ids = sgqlc.types.Field(sgqlc.types.list_of(sgqlc.types.non_null(ID)), graphql_name="bypassForcePushActorIds") + """A list of User, Team, or App IDs allowed to bypass force push + targeting matching branches. + """ + + restricts_pushes = sgqlc.types.Field(Boolean, graphql_name="restrictsPushes") + """Is pushing to matching branches restricted.""" + + push_actor_ids = sgqlc.types.Field(sgqlc.types.list_of(sgqlc.types.non_null(ID)), graphql_name="pushActorIds") + """A list of User, Team, or App IDs allowed to push to matching + branches. + """ + + required_status_check_contexts = sgqlc.types.Field( + sgqlc.types.list_of(sgqlc.types.non_null(String)), graphql_name="requiredStatusCheckContexts" + ) + """List of required status check contexts that must pass for commits + to be accepted to matching branches. 
+ """ + + required_status_checks = sgqlc.types.Field( + sgqlc.types.list_of(sgqlc.types.non_null("RequiredStatusCheckInput")), graphql_name="requiredStatusChecks" + ) + """The list of required status checks""" + + requires_conversation_resolution = sgqlc.types.Field(Boolean, graphql_name="requiresConversationResolution") + """Are conversations required to be resolved before merging.""" + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class CreateCheckRunInput(sgqlc.types.Input): + """Autogenerated input type of CreateCheckRun""" + + __schema__ = github_schema + __field_names__ = ( + "repository_id", + "name", + "head_sha", + "details_url", + "external_id", + "status", + "started_at", + "conclusion", + "completed_at", + "output", + "actions", + "client_mutation_id", + ) + repository_id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="repositoryId") + """The node ID of the repository.""" + + name = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="name") + """The name of the check.""" + + head_sha = sgqlc.types.Field(sgqlc.types.non_null(GitObjectID), graphql_name="headSha") + """The SHA of the head commit.""" + + details_url = sgqlc.types.Field(URI, graphql_name="detailsUrl") + """The URL of the integrator's site that has the full details of the + check. 
+ """ + + external_id = sgqlc.types.Field(String, graphql_name="externalId") + """A reference for the run on the integrator's system.""" + + status = sgqlc.types.Field(RequestableCheckStatusState, graphql_name="status") + """The current status.""" + + started_at = sgqlc.types.Field(DateTime, graphql_name="startedAt") + """The time that the check run began.""" + + conclusion = sgqlc.types.Field(CheckConclusionState, graphql_name="conclusion") + """The final conclusion of the check.""" + + completed_at = sgqlc.types.Field(DateTime, graphql_name="completedAt") + """The time that the check run finished.""" + + output = sgqlc.types.Field(CheckRunOutput, graphql_name="output") + """Descriptive details about the run.""" + + actions = sgqlc.types.Field(sgqlc.types.list_of(sgqlc.types.non_null(CheckRunAction)), graphql_name="actions") + """Possible further actions the integrator can perform, which a user + may trigger. + """ + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class CreateCheckSuiteInput(sgqlc.types.Input): + """Autogenerated input type of CreateCheckSuite""" + + __schema__ = github_schema + __field_names__ = ("repository_id", "head_sha", "client_mutation_id") + repository_id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="repositoryId") + """The Node ID of the repository.""" + + head_sha = sgqlc.types.Field(sgqlc.types.non_null(GitObjectID), graphql_name="headSha") + """The SHA of the head commit.""" + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class CreateCommitOnBranchInput(sgqlc.types.Input): + """Autogenerated input type of CreateCommitOnBranch""" + + __schema__ = github_schema + __field_names__ = ("branch", "file_changes", "message", "expected_head_oid", "client_mutation_id") + branch = 
sgqlc.types.Field(sgqlc.types.non_null(CommittableBranch), graphql_name="branch") + """The Ref to be updated. Must be a branch.""" + + file_changes = sgqlc.types.Field("FileChanges", graphql_name="fileChanges") + """A description of changes to files in this commit.""" + + message = sgqlc.types.Field(sgqlc.types.non_null(CommitMessage), graphql_name="message") + """The commit message the be included with the commit.""" + + expected_head_oid = sgqlc.types.Field(sgqlc.types.non_null(GitObjectID), graphql_name="expectedHeadOid") + """The git commit oid expected at the head of the branch prior to the + commit + """ + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class CreateDiscussionInput(sgqlc.types.Input): + """Autogenerated input type of CreateDiscussion""" + + __schema__ = github_schema + __field_names__ = ("repository_id", "title", "body", "category_id", "client_mutation_id") + repository_id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="repositoryId") + """The id of the repository on which to create the discussion.""" + + title = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="title") + """The title of the discussion.""" + + body = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="body") + """The body of the discussion.""" + + category_id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="categoryId") + """The id of the discussion category to associate with this + discussion. 
+ """ + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class CreateEnterpriseOrganizationInput(sgqlc.types.Input): + """Autogenerated input type of CreateEnterpriseOrganization""" + + __schema__ = github_schema + __field_names__ = ("enterprise_id", "login", "profile_name", "billing_email", "admin_logins", "client_mutation_id") + enterprise_id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="enterpriseId") + """The ID of the enterprise owning the new organization.""" + + login = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="login") + """The login of the new organization.""" + + profile_name = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="profileName") + """The profile name of the new organization.""" + + billing_email = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="billingEmail") + """The email used for sending billing receipts.""" + + admin_logins = sgqlc.types.Field(sgqlc.types.non_null(sgqlc.types.list_of(sgqlc.types.non_null(String))), graphql_name="adminLogins") + """The logins for the administrators of the new organization.""" + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class CreateEnvironmentInput(sgqlc.types.Input): + """Autogenerated input type of CreateEnvironment""" + + __schema__ = github_schema + __field_names__ = ("repository_id", "name", "client_mutation_id") + repository_id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="repositoryId") + """The node ID of the repository.""" + + name = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="name") + """The name of the environment.""" + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class 
CreateIpAllowListEntryInput(sgqlc.types.Input): + """Autogenerated input type of CreateIpAllowListEntry""" + + __schema__ = github_schema + __field_names__ = ("owner_id", "allow_list_value", "name", "is_active", "client_mutation_id") + owner_id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="ownerId") + """The ID of the owner for which to create the new IP allow list + entry. + """ + + allow_list_value = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="allowListValue") + """An IP address or range of addresses in CIDR notation.""" + + name = sgqlc.types.Field(String, graphql_name="name") + """An optional name for the IP allow list entry.""" + + is_active = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="isActive") + """Whether the IP allow list entry is active when an IP allow list is + enabled. + """ + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class CreateIssueInput(sgqlc.types.Input): + """Autogenerated input type of CreateIssue""" + + __schema__ = github_schema + __field_names__ = ( + "repository_id", + "title", + "body", + "assignee_ids", + "milestone_id", + "label_ids", + "project_ids", + "issue_template", + "client_mutation_id", + ) + repository_id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="repositoryId") + """The Node ID of the repository.""" + + title = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="title") + """The title for the issue.""" + + body = sgqlc.types.Field(String, graphql_name="body") + """The body for the issue description.""" + + assignee_ids = sgqlc.types.Field(sgqlc.types.list_of(sgqlc.types.non_null(ID)), graphql_name="assigneeIds") + """The Node ID for the user assignee for this issue.""" + + milestone_id = sgqlc.types.Field(ID, graphql_name="milestoneId") + """The Node ID of the milestone for this issue.""" + + label_ids = 
sgqlc.types.Field(sgqlc.types.list_of(sgqlc.types.non_null(ID)), graphql_name="labelIds") + """An array of Node IDs of labels for this issue.""" + + project_ids = sgqlc.types.Field(sgqlc.types.list_of(sgqlc.types.non_null(ID)), graphql_name="projectIds") + """An array of Node IDs for projects associated with this issue.""" + + issue_template = sgqlc.types.Field(String, graphql_name="issueTemplate") + """The name of an issue template in the repository, assigns labels + and assignees from the template to the issue + """ + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class CreateMigrationSourceInput(sgqlc.types.Input): + """Autogenerated input type of CreateMigrationSource""" + + __schema__ = github_schema + __field_names__ = ("name", "url", "access_token", "type", "owner_id", "github_pat", "client_mutation_id") + name = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="name") + """The Octoshift migration source name.""" + + url = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="url") + """The Octoshift migration source URL.""" + + access_token = sgqlc.types.Field(String, graphql_name="accessToken") + """The Octoshift migration source access token.""" + + type = sgqlc.types.Field(sgqlc.types.non_null(MigrationSourceType), graphql_name="type") + """The Octoshift migration source type.""" + + owner_id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="ownerId") + """The ID of the organization that will own the Octoshift migration + source. + """ + + github_pat = sgqlc.types.Field(String, graphql_name="githubPat") + """The GitHub personal access token of the user importing to the + target repository. 
+ """ + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class CreateProjectInput(sgqlc.types.Input): + """Autogenerated input type of CreateProject""" + + __schema__ = github_schema + __field_names__ = ("owner_id", "name", "body", "template", "repository_ids", "client_mutation_id") + owner_id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="ownerId") + """The owner ID to create the project under.""" + + name = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="name") + """The name of project.""" + + body = sgqlc.types.Field(String, graphql_name="body") + """The description of project.""" + + template = sgqlc.types.Field(ProjectTemplate, graphql_name="template") + """The name of the GitHub-provided template.""" + + repository_ids = sgqlc.types.Field(sgqlc.types.list_of(sgqlc.types.non_null(ID)), graphql_name="repositoryIds") + """A list of repository IDs to create as linked repositories for the + project + """ + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class CreatePullRequestInput(sgqlc.types.Input): + """Autogenerated input type of CreatePullRequest""" + + __schema__ = github_schema + __field_names__ = ( + "repository_id", + "base_ref_name", + "head_ref_name", + "title", + "body", + "maintainer_can_modify", + "draft", + "client_mutation_id", + ) + repository_id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="repositoryId") + """The Node ID of the repository.""" + + base_ref_name = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="baseRefName") + """The name of the branch you want your changes pulled into. This + should be an existing branch on the current repository. You cannot + update the base branch on a pull request to point to another + repository. 
+ """ + + head_ref_name = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="headRefName") + """The name of the branch where your changes are implemented. For + cross-repository pull requests in the same network, namespace + `head_ref_name` with a user like this: `username:branch`. + """ + + title = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="title") + """The title of the pull request.""" + + body = sgqlc.types.Field(String, graphql_name="body") + """The contents of the pull request.""" + + maintainer_can_modify = sgqlc.types.Field(Boolean, graphql_name="maintainerCanModify") + """Indicates whether maintainers can modify the pull request.""" + + draft = sgqlc.types.Field(Boolean, graphql_name="draft") + """Indicates whether this pull request should be a draft.""" + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class CreateRefInput(sgqlc.types.Input): + """Autogenerated input type of CreateRef""" + + __schema__ = github_schema + __field_names__ = ("repository_id", "name", "oid", "client_mutation_id") + repository_id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="repositoryId") + """The Node ID of the Repository to create the Ref in.""" + + name = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="name") + """The fully qualified name of the new Ref (ie: + `refs/heads/my_new_branch`). + """ + + oid = sgqlc.types.Field(sgqlc.types.non_null(GitObjectID), graphql_name="oid") + """The GitObjectID that the new Ref shall target. Must point to a + commit. 
+ """ + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class CreateRepositoryInput(sgqlc.types.Input): + """Autogenerated input type of CreateRepository""" + + __schema__ = github_schema + __field_names__ = ( + "name", + "owner_id", + "description", + "visibility", + "template", + "homepage_url", + "has_wiki_enabled", + "has_issues_enabled", + "team_id", + "client_mutation_id", + ) + name = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="name") + """The name of the new repository.""" + + owner_id = sgqlc.types.Field(ID, graphql_name="ownerId") + """The ID of the owner for the new repository.""" + + description = sgqlc.types.Field(String, graphql_name="description") + """A short description of the new repository.""" + + visibility = sgqlc.types.Field(sgqlc.types.non_null(RepositoryVisibility), graphql_name="visibility") + """Indicates the repository's visibility level.""" + + template = sgqlc.types.Field(Boolean, graphql_name="template") + """Whether this repository should be marked as a template such that + anyone who can access it can create new repositories with the same + files and directory structure. + """ + + homepage_url = sgqlc.types.Field(URI, graphql_name="homepageUrl") + """The URL for a web page about this repository.""" + + has_wiki_enabled = sgqlc.types.Field(Boolean, graphql_name="hasWikiEnabled") + """Indicates if the repository should have the wiki feature enabled.""" + + has_issues_enabled = sgqlc.types.Field(Boolean, graphql_name="hasIssuesEnabled") + """Indicates if the repository should have the issues feature + enabled. + """ + + team_id = sgqlc.types.Field(ID, graphql_name="teamId") + """When an organization is specified as the owner, this ID identifies + the team that should be granted access to the new repository. 
+ """ + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class CreateSponsorsTierInput(sgqlc.types.Input): + """Autogenerated input type of CreateSponsorsTier""" + + __schema__ = github_schema + __field_names__ = ( + "sponsorable_id", + "sponsorable_login", + "amount", + "is_recurring", + "repository_id", + "repository_owner_login", + "repository_name", + "welcome_message", + "description", + "publish", + "client_mutation_id", + ) + sponsorable_id = sgqlc.types.Field(ID, graphql_name="sponsorableId") + """The ID of the user or organization who owns the GitHub Sponsors + profile. Defaults to the current user if omitted and + sponsorableLogin is not given. + """ + + sponsorable_login = sgqlc.types.Field(String, graphql_name="sponsorableLogin") + """The username of the user or organization who owns the GitHub + Sponsors profile. Defaults to the current user if omitted and + sponsorableId is not given. + """ + + amount = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="amount") + """The value of the new tier in US dollars. Valid values: 1-12000.""" + + is_recurring = sgqlc.types.Field(Boolean, graphql_name="isRecurring") + """Whether sponsorships using this tier should happen monthly/yearly + or just once. + """ + + repository_id = sgqlc.types.Field(ID, graphql_name="repositoryId") + """Optional ID of the private repository that sponsors at this tier + should gain read-only access to. Must be owned by an organization. + """ + + repository_owner_login = sgqlc.types.Field(String, graphql_name="repositoryOwnerLogin") + """Optional login of the organization owner of the private repository + that sponsors at this tier should gain read-only access to. + Necessary if repositoryName is given. Will be ignored if + repositoryId is given. 
+ """ + + repository_name = sgqlc.types.Field(String, graphql_name="repositoryName") + """Optional name of the private repository that sponsors at this tier + should gain read-only access to. Must be owned by an organization. + Necessary if repositoryOwnerLogin is given. Will be ignored if + repositoryId is given. + """ + + welcome_message = sgqlc.types.Field(String, graphql_name="welcomeMessage") + """Optional message new sponsors at this tier will receive.""" + + description = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="description") + """A description of what this tier is, what perks sponsors might + receive, what a sponsorship at this tier means for you, etc. + """ + + publish = sgqlc.types.Field(Boolean, graphql_name="publish") + """Whether to make the tier available immediately for sponsors to + choose. Defaults to creating a draft tier that will not be + publicly visible. + """ + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class CreateSponsorshipInput(sgqlc.types.Input): + """Autogenerated input type of CreateSponsorship""" + + __schema__ = github_schema + __field_names__ = ( + "sponsor_id", + "sponsor_login", + "sponsorable_id", + "sponsorable_login", + "tier_id", + "amount", + "is_recurring", + "receive_emails", + "privacy_level", + "client_mutation_id", + ) + sponsor_id = sgqlc.types.Field(ID, graphql_name="sponsorId") + """The ID of the user or organization who is acting as the sponsor, + paying for the sponsorship. Required if sponsorLogin is not given. + """ + + sponsor_login = sgqlc.types.Field(String, graphql_name="sponsorLogin") + """The username of the user or organization who is acting as the + sponsor, paying for the sponsorship. Required if sponsorId is not + given. + """ + + sponsorable_id = sgqlc.types.Field(ID, graphql_name="sponsorableId") + """The ID of the user or organization who is receiving the + sponsorship. 
Required if sponsorableLogin is not given. + """ + + sponsorable_login = sgqlc.types.Field(String, graphql_name="sponsorableLogin") + """The username of the user or organization who is receiving the + sponsorship. Required if sponsorableId is not given. + """ + + tier_id = sgqlc.types.Field(ID, graphql_name="tierId") + """The ID of one of sponsorable's existing tiers to sponsor at. + Required if amount is not specified. + """ + + amount = sgqlc.types.Field(Int, graphql_name="amount") + """The amount to pay to the sponsorable in US dollars. Required if a + tierId is not specified. Valid values: 1-12000. + """ + + is_recurring = sgqlc.types.Field(Boolean, graphql_name="isRecurring") + """Whether the sponsorship should happen monthly/yearly or just this + one time. Required if a tierId is not specified. + """ + + receive_emails = sgqlc.types.Field(Boolean, graphql_name="receiveEmails") + """Whether the sponsor should receive email updates from the + sponsorable. + """ + + privacy_level = sgqlc.types.Field(SponsorshipPrivacy, graphql_name="privacyLevel") + """Specify whether others should be able to see that the sponsor is + sponsoring the sponsorable. Public visibility still does not + reveal which tier is used. 
+ """ + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class CreateTeamDiscussionCommentInput(sgqlc.types.Input): + """Autogenerated input type of CreateTeamDiscussionComment""" + + __schema__ = github_schema + __field_names__ = ("discussion_id", "body", "client_mutation_id") + discussion_id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="discussionId") + """The ID of the discussion to which the comment belongs.""" + + body = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="body") + """The content of the comment.""" + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class CreateTeamDiscussionInput(sgqlc.types.Input): + """Autogenerated input type of CreateTeamDiscussion""" + + __schema__ = github_schema + __field_names__ = ("team_id", "title", "body", "private", "client_mutation_id") + team_id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="teamId") + """The ID of the team to which the discussion belongs.""" + + title = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="title") + """The title of the discussion.""" + + body = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="body") + """The content of the discussion.""" + + private = sgqlc.types.Field(Boolean, graphql_name="private") + """If true, restricts the visibility of this discussion to team + members and organization admins. If false or not specified, allows + any organization member to view this discussion. 
+ """ + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class DeclineTopicSuggestionInput(sgqlc.types.Input): + """Autogenerated input type of DeclineTopicSuggestion""" + + __schema__ = github_schema + __field_names__ = ("repository_id", "name", "reason", "client_mutation_id") + repository_id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="repositoryId") + """The Node ID of the repository.""" + + name = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="name") + """The name of the suggested topic.""" + + reason = sgqlc.types.Field(sgqlc.types.non_null(TopicSuggestionDeclineReason), graphql_name="reason") + """The reason why the suggested topic is declined.""" + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class DeleteBranchProtectionRuleInput(sgqlc.types.Input): + """Autogenerated input type of DeleteBranchProtectionRule""" + + __schema__ = github_schema + __field_names__ = ("branch_protection_rule_id", "client_mutation_id") + branch_protection_rule_id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="branchProtectionRuleId") + """The global relay id of the branch protection rule to be deleted.""" + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class DeleteDeploymentInput(sgqlc.types.Input): + """Autogenerated input type of DeleteDeployment""" + + __schema__ = github_schema + __field_names__ = ("id", "client_mutation_id") + id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="id") + """The Node ID of the deployment to be deleted.""" + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class 
DeleteDiscussionCommentInput(sgqlc.types.Input): + """Autogenerated input type of DeleteDiscussionComment""" + + __schema__ = github_schema + __field_names__ = ("id", "client_mutation_id") + id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="id") + """The Node id of the discussion comment to delete.""" + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class DeleteDiscussionInput(sgqlc.types.Input): + """Autogenerated input type of DeleteDiscussion""" + + __schema__ = github_schema + __field_names__ = ("id", "client_mutation_id") + id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="id") + """The id of the discussion to delete.""" + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class DeleteEnvironmentInput(sgqlc.types.Input): + """Autogenerated input type of DeleteEnvironment""" + + __schema__ = github_schema + __field_names__ = ("id", "client_mutation_id") + id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="id") + """The Node ID of the environment to be deleted.""" + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class DeleteIpAllowListEntryInput(sgqlc.types.Input): + """Autogenerated input type of DeleteIpAllowListEntry""" + + __schema__ = github_schema + __field_names__ = ("ip_allow_list_entry_id", "client_mutation_id") + ip_allow_list_entry_id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="ipAllowListEntryId") + """The ID of the IP allow list entry to delete.""" + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class DeleteIssueCommentInput(sgqlc.types.Input): + """Autogenerated input type 
of DeleteIssueComment""" + + __schema__ = github_schema + __field_names__ = ("id", "client_mutation_id") + id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="id") + """The ID of the comment to delete.""" + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class DeleteIssueInput(sgqlc.types.Input): + """Autogenerated input type of DeleteIssue""" + + __schema__ = github_schema + __field_names__ = ("issue_id", "client_mutation_id") + issue_id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="issueId") + """The ID of the issue to delete.""" + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class DeleteProjectCardInput(sgqlc.types.Input): + """Autogenerated input type of DeleteProjectCard""" + + __schema__ = github_schema + __field_names__ = ("card_id", "client_mutation_id") + card_id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="cardId") + """The id of the card to delete.""" + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class DeleteProjectColumnInput(sgqlc.types.Input): + """Autogenerated input type of DeleteProjectColumn""" + + __schema__ = github_schema + __field_names__ = ("column_id", "client_mutation_id") + column_id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="columnId") + """The id of the column to delete.""" + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class DeleteProjectInput(sgqlc.types.Input): + """Autogenerated input type of DeleteProject""" + + __schema__ = github_schema + __field_names__ = ("project_id", "client_mutation_id") + project_id = 
sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="projectId") + """The Project ID to update.""" + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class DeleteProjectNextItemInput(sgqlc.types.Input): + """Autogenerated input type of DeleteProjectNextItem""" + + __schema__ = github_schema + __field_names__ = ("project_id", "item_id", "client_mutation_id") + project_id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="projectId") + """The ID of the Project from which the item should be removed.""" + + item_id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="itemId") + """The ID of the item to be removed.""" + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class DeletePullRequestReviewCommentInput(sgqlc.types.Input): + """Autogenerated input type of DeletePullRequestReviewComment""" + + __schema__ = github_schema + __field_names__ = ("id", "client_mutation_id") + id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="id") + """The ID of the comment to delete.""" + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class DeletePullRequestReviewInput(sgqlc.types.Input): + """Autogenerated input type of DeletePullRequestReview""" + + __schema__ = github_schema + __field_names__ = ("pull_request_review_id", "client_mutation_id") + pull_request_review_id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="pullRequestReviewId") + """The Node ID of the pull request review to delete.""" + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class DeleteRefInput(sgqlc.types.Input): + """Autogenerated input type of 
DeleteRef""" + + __schema__ = github_schema + __field_names__ = ("ref_id", "client_mutation_id") + ref_id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="refId") + """The Node ID of the Ref to be deleted.""" + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class DeleteTeamDiscussionCommentInput(sgqlc.types.Input): + """Autogenerated input type of DeleteTeamDiscussionComment""" + + __schema__ = github_schema + __field_names__ = ("id", "client_mutation_id") + id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="id") + """The ID of the comment to delete.""" + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class DeleteTeamDiscussionInput(sgqlc.types.Input): + """Autogenerated input type of DeleteTeamDiscussion""" + + __schema__ = github_schema + __field_names__ = ("id", "client_mutation_id") + id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="id") + """The discussion ID to delete.""" + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class DeleteVerifiableDomainInput(sgqlc.types.Input): + """Autogenerated input type of DeleteVerifiableDomain""" + + __schema__ = github_schema + __field_names__ = ("id", "client_mutation_id") + id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="id") + """The ID of the verifiable domain to delete.""" + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class DeploymentOrder(sgqlc.types.Input): + """Ordering options for deployment connections""" + + __schema__ = github_schema + __field_names__ = ("field", "direction") + field = 
sgqlc.types.Field(sgqlc.types.non_null(DeploymentOrderField), graphql_name="field") + """The field to order deployments by.""" + + direction = sgqlc.types.Field(sgqlc.types.non_null(OrderDirection), graphql_name="direction") + """The ordering direction.""" + + +class DisablePullRequestAutoMergeInput(sgqlc.types.Input): + """Autogenerated input type of DisablePullRequestAutoMerge""" + + __schema__ = github_schema + __field_names__ = ("pull_request_id", "client_mutation_id") + pull_request_id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="pullRequestId") + """ID of the pull request to disable auto merge on.""" + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class DiscussionOrder(sgqlc.types.Input): + """Ways in which lists of discussions can be ordered upon return.""" + + __schema__ = github_schema + __field_names__ = ("field", "direction") + field = sgqlc.types.Field(sgqlc.types.non_null(DiscussionOrderField), graphql_name="field") + """The field by which to order discussions.""" + + direction = sgqlc.types.Field(sgqlc.types.non_null(OrderDirection), graphql_name="direction") + """The direction in which to order discussions by the specified + field. 
+ """ + + +class DiscussionPollOptionOrder(sgqlc.types.Input): + """Ordering options for discussion poll option connections.""" + + __schema__ = github_schema + __field_names__ = ("field", "direction") + field = sgqlc.types.Field(sgqlc.types.non_null(DiscussionPollOptionOrderField), graphql_name="field") + """The field to order poll options by.""" + + direction = sgqlc.types.Field(sgqlc.types.non_null(OrderDirection), graphql_name="direction") + """The ordering direction.""" + + +class DismissPullRequestReviewInput(sgqlc.types.Input): + """Autogenerated input type of DismissPullRequestReview""" + + __schema__ = github_schema + __field_names__ = ("pull_request_review_id", "message", "client_mutation_id") + pull_request_review_id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="pullRequestReviewId") + """The Node ID of the pull request review to modify.""" + + message = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="message") + """The contents of the pull request review dismissal message.""" + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class DismissRepositoryVulnerabilityAlertInput(sgqlc.types.Input): + """Autogenerated input type of DismissRepositoryVulnerabilityAlert""" + + __schema__ = github_schema + __field_names__ = ("repository_vulnerability_alert_id", "dismiss_reason", "client_mutation_id") + repository_vulnerability_alert_id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="repositoryVulnerabilityAlertId") + """The Dependabot alert ID to dismiss.""" + + dismiss_reason = sgqlc.types.Field(sgqlc.types.non_null(DismissReason), graphql_name="dismissReason") + """The reason the Dependabot alert is being dismissed.""" + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class 
DraftPullRequestReviewComment(sgqlc.types.Input): + """Specifies a review comment to be left with a Pull Request Review.""" + + __schema__ = github_schema + __field_names__ = ("path", "position", "body") + path = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="path") + """Path to the file being commented on.""" + + position = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="position") + """Position in the file to leave a comment on.""" + + body = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="body") + """Body of the comment to leave.""" + + +class DraftPullRequestReviewThread(sgqlc.types.Input): + """Specifies a review comment thread to be left with a Pull Request + Review. + """ + + __schema__ = github_schema + __field_names__ = ("path", "line", "side", "start_line", "start_side", "body") + path = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="path") + """Path to the file being commented on.""" + + line = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="line") + """The line of the blob to which the thread refers. The end of the + line range for multi-line comments. + """ + + side = sgqlc.types.Field(DiffSide, graphql_name="side") + """The side of the diff on which the line resides. For multi-line + comments, this is the side for the end of the line range. 
+ """ + + start_line = sgqlc.types.Field(Int, graphql_name="startLine") + """The first line of the range to which the comment refers.""" + + start_side = sgqlc.types.Field(DiffSide, graphql_name="startSide") + """The side of the diff on which the start line resides.""" + + body = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="body") + """Body of the comment to leave.""" + + +class EnablePullRequestAutoMergeInput(sgqlc.types.Input): + """Autogenerated input type of EnablePullRequestAutoMerge""" + + __schema__ = github_schema + __field_names__ = ("pull_request_id", "commit_headline", "commit_body", "merge_method", "author_email", "client_mutation_id") + pull_request_id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="pullRequestId") + """ID of the pull request to enable auto-merge on.""" + + commit_headline = sgqlc.types.Field(String, graphql_name="commitHeadline") + """Commit headline to use for the commit when the PR is mergable; if + omitted, a default message will be used. + """ + + commit_body = sgqlc.types.Field(String, graphql_name="commitBody") + """Commit body to use for the commit when the PR is mergable; if + omitted, a default message will be used. + """ + + merge_method = sgqlc.types.Field(PullRequestMergeMethod, graphql_name="mergeMethod") + """The merge method to use. 
If omitted, defaults to 'MERGE' """ + + author_email = sgqlc.types.Field(String, graphql_name="authorEmail") + """The email address to associate with this merge.""" + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class EnterpriseAdministratorInvitationOrder(sgqlc.types.Input): + """Ordering options for enterprise administrator invitation + connections + """ + + __schema__ = github_schema + __field_names__ = ("field", "direction") + field = sgqlc.types.Field(sgqlc.types.non_null(EnterpriseAdministratorInvitationOrderField), graphql_name="field") + """The field to order enterprise administrator invitations by.""" + + direction = sgqlc.types.Field(sgqlc.types.non_null(OrderDirection), graphql_name="direction") + """The ordering direction.""" + + +class EnterpriseMemberOrder(sgqlc.types.Input): + """Ordering options for enterprise member connections.""" + + __schema__ = github_schema + __field_names__ = ("field", "direction") + field = sgqlc.types.Field(sgqlc.types.non_null(EnterpriseMemberOrderField), graphql_name="field") + """The field to order enterprise members by.""" + + direction = sgqlc.types.Field(sgqlc.types.non_null(OrderDirection), graphql_name="direction") + """The ordering direction.""" + + +class EnterpriseServerInstallationOrder(sgqlc.types.Input): + """Ordering options for Enterprise Server installation connections.""" + + __schema__ = github_schema + __field_names__ = ("field", "direction") + field = sgqlc.types.Field(sgqlc.types.non_null(EnterpriseServerInstallationOrderField), graphql_name="field") + """The field to order Enterprise Server installations by.""" + + direction = sgqlc.types.Field(sgqlc.types.non_null(OrderDirection), graphql_name="direction") + """The ordering direction.""" + + +class EnterpriseServerUserAccountEmailOrder(sgqlc.types.Input): + """Ordering options for Enterprise Server user account email + connections. 
+ """ + + __schema__ = github_schema + __field_names__ = ("field", "direction") + field = sgqlc.types.Field(sgqlc.types.non_null(EnterpriseServerUserAccountEmailOrderField), graphql_name="field") + """The field to order emails by.""" + + direction = sgqlc.types.Field(sgqlc.types.non_null(OrderDirection), graphql_name="direction") + """The ordering direction.""" + + +class EnterpriseServerUserAccountOrder(sgqlc.types.Input): + """Ordering options for Enterprise Server user account connections.""" + + __schema__ = github_schema + __field_names__ = ("field", "direction") + field = sgqlc.types.Field(sgqlc.types.non_null(EnterpriseServerUserAccountOrderField), graphql_name="field") + """The field to order user accounts by.""" + + direction = sgqlc.types.Field(sgqlc.types.non_null(OrderDirection), graphql_name="direction") + """The ordering direction.""" + + +class EnterpriseServerUserAccountsUploadOrder(sgqlc.types.Input): + """Ordering options for Enterprise Server user accounts upload + connections. + """ + + __schema__ = github_schema + __field_names__ = ("field", "direction") + field = sgqlc.types.Field(sgqlc.types.non_null(EnterpriseServerUserAccountsUploadOrderField), graphql_name="field") + """The field to order user accounts uploads by.""" + + direction = sgqlc.types.Field(sgqlc.types.non_null(OrderDirection), graphql_name="direction") + """The ordering direction.""" + + +class FileAddition(sgqlc.types.Input): + """A command to add a file at the given path with the given contents + as part of a commit. Any existing file at that that path will be + replaced. 
+ """ + + __schema__ = github_schema + __field_names__ = ("path", "contents") + path = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="path") + """The path in the repository where the file will be located""" + + contents = sgqlc.types.Field(sgqlc.types.non_null(Base64String), graphql_name="contents") + """The base64 encoded contents of the file""" + + +class FileChanges(sgqlc.types.Input): + """A description of a set of changes to a file tree to be made as + part of a git commit, modeled as zero or more file `additions` and + zero or more file `deletions`. Both fields are optional; omitting + both will produce a commit with no file changes. `deletions` and + `additions` describe changes to files identified by their path in + the git tree using unix-style path separators, i.e. `/`. The root + of a git tree is an empty string, so paths are not slash-prefixed. + `path` values must be unique across all `additions` and + `deletions` provided. Any duplication will result in a validation + error. ### Encoding File contents must be provided in full for + each `FileAddition`. The `contents` of a `FileAddition` must be + encoded using RFC 4648 compliant base64, i.e. correct padding is + required and no characters outside the standard alphabet may be + used. Invalid base64 encoding will be rejected with a validation + error. The encoded contents may be binary. For text files, no + assumptions are made about the character encoding of the file + contents (after base64 decoding). No charset transcoding or line- + ending normalization will be performed; it is the client's + responsibility to manage the character encoding of files they + provide. However, for maximum compatibility we recommend using + UTF-8 encoding and ensuring that all files in a repository use a + consistent line-ending convention (`\n` or `\r\n`), and that all + files end with a newline. 
### Modeling file changes Each of the + the five types of conceptual changes that can be made in a git + commit can be described using the `FileChanges` type as follows: + 1. New file addition: create file `hello world\n` at path + `docs/README.txt`: { "additions" [ { + "path": "docs/README.txt", "contents": + base64encode("hello world\n") } ] } 2. + Existing file modification: change existing `docs/README.txt` to + have new content `new content here\n`: { + "additions" [ { "path": "docs/README.txt", + "contents": base64encode("new content here\n") } + ] } 3. Existing file deletion: remove existing file + `docs/README.txt`. Note that the path is required to exist -- + specifying a path that does not exist on the given branch will + abort the commit and return an error. { + "deletions" [ { "path": "docs/README.txt" + } ] } 4. File rename with no changes: rename + `docs/README.txt` with previous content `hello world\n` to the + same content at `newdocs/README.txt`: { + "deletions" [ { "path": "docs/README.txt", + } ], "additions" [ { + "path": "newdocs/README.txt", "contents": + base64encode("hello world\n") } ] } + 5. File rename with changes: rename `docs/README.txt` with + previous content `hello world\n` to a file at path + `newdocs/README.txt` with content `new contents\n`: { + "deletions" [ { "path": "docs/README.txt", + } ], "additions" [ { + "path": "newdocs/README.txt", "contents": + base64encode("new contents\n") } ] } + """ + + __schema__ = github_schema + __field_names__ = ("deletions", "additions") + deletions = sgqlc.types.Field(sgqlc.types.list_of(sgqlc.types.non_null("FileDeletion")), graphql_name="deletions") + """Files to delete.""" + + additions = sgqlc.types.Field(sgqlc.types.list_of(sgqlc.types.non_null(FileAddition)), graphql_name="additions") + """File to add or change.""" + + +class FileDeletion(sgqlc.types.Input): + """A command to delete the file at the given path as part of a + commit. 
+ """ + + __schema__ = github_schema + __field_names__ = ("path",) + path = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="path") + """The path to delete""" + + +class FollowOrganizationInput(sgqlc.types.Input): + """Autogenerated input type of FollowOrganization""" + + __schema__ = github_schema + __field_names__ = ("organization_id", "client_mutation_id") + organization_id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="organizationId") + """ID of the organization to follow.""" + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class FollowUserInput(sgqlc.types.Input): + """Autogenerated input type of FollowUser""" + + __schema__ = github_schema + __field_names__ = ("user_id", "client_mutation_id") + user_id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="userId") + """ID of the user to follow.""" + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class GistOrder(sgqlc.types.Input): + """Ordering options for gist connections""" + + __schema__ = github_schema + __field_names__ = ("field", "direction") + field = sgqlc.types.Field(sgqlc.types.non_null(GistOrderField), graphql_name="field") + """The field to order repositories by.""" + + direction = sgqlc.types.Field(sgqlc.types.non_null(OrderDirection), graphql_name="direction") + """The ordering direction.""" + + +class GrantEnterpriseOrganizationsMigratorRoleInput(sgqlc.types.Input): + """Autogenerated input type of + GrantEnterpriseOrganizationsMigratorRole + """ + + __schema__ = github_schema + __field_names__ = ("enterprise_id", "login", "client_mutation_id") + enterprise_id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="enterpriseId") + """The ID of the enterprise to which all organizations managed by it + will be granted the migrator role. 
+ """ + + login = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="login") + """The login of the user to grant the migrator role""" + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class GrantMigratorRoleInput(sgqlc.types.Input): + """Autogenerated input type of GrantMigratorRole""" + + __schema__ = github_schema + __field_names__ = ("organization_id", "actor", "actor_type", "client_mutation_id") + organization_id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="organizationId") + """The ID of the organization that the user/team belongs to.""" + + actor = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="actor") + """The user login or Team slug to grant the migrator role.""" + + actor_type = sgqlc.types.Field(sgqlc.types.non_null(ActorType), graphql_name="actorType") + """Specifies the type of the actor, can be either USER or TEAM.""" + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class InviteEnterpriseAdminInput(sgqlc.types.Input): + """Autogenerated input type of InviteEnterpriseAdmin""" + + __schema__ = github_schema + __field_names__ = ("enterprise_id", "invitee", "email", "role", "client_mutation_id") + enterprise_id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="enterpriseId") + """The ID of the enterprise to which you want to invite an + administrator. 
+ """ + + invitee = sgqlc.types.Field(String, graphql_name="invitee") + """The login of a user to invite as an administrator.""" + + email = sgqlc.types.Field(String, graphql_name="email") + """The email of the person to invite as an administrator.""" + + role = sgqlc.types.Field(EnterpriseAdministratorRole, graphql_name="role") + """The role of the administrator.""" + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class IpAllowListEntryOrder(sgqlc.types.Input): + """Ordering options for IP allow list entry connections.""" + + __schema__ = github_schema + __field_names__ = ("field", "direction") + field = sgqlc.types.Field(sgqlc.types.non_null(IpAllowListEntryOrderField), graphql_name="field") + """The field to order IP allow list entries by.""" + + direction = sgqlc.types.Field(sgqlc.types.non_null(OrderDirection), graphql_name="direction") + """The ordering direction.""" + + +class IssueCommentOrder(sgqlc.types.Input): + """Ways in which lists of issue comments can be ordered upon return.""" + + __schema__ = github_schema + __field_names__ = ("field", "direction") + field = sgqlc.types.Field(sgqlc.types.non_null(IssueCommentOrderField), graphql_name="field") + """The field in which to order issue comments by.""" + + direction = sgqlc.types.Field(sgqlc.types.non_null(OrderDirection), graphql_name="direction") + """The direction in which to order issue comments by the specified + field. + """ + + +class IssueFilters(sgqlc.types.Input): + """Ways in which to filter lists of issues.""" + + __schema__ = github_schema + __field_names__ = ( + "assignee", + "created_by", + "labels", + "mentioned", + "milestone", + "milestone_number", + "since", + "states", + "viewer_subscribed", + ) + assignee = sgqlc.types.Field(String, graphql_name="assignee") + """List issues assigned to given name. 
Pass in `null` for issues with + no assigned user, and `*` for issues assigned to any user. + """ + + created_by = sgqlc.types.Field(String, graphql_name="createdBy") + """List issues created by given name.""" + + labels = sgqlc.types.Field(sgqlc.types.list_of(sgqlc.types.non_null(String)), graphql_name="labels") + """List issues where the list of label names exist on the issue.""" + + mentioned = sgqlc.types.Field(String, graphql_name="mentioned") + """List issues where the given name is mentioned in the issue.""" + + milestone = sgqlc.types.Field(String, graphql_name="milestone") + """List issues by given milestone argument. If an string + representation of an integer is passed, it should refer to a + milestone by its database ID. Pass in `null` for issues with no + milestone, and `*` for issues that are assigned to any milestone. + """ + + milestone_number = sgqlc.types.Field(String, graphql_name="milestoneNumber") + """List issues by given milestone argument. If an string + representation of an integer is passed, it should refer to a + milestone by its number field. Pass in `null` for issues with no + milestone, and `*` for issues that are assigned to any milestone. 
+ """ + + since = sgqlc.types.Field(DateTime, graphql_name="since") + """List issues that have been updated at or after the given date.""" + + states = sgqlc.types.Field(sgqlc.types.list_of(sgqlc.types.non_null(IssueState)), graphql_name="states") + """List issues filtered by the list of states given.""" + + viewer_subscribed = sgqlc.types.Field(Boolean, graphql_name="viewerSubscribed") + """List issues subscribed to by viewer.""" + + +class IssueOrder(sgqlc.types.Input): + """Ways in which lists of issues can be ordered upon return.""" + + __schema__ = github_schema + __field_names__ = ("field", "direction") + field = sgqlc.types.Field(sgqlc.types.non_null(IssueOrderField), graphql_name="field") + """The field in which to order issues by.""" + + direction = sgqlc.types.Field(sgqlc.types.non_null(OrderDirection), graphql_name="direction") + """The direction in which to order issues by the specified field.""" + + +class LabelOrder(sgqlc.types.Input): + """Ways in which lists of labels can be ordered upon return.""" + + __schema__ = github_schema + __field_names__ = ("field", "direction") + field = sgqlc.types.Field(sgqlc.types.non_null(LabelOrderField), graphql_name="field") + """The field in which to order labels by.""" + + direction = sgqlc.types.Field(sgqlc.types.non_null(OrderDirection), graphql_name="direction") + """The direction in which to order labels by the specified field.""" + + +class LanguageOrder(sgqlc.types.Input): + """Ordering options for language connections.""" + + __schema__ = github_schema + __field_names__ = ("field", "direction") + field = sgqlc.types.Field(sgqlc.types.non_null(LanguageOrderField), graphql_name="field") + """The field to order languages by.""" + + direction = sgqlc.types.Field(sgqlc.types.non_null(OrderDirection), graphql_name="direction") + """The ordering direction.""" + + +class LinkRepositoryToProjectInput(sgqlc.types.Input): + """Autogenerated input type of LinkRepositoryToProject""" + + __schema__ = github_schema + 
__field_names__ = ("project_id", "repository_id", "client_mutation_id") + project_id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="projectId") + """The ID of the Project to link to a Repository""" + + repository_id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="repositoryId") + """The ID of the Repository to link to a Project.""" + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class LockLockableInput(sgqlc.types.Input): + """Autogenerated input type of LockLockable""" + + __schema__ = github_schema + __field_names__ = ("lockable_id", "lock_reason", "client_mutation_id") + lockable_id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="lockableId") + """ID of the item to be locked.""" + + lock_reason = sgqlc.types.Field(LockReason, graphql_name="lockReason") + """A reason for why the item will be locked.""" + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class MarkDiscussionCommentAsAnswerInput(sgqlc.types.Input): + """Autogenerated input type of MarkDiscussionCommentAsAnswer""" + + __schema__ = github_schema + __field_names__ = ("id", "client_mutation_id") + id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="id") + """The Node ID of the discussion comment to mark as an answer.""" + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class MarkFileAsViewedInput(sgqlc.types.Input): + """Autogenerated input type of MarkFileAsViewed""" + + __schema__ = github_schema + __field_names__ = ("pull_request_id", "path", "client_mutation_id") + pull_request_id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="pullRequestId") + """The Node ID of the pull request.""" + + path = 
sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="path") + """The path of the file to mark as viewed""" + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class MarkPullRequestReadyForReviewInput(sgqlc.types.Input): + """Autogenerated input type of MarkPullRequestReadyForReview""" + + __schema__ = github_schema + __field_names__ = ("pull_request_id", "client_mutation_id") + pull_request_id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="pullRequestId") + """ID of the pull request to be marked as ready for review.""" + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class MergeBranchInput(sgqlc.types.Input): + """Autogenerated input type of MergeBranch""" + + __schema__ = github_schema + __field_names__ = ("repository_id", "base", "head", "commit_message", "author_email", "client_mutation_id") + repository_id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="repositoryId") + """The Node ID of the Repository containing the base branch that will + be modified. + """ + + base = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="base") + """The name of the base branch that the provided head will be merged + into. + """ + + head = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="head") + """The head to merge into the base branch. This can be a branch name + or a commit GitObjectID. + """ + + commit_message = sgqlc.types.Field(String, graphql_name="commitMessage") + """Message to use for the merge commit. If omitted, a default will be + used. 
+ """ + + author_email = sgqlc.types.Field(String, graphql_name="authorEmail") + """The email address to associate with this commit.""" + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class MergePullRequestInput(sgqlc.types.Input): + """Autogenerated input type of MergePullRequest""" + + __schema__ = github_schema + __field_names__ = ( + "pull_request_id", + "commit_headline", + "commit_body", + "expected_head_oid", + "merge_method", + "author_email", + "client_mutation_id", + ) + pull_request_id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="pullRequestId") + """ID of the pull request to be merged.""" + + commit_headline = sgqlc.types.Field(String, graphql_name="commitHeadline") + """Commit headline to use for the merge commit; if omitted, a default + message will be used. + """ + + commit_body = sgqlc.types.Field(String, graphql_name="commitBody") + """Commit body to use for the merge commit; if omitted, a default + message will be used + """ + + expected_head_oid = sgqlc.types.Field(GitObjectID, graphql_name="expectedHeadOid") + """OID that the pull request head ref must match to allow merge; if + omitted, no check is performed. + """ + + merge_method = sgqlc.types.Field(PullRequestMergeMethod, graphql_name="mergeMethod") + """The merge method to use. 
If omitted, defaults to 'MERGE' """ + + author_email = sgqlc.types.Field(String, graphql_name="authorEmail") + """The email address to associate with this merge.""" + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class MilestoneOrder(sgqlc.types.Input): + """Ordering options for milestone connections.""" + + __schema__ = github_schema + __field_names__ = ("field", "direction") + field = sgqlc.types.Field(sgqlc.types.non_null(MilestoneOrderField), graphql_name="field") + """The field to order milestones by.""" + + direction = sgqlc.types.Field(sgqlc.types.non_null(OrderDirection), graphql_name="direction") + """The ordering direction.""" + + +class MinimizeCommentInput(sgqlc.types.Input): + """Autogenerated input type of MinimizeComment""" + + __schema__ = github_schema + __field_names__ = ("subject_id", "classifier", "client_mutation_id") + subject_id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="subjectId") + """The Node ID of the subject to modify.""" + + classifier = sgqlc.types.Field(sgqlc.types.non_null(ReportedContentClassifiers), graphql_name="classifier") + """The classification of comment""" + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class MoveProjectCardInput(sgqlc.types.Input): + """Autogenerated input type of MoveProjectCard""" + + __schema__ = github_schema + __field_names__ = ("card_id", "column_id", "after_card_id", "client_mutation_id") + card_id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="cardId") + """The id of the card to move.""" + + column_id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="columnId") + """The id of the column to move it into.""" + + after_card_id = sgqlc.types.Field(ID, graphql_name="afterCardId") + """Place the new card after the card with this id. 
Pass null to place + it at the top. + """ + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class MoveProjectColumnInput(sgqlc.types.Input): + """Autogenerated input type of MoveProjectColumn""" + + __schema__ = github_schema + __field_names__ = ("column_id", "after_column_id", "client_mutation_id") + column_id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="columnId") + """The id of the column to move.""" + + after_column_id = sgqlc.types.Field(ID, graphql_name="afterColumnId") + """Place the new column after the column with this id. Pass null to + place it at the front. + """ + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class OrgEnterpriseOwnerOrder(sgqlc.types.Input): + """Ordering options for an organization's enterprise owner + connections. + """ + + __schema__ = github_schema + __field_names__ = ("field", "direction") + field = sgqlc.types.Field(sgqlc.types.non_null(OrgEnterpriseOwnerOrderField), graphql_name="field") + """The field to order enterprise owners by.""" + + direction = sgqlc.types.Field(sgqlc.types.non_null(OrderDirection), graphql_name="direction") + """The ordering direction.""" + + +class OrganizationOrder(sgqlc.types.Input): + """Ordering options for organization connections.""" + + __schema__ = github_schema + __field_names__ = ("field", "direction") + field = sgqlc.types.Field(sgqlc.types.non_null(OrganizationOrderField), graphql_name="field") + """The field to order organizations by.""" + + direction = sgqlc.types.Field(sgqlc.types.non_null(OrderDirection), graphql_name="direction") + """The ordering direction.""" + + +class PackageFileOrder(sgqlc.types.Input): + """Ways in which lists of package files can be ordered upon return.""" + + __schema__ = github_schema + __field_names__ = ("field", "direction") + field 
= sgqlc.types.Field(PackageFileOrderField, graphql_name="field") + """The field in which to order package files by.""" + + direction = sgqlc.types.Field(OrderDirection, graphql_name="direction") + """The direction in which to order package files by the specified + field. + """ + + +class PackageOrder(sgqlc.types.Input): + """Ways in which lists of packages can be ordered upon return.""" + + __schema__ = github_schema + __field_names__ = ("field", "direction") + field = sgqlc.types.Field(PackageOrderField, graphql_name="field") + """The field in which to order packages by.""" + + direction = sgqlc.types.Field(OrderDirection, graphql_name="direction") + """The direction in which to order packages by the specified field.""" + + +class PackageVersionOrder(sgqlc.types.Input): + """Ways in which lists of package versions can be ordered upon + return. + """ + + __schema__ = github_schema + __field_names__ = ("field", "direction") + field = sgqlc.types.Field(PackageVersionOrderField, graphql_name="field") + """The field in which to order package versions by.""" + + direction = sgqlc.types.Field(OrderDirection, graphql_name="direction") + """The direction in which to order package versions by the specified + field. 
+ """ + + +class PinIssueInput(sgqlc.types.Input): + """Autogenerated input type of PinIssue""" + + __schema__ = github_schema + __field_names__ = ("issue_id", "client_mutation_id") + issue_id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="issueId") + """The ID of the issue to be pinned""" + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class ProjectOrder(sgqlc.types.Input): + """Ways in which lists of projects can be ordered upon return.""" + + __schema__ = github_schema + __field_names__ = ("field", "direction") + field = sgqlc.types.Field(sgqlc.types.non_null(ProjectOrderField), graphql_name="field") + """The field in which to order projects by.""" + + direction = sgqlc.types.Field(sgqlc.types.non_null(OrderDirection), graphql_name="direction") + """The direction in which to order projects by the specified field.""" + + +class PullRequestOrder(sgqlc.types.Input): + """Ways in which lists of issues can be ordered upon return.""" + + __schema__ = github_schema + __field_names__ = ("field", "direction") + field = sgqlc.types.Field(sgqlc.types.non_null(PullRequestOrderField), graphql_name="field") + """The field in which to order pull requests by.""" + + direction = sgqlc.types.Field(sgqlc.types.non_null(OrderDirection), graphql_name="direction") + """The direction in which to order pull requests by the specified + field. 
+ """ + + +class ReactionOrder(sgqlc.types.Input): + """Ways in which lists of reactions can be ordered upon return.""" + + __schema__ = github_schema + __field_names__ = ("field", "direction") + field = sgqlc.types.Field(sgqlc.types.non_null(ReactionOrderField), graphql_name="field") + """The field in which to order reactions by.""" + + direction = sgqlc.types.Field(sgqlc.types.non_null(OrderDirection), graphql_name="direction") + """The direction in which to order reactions by the specified field.""" + + +class RefOrder(sgqlc.types.Input): + """Ways in which lists of git refs can be ordered upon return.""" + + __schema__ = github_schema + __field_names__ = ("field", "direction") + field = sgqlc.types.Field(sgqlc.types.non_null(RefOrderField), graphql_name="field") + """The field in which to order refs by.""" + + direction = sgqlc.types.Field(sgqlc.types.non_null(OrderDirection), graphql_name="direction") + """The direction in which to order refs by the specified field.""" + + +class RegenerateEnterpriseIdentityProviderRecoveryCodesInput(sgqlc.types.Input): + """Autogenerated input type of + RegenerateEnterpriseIdentityProviderRecoveryCodes + """ + + __schema__ = github_schema + __field_names__ = ("enterprise_id", "client_mutation_id") + enterprise_id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="enterpriseId") + """The ID of the enterprise on which to set an identity provider.""" + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class RegenerateVerifiableDomainTokenInput(sgqlc.types.Input): + """Autogenerated input type of RegenerateVerifiableDomainToken""" + + __schema__ = github_schema + __field_names__ = ("id", "client_mutation_id") + id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="id") + """The ID of the verifiable domain to regenerate the verification + token of. 
+ """ + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class RejectDeploymentsInput(sgqlc.types.Input): + """Autogenerated input type of RejectDeployments""" + + __schema__ = github_schema + __field_names__ = ("workflow_run_id", "environment_ids", "comment", "client_mutation_id") + workflow_run_id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="workflowRunId") + """The node ID of the workflow run containing the pending + deployments. + """ + + environment_ids = sgqlc.types.Field(sgqlc.types.non_null(sgqlc.types.list_of(sgqlc.types.non_null(ID))), graphql_name="environmentIds") + """The ids of environments to reject deployments""" + + comment = sgqlc.types.Field(String, graphql_name="comment") + """Optional comment for rejecting deployments""" + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class ReleaseOrder(sgqlc.types.Input): + """Ways in which lists of releases can be ordered upon return.""" + + __schema__ = github_schema + __field_names__ = ("field", "direction") + field = sgqlc.types.Field(sgqlc.types.non_null(ReleaseOrderField), graphql_name="field") + """The field in which to order releases by.""" + + direction = sgqlc.types.Field(sgqlc.types.non_null(OrderDirection), graphql_name="direction") + """The direction in which to order releases by the specified field.""" + + +class RemoveAssigneesFromAssignableInput(sgqlc.types.Input): + """Autogenerated input type of RemoveAssigneesFromAssignable""" + + __schema__ = github_schema + __field_names__ = ("assignable_id", "assignee_ids", "client_mutation_id") + assignable_id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="assignableId") + """The id of the assignable object to remove assignees from.""" + + assignee_ids = 
sgqlc.types.Field(sgqlc.types.non_null(sgqlc.types.list_of(sgqlc.types.non_null(ID))), graphql_name="assigneeIds") + """The id of users to remove as assignees.""" + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class RemoveEnterpriseAdminInput(sgqlc.types.Input): + """Autogenerated input type of RemoveEnterpriseAdmin""" + + __schema__ = github_schema + __field_names__ = ("enterprise_id", "login", "client_mutation_id") + enterprise_id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="enterpriseId") + """The Enterprise ID from which to remove the administrator.""" + + login = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="login") + """The login of the user to remove as an administrator.""" + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class RemoveEnterpriseIdentityProviderInput(sgqlc.types.Input): + """Autogenerated input type of RemoveEnterpriseIdentityProvider""" + + __schema__ = github_schema + __field_names__ = ("enterprise_id", "client_mutation_id") + enterprise_id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="enterpriseId") + """The ID of the enterprise from which to remove the identity + provider. + """ + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class RemoveEnterpriseOrganizationInput(sgqlc.types.Input): + """Autogenerated input type of RemoveEnterpriseOrganization""" + + __schema__ = github_schema + __field_names__ = ("enterprise_id", "organization_id", "client_mutation_id") + enterprise_id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="enterpriseId") + """The ID of the enterprise from which the organization should be + removed. 
+ """ + + organization_id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="organizationId") + """The ID of the organization to remove from the enterprise.""" + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class RemoveEnterpriseSupportEntitlementInput(sgqlc.types.Input): + """Autogenerated input type of RemoveEnterpriseSupportEntitlement""" + + __schema__ = github_schema + __field_names__ = ("enterprise_id", "login", "client_mutation_id") + enterprise_id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="enterpriseId") + """The ID of the Enterprise which the admin belongs to.""" + + login = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="login") + """The login of a member who will lose the support entitlement.""" + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class RemoveLabelsFromLabelableInput(sgqlc.types.Input): + """Autogenerated input type of RemoveLabelsFromLabelable""" + + __schema__ = github_schema + __field_names__ = ("labelable_id", "label_ids", "client_mutation_id") + labelable_id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="labelableId") + """The id of the Labelable to remove labels from.""" + + label_ids = sgqlc.types.Field(sgqlc.types.non_null(sgqlc.types.list_of(sgqlc.types.non_null(ID))), graphql_name="labelIds") + """The ids of labels to remove.""" + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class RemoveOutsideCollaboratorInput(sgqlc.types.Input): + """Autogenerated input type of RemoveOutsideCollaborator""" + + __schema__ = github_schema + __field_names__ = ("user_id", "organization_id", "client_mutation_id") + user_id = sgqlc.types.Field(sgqlc.types.non_null(ID), 
graphql_name="userId") + """The ID of the outside collaborator to remove.""" + + organization_id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="organizationId") + """The ID of the organization to remove the outside collaborator + from. + """ + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class RemoveReactionInput(sgqlc.types.Input): + """Autogenerated input type of RemoveReaction""" + + __schema__ = github_schema + __field_names__ = ("subject_id", "content", "client_mutation_id") + subject_id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="subjectId") + """The Node ID of the subject to modify.""" + + content = sgqlc.types.Field(sgqlc.types.non_null(ReactionContent), graphql_name="content") + """The name of the emoji reaction to remove.""" + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class RemoveStarInput(sgqlc.types.Input): + """Autogenerated input type of RemoveStar""" + + __schema__ = github_schema + __field_names__ = ("starrable_id", "client_mutation_id") + starrable_id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="starrableId") + """The Starrable ID to unstar.""" + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class RemoveUpvoteInput(sgqlc.types.Input): + """Autogenerated input type of RemoveUpvote""" + + __schema__ = github_schema + __field_names__ = ("subject_id", "client_mutation_id") + subject_id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="subjectId") + """The Node ID of the discussion or comment to remove upvote.""" + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class 
ReopenIssueInput(sgqlc.types.Input): + """Autogenerated input type of ReopenIssue""" + + __schema__ = github_schema + __field_names__ = ("issue_id", "client_mutation_id") + issue_id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="issueId") + """ID of the issue to be opened.""" + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class ReopenPullRequestInput(sgqlc.types.Input): + """Autogenerated input type of ReopenPullRequest""" + + __schema__ = github_schema + __field_names__ = ("pull_request_id", "client_mutation_id") + pull_request_id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="pullRequestId") + """ID of the pull request to be reopened.""" + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class RepositoryInvitationOrder(sgqlc.types.Input): + """Ordering options for repository invitation connections.""" + + __schema__ = github_schema + __field_names__ = ("field", "direction") + field = sgqlc.types.Field(sgqlc.types.non_null(RepositoryInvitationOrderField), graphql_name="field") + """The field to order repository invitations by.""" + + direction = sgqlc.types.Field(sgqlc.types.non_null(OrderDirection), graphql_name="direction") + """The ordering direction.""" + + +class RepositoryMigrationOrder(sgqlc.types.Input): + """Ordering options for repository migrations.""" + + __schema__ = github_schema + __field_names__ = ("field", "direction") + field = sgqlc.types.Field(sgqlc.types.non_null(RepositoryMigrationOrderField), graphql_name="field") + """The field to order repository migrations by.""" + + direction = sgqlc.types.Field(sgqlc.types.non_null(RepositoryMigrationOrderDirection), graphql_name="direction") + """The ordering direction.""" + + +class RepositoryOrder(sgqlc.types.Input): + """Ordering options for repository 
connections""" + + __schema__ = github_schema + __field_names__ = ("field", "direction") + field = sgqlc.types.Field(sgqlc.types.non_null(RepositoryOrderField), graphql_name="field") + """The field to order repositories by.""" + + direction = sgqlc.types.Field(sgqlc.types.non_null(OrderDirection), graphql_name="direction") + """The ordering direction.""" + + +class RequestReviewsInput(sgqlc.types.Input): + """Autogenerated input type of RequestReviews""" + + __schema__ = github_schema + __field_names__ = ("pull_request_id", "user_ids", "team_ids", "union", "client_mutation_id") + pull_request_id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="pullRequestId") + """The Node ID of the pull request to modify.""" + + user_ids = sgqlc.types.Field(sgqlc.types.list_of(sgqlc.types.non_null(ID)), graphql_name="userIds") + """The Node IDs of the user to request.""" + + team_ids = sgqlc.types.Field(sgqlc.types.list_of(sgqlc.types.non_null(ID)), graphql_name="teamIds") + """The Node IDs of the team to request.""" + + union = sgqlc.types.Field(Boolean, graphql_name="union") + """Add users to the set rather than replace.""" + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class RequiredStatusCheckInput(sgqlc.types.Input): + """Specifies the attributes for a new or updated required status + check. + """ + + __schema__ = github_schema + __field_names__ = ("context", "app_id") + context = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="context") + """Status check context that must pass for commits to be accepted to + the matching branch. + """ + + app_id = sgqlc.types.Field(ID, graphql_name="appId") + """The ID of the App that must set the status in order for it to be + accepted. Omit this value to use whichever app has recently been + setting this status, or use "any" to allow any app to set the + status. 
+ """ + + +class RerequestCheckSuiteInput(sgqlc.types.Input): + """Autogenerated input type of RerequestCheckSuite""" + + __schema__ = github_schema + __field_names__ = ("repository_id", "check_suite_id", "client_mutation_id") + repository_id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="repositoryId") + """The Node ID of the repository.""" + + check_suite_id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="checkSuiteId") + """The Node ID of the check suite.""" + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class ResolveReviewThreadInput(sgqlc.types.Input): + """Autogenerated input type of ResolveReviewThread""" + + __schema__ = github_schema + __field_names__ = ("thread_id", "client_mutation_id") + thread_id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="threadId") + """The ID of the thread to resolve""" + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class RevokeEnterpriseOrganizationsMigratorRoleInput(sgqlc.types.Input): + """Autogenerated input type of + RevokeEnterpriseOrganizationsMigratorRole + """ + + __schema__ = github_schema + __field_names__ = ("enterprise_id", "login", "client_mutation_id") + enterprise_id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="enterpriseId") + """The ID of the enterprise to which all organizations managed by it + will be granted the migrator role. 
+ """ + + login = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="login") + """The login of the user to revoke the migrator role""" + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class RevokeMigratorRoleInput(sgqlc.types.Input): + """Autogenerated input type of RevokeMigratorRole""" + + __schema__ = github_schema + __field_names__ = ("organization_id", "actor", "actor_type", "client_mutation_id") + organization_id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="organizationId") + """The ID of the organization that the user/team belongs to.""" + + actor = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="actor") + """The user login or Team slug to revoke the migrator role from.""" + + actor_type = sgqlc.types.Field(sgqlc.types.non_null(ActorType), graphql_name="actorType") + """Specifies the type of the actor, can be either USER or TEAM.""" + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class SavedReplyOrder(sgqlc.types.Input): + """Ordering options for saved reply connections.""" + + __schema__ = github_schema + __field_names__ = ("field", "direction") + field = sgqlc.types.Field(sgqlc.types.non_null(SavedReplyOrderField), graphql_name="field") + """The field to order saved replies by.""" + + direction = sgqlc.types.Field(sgqlc.types.non_null(OrderDirection), graphql_name="direction") + """The ordering direction.""" + + +class SecurityAdvisoryIdentifierFilter(sgqlc.types.Input): + """An advisory identifier to filter results on.""" + + __schema__ = github_schema + __field_names__ = ("type", "value") + type = sgqlc.types.Field(sgqlc.types.non_null(SecurityAdvisoryIdentifierType), graphql_name="type") + """The identifier type.""" + + value = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="value") + 
"""The identifier string. Supports exact or partial matching.""" + + +class SecurityAdvisoryOrder(sgqlc.types.Input): + """Ordering options for security advisory connections""" + + __schema__ = github_schema + __field_names__ = ("field", "direction") + field = sgqlc.types.Field(sgqlc.types.non_null(SecurityAdvisoryOrderField), graphql_name="field") + """The field to order security advisories by.""" + + direction = sgqlc.types.Field(sgqlc.types.non_null(OrderDirection), graphql_name="direction") + """The ordering direction.""" + + +class SecurityVulnerabilityOrder(sgqlc.types.Input): + """Ordering options for security vulnerability connections""" + + __schema__ = github_schema + __field_names__ = ("field", "direction") + field = sgqlc.types.Field(sgqlc.types.non_null(SecurityVulnerabilityOrderField), graphql_name="field") + """The field to order security vulnerabilities by.""" + + direction = sgqlc.types.Field(sgqlc.types.non_null(OrderDirection), graphql_name="direction") + """The ordering direction.""" + + +class SetEnterpriseIdentityProviderInput(sgqlc.types.Input): + """Autogenerated input type of SetEnterpriseIdentityProvider""" + + __schema__ = github_schema + __field_names__ = ("enterprise_id", "sso_url", "issuer", "idp_certificate", "signature_method", "digest_method", "client_mutation_id") + enterprise_id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="enterpriseId") + """The ID of the enterprise on which to set an identity provider.""" + + sso_url = sgqlc.types.Field(sgqlc.types.non_null(URI), graphql_name="ssoUrl") + """The URL endpoint for the identity provider's SAML SSO.""" + + issuer = sgqlc.types.Field(String, graphql_name="issuer") + """The Issuer Entity ID for the SAML identity provider""" + + idp_certificate = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="idpCertificate") + """The x509 certificate used by the identity provider to sign + assertions and responses. 
+ """ + + signature_method = sgqlc.types.Field(sgqlc.types.non_null(SamlSignatureAlgorithm), graphql_name="signatureMethod") + """The signature algorithm used to sign SAML requests for the + identity provider. + """ + + digest_method = sgqlc.types.Field(sgqlc.types.non_null(SamlDigestAlgorithm), graphql_name="digestMethod") + """The digest algorithm used to sign SAML requests for the identity + provider. + """ + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class SetOrganizationInteractionLimitInput(sgqlc.types.Input): + """Autogenerated input type of SetOrganizationInteractionLimit""" + + __schema__ = github_schema + __field_names__ = ("organization_id", "limit", "expiry", "client_mutation_id") + organization_id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="organizationId") + """The ID of the organization to set a limit for.""" + + limit = sgqlc.types.Field(sgqlc.types.non_null(RepositoryInteractionLimit), graphql_name="limit") + """The limit to set.""" + + expiry = sgqlc.types.Field(RepositoryInteractionLimitExpiry, graphql_name="expiry") + """When this limit should expire.""" + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class SetRepositoryInteractionLimitInput(sgqlc.types.Input): + """Autogenerated input type of SetRepositoryInteractionLimit""" + + __schema__ = github_schema + __field_names__ = ("repository_id", "limit", "expiry", "client_mutation_id") + repository_id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="repositoryId") + """The ID of the repository to set a limit for.""" + + limit = sgqlc.types.Field(sgqlc.types.non_null(RepositoryInteractionLimit), graphql_name="limit") + """The limit to set.""" + + expiry = sgqlc.types.Field(RepositoryInteractionLimitExpiry, graphql_name="expiry") + """When this limit 
should expire.""" + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class SetUserInteractionLimitInput(sgqlc.types.Input): + """Autogenerated input type of SetUserInteractionLimit""" + + __schema__ = github_schema + __field_names__ = ("user_id", "limit", "expiry", "client_mutation_id") + user_id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="userId") + """The ID of the user to set a limit for.""" + + limit = sgqlc.types.Field(sgqlc.types.non_null(RepositoryInteractionLimit), graphql_name="limit") + """The limit to set.""" + + expiry = sgqlc.types.Field(RepositoryInteractionLimitExpiry, graphql_name="expiry") + """When this limit should expire.""" + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class SponsorOrder(sgqlc.types.Input): + """Ordering options for connections to get sponsor entities for + GitHub Sponsors. + """ + + __schema__ = github_schema + __field_names__ = ("field", "direction") + field = sgqlc.types.Field(sgqlc.types.non_null(SponsorOrderField), graphql_name="field") + """The field to order sponsor entities by.""" + + direction = sgqlc.types.Field(sgqlc.types.non_null(OrderDirection), graphql_name="direction") + """The ordering direction.""" + + +class SponsorableOrder(sgqlc.types.Input): + """Ordering options for connections to get sponsorable entities for + GitHub Sponsors. 
+ """ + + __schema__ = github_schema + __field_names__ = ("field", "direction") + field = sgqlc.types.Field(sgqlc.types.non_null(SponsorableOrderField), graphql_name="field") + """The field to order sponsorable entities by.""" + + direction = sgqlc.types.Field(sgqlc.types.non_null(OrderDirection), graphql_name="direction") + """The ordering direction.""" + + +class SponsorsActivityOrder(sgqlc.types.Input): + """Ordering options for GitHub Sponsors activity connections.""" + + __schema__ = github_schema + __field_names__ = ("field", "direction") + field = sgqlc.types.Field(sgqlc.types.non_null(SponsorsActivityOrderField), graphql_name="field") + """The field to order activity by.""" + + direction = sgqlc.types.Field(sgqlc.types.non_null(OrderDirection), graphql_name="direction") + """The ordering direction.""" + + +class SponsorsTierOrder(sgqlc.types.Input): + """Ordering options for Sponsors tiers connections.""" + + __schema__ = github_schema + __field_names__ = ("field", "direction") + field = sgqlc.types.Field(sgqlc.types.non_null(SponsorsTierOrderField), graphql_name="field") + """The field to order tiers by.""" + + direction = sgqlc.types.Field(sgqlc.types.non_null(OrderDirection), graphql_name="direction") + """The ordering direction.""" + + +class SponsorshipNewsletterOrder(sgqlc.types.Input): + """Ordering options for sponsorship newsletter connections.""" + + __schema__ = github_schema + __field_names__ = ("field", "direction") + field = sgqlc.types.Field(sgqlc.types.non_null(SponsorshipNewsletterOrderField), graphql_name="field") + """The field to order sponsorship newsletters by.""" + + direction = sgqlc.types.Field(sgqlc.types.non_null(OrderDirection), graphql_name="direction") + """The ordering direction.""" + + +class SponsorshipOrder(sgqlc.types.Input): + """Ordering options for sponsorship connections.""" + + __schema__ = github_schema + __field_names__ = ("field", "direction") + field = sgqlc.types.Field(sgqlc.types.non_null(SponsorshipOrderField), 
graphql_name="field") + """The field to order sponsorship by.""" + + direction = sgqlc.types.Field(sgqlc.types.non_null(OrderDirection), graphql_name="direction") + """The ordering direction.""" + + +class StarOrder(sgqlc.types.Input): + """Ways in which star connections can be ordered.""" + + __schema__ = github_schema + __field_names__ = ("field", "direction") + field = sgqlc.types.Field(sgqlc.types.non_null(StarOrderField), graphql_name="field") + """The field in which to order nodes by.""" + + direction = sgqlc.types.Field(sgqlc.types.non_null(OrderDirection), graphql_name="direction") + """The direction in which to order nodes.""" + + +class StartRepositoryMigrationInput(sgqlc.types.Input): + """Autogenerated input type of StartRepositoryMigration""" + + __schema__ = github_schema + __field_names__ = ( + "source_id", + "owner_id", + "source_repository_url", + "repository_name", + "continue_on_error", + "git_archive_url", + "metadata_archive_url", + "access_token", + "github_pat", + "skip_releases", + "client_mutation_id", + ) + source_id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="sourceId") + """The ID of the Octoshift migration source.""" + + owner_id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="ownerId") + """The ID of the organization that will own the imported repository.""" + + source_repository_url = sgqlc.types.Field(sgqlc.types.non_null(URI), graphql_name="sourceRepositoryUrl") + """The Octoshift migration source repository URL.""" + + repository_name = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="repositoryName") + """The name of the imported repository.""" + + continue_on_error = sgqlc.types.Field(Boolean, graphql_name="continueOnError") + """Whether to continue the migration on error""" + + git_archive_url = sgqlc.types.Field(String, graphql_name="gitArchiveUrl") + """The signed URL to access the user-uploaded git archive""" + + metadata_archive_url = sgqlc.types.Field(String, 
graphql_name="metadataArchiveUrl") + """The signed URL to access the user-uploaded metadata archive""" + + access_token = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="accessToken") + """The Octoshift migration source access token.""" + + github_pat = sgqlc.types.Field(String, graphql_name="githubPat") + """The GitHub personal access token of the user importing to the + target repository. + """ + + skip_releases = sgqlc.types.Field(Boolean, graphql_name="skipReleases") + """Whether to skip migrating releases for the repository.""" + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class SubmitPullRequestReviewInput(sgqlc.types.Input): + """Autogenerated input type of SubmitPullRequestReview""" + + __schema__ = github_schema + __field_names__ = ("pull_request_id", "pull_request_review_id", "event", "body", "client_mutation_id") + pull_request_id = sgqlc.types.Field(ID, graphql_name="pullRequestId") + """The Pull Request ID to submit any pending reviews.""" + + pull_request_review_id = sgqlc.types.Field(ID, graphql_name="pullRequestReviewId") + """The Pull Request Review ID to submit.""" + + event = sgqlc.types.Field(sgqlc.types.non_null(PullRequestReviewEvent), graphql_name="event") + """The event to send to the Pull Request Review.""" + + body = sgqlc.types.Field(String, graphql_name="body") + """The text field to set on the Pull Request Review.""" + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class TeamDiscussionCommentOrder(sgqlc.types.Input): + """Ways in which team discussion comment connections can be ordered.""" + + __schema__ = github_schema + __field_names__ = ("field", "direction") + field = sgqlc.types.Field(sgqlc.types.non_null(TeamDiscussionCommentOrderField), graphql_name="field") + """The field by which to order nodes.""" 
+ + direction = sgqlc.types.Field(sgqlc.types.non_null(OrderDirection), graphql_name="direction") + """The direction in which to order nodes.""" + + +class TeamDiscussionOrder(sgqlc.types.Input): + """Ways in which team discussion connections can be ordered.""" + + __schema__ = github_schema + __field_names__ = ("field", "direction") + field = sgqlc.types.Field(sgqlc.types.non_null(TeamDiscussionOrderField), graphql_name="field") + """The field by which to order nodes.""" + + direction = sgqlc.types.Field(sgqlc.types.non_null(OrderDirection), graphql_name="direction") + """The direction in which to order nodes.""" + + +class TeamMemberOrder(sgqlc.types.Input): + """Ordering options for team member connections""" + + __schema__ = github_schema + __field_names__ = ("field", "direction") + field = sgqlc.types.Field(sgqlc.types.non_null(TeamMemberOrderField), graphql_name="field") + """The field to order team members by.""" + + direction = sgqlc.types.Field(sgqlc.types.non_null(OrderDirection), graphql_name="direction") + """The ordering direction.""" + + +class TeamOrder(sgqlc.types.Input): + """Ways in which team connections can be ordered.""" + + __schema__ = github_schema + __field_names__ = ("field", "direction") + field = sgqlc.types.Field(sgqlc.types.non_null(TeamOrderField), graphql_name="field") + """The field in which to order nodes by.""" + + direction = sgqlc.types.Field(sgqlc.types.non_null(OrderDirection), graphql_name="direction") + """The direction in which to order nodes.""" + + +class TeamRepositoryOrder(sgqlc.types.Input): + """Ordering options for team repository connections""" + + __schema__ = github_schema + __field_names__ = ("field", "direction") + field = sgqlc.types.Field(sgqlc.types.non_null(TeamRepositoryOrderField), graphql_name="field") + """The field to order repositories by.""" + + direction = sgqlc.types.Field(sgqlc.types.non_null(OrderDirection), graphql_name="direction") + """The ordering direction.""" + + +class 
TransferIssueInput(sgqlc.types.Input): + """Autogenerated input type of TransferIssue""" + + __schema__ = github_schema + __field_names__ = ("issue_id", "repository_id", "client_mutation_id") + issue_id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="issueId") + """The Node ID of the issue to be transferred""" + + repository_id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="repositoryId") + """The Node ID of the repository the issue should be transferred to""" + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class UnarchiveRepositoryInput(sgqlc.types.Input): + """Autogenerated input type of UnarchiveRepository""" + + __schema__ = github_schema + __field_names__ = ("repository_id", "client_mutation_id") + repository_id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="repositoryId") + """The ID of the repository to unarchive.""" + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class UnfollowOrganizationInput(sgqlc.types.Input): + """Autogenerated input type of UnfollowOrganization""" + + __schema__ = github_schema + __field_names__ = ("organization_id", "client_mutation_id") + organization_id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="organizationId") + """ID of the organization to unfollow.""" + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class UnfollowUserInput(sgqlc.types.Input): + """Autogenerated input type of UnfollowUser""" + + __schema__ = github_schema + __field_names__ = ("user_id", "client_mutation_id") + user_id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="userId") + """ID of the user to unfollow.""" + + client_mutation_id = sgqlc.types.Field(String, 
graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class UnlinkRepositoryFromProjectInput(sgqlc.types.Input): + """Autogenerated input type of UnlinkRepositoryFromProject""" + + __schema__ = github_schema + __field_names__ = ("project_id", "repository_id", "client_mutation_id") + project_id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="projectId") + """The ID of the Project linked to the Repository.""" + + repository_id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="repositoryId") + """The ID of the Repository linked to the Project.""" + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class UnlockLockableInput(sgqlc.types.Input): + """Autogenerated input type of UnlockLockable""" + + __schema__ = github_schema + __field_names__ = ("lockable_id", "client_mutation_id") + lockable_id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="lockableId") + """ID of the item to be unlocked.""" + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class UnmarkDiscussionCommentAsAnswerInput(sgqlc.types.Input): + """Autogenerated input type of UnmarkDiscussionCommentAsAnswer""" + + __schema__ = github_schema + __field_names__ = ("id", "client_mutation_id") + id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="id") + """The Node ID of the discussion comment to unmark as an answer.""" + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class UnmarkFileAsViewedInput(sgqlc.types.Input): + """Autogenerated input type of UnmarkFileAsViewed""" + + __schema__ = github_schema + __field_names__ = ("pull_request_id", "path", "client_mutation_id") + pull_request_id = 
sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="pullRequestId") + """The Node ID of the pull request.""" + + path = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="path") + """The path of the file to mark as unviewed""" + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class UnmarkIssueAsDuplicateInput(sgqlc.types.Input): + """Autogenerated input type of UnmarkIssueAsDuplicate""" + + __schema__ = github_schema + __field_names__ = ("duplicate_id", "canonical_id", "client_mutation_id") + duplicate_id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="duplicateId") + """ID of the issue or pull request currently marked as a duplicate.""" + + canonical_id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="canonicalId") + """ID of the issue or pull request currently considered + canonical/authoritative/original. + """ + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class UnminimizeCommentInput(sgqlc.types.Input): + """Autogenerated input type of UnminimizeComment""" + + __schema__ = github_schema + __field_names__ = ("subject_id", "client_mutation_id") + subject_id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="subjectId") + """The Node ID of the subject to modify.""" + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class UnpinIssueInput(sgqlc.types.Input): + """Autogenerated input type of UnpinIssue""" + + __schema__ = github_schema + __field_names__ = ("issue_id", "client_mutation_id") + issue_id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="issueId") + """The ID of the issue to be unpinned""" + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A 
unique identifier for the client performing the mutation.""" + + +class UnresolveReviewThreadInput(sgqlc.types.Input): + """Autogenerated input type of UnresolveReviewThread""" + + __schema__ = github_schema + __field_names__ = ("thread_id", "client_mutation_id") + thread_id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="threadId") + """The ID of the thread to unresolve""" + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class UpdateBranchProtectionRuleInput(sgqlc.types.Input): + """Autogenerated input type of UpdateBranchProtectionRule""" + + __schema__ = github_schema + __field_names__ = ( + "branch_protection_rule_id", + "pattern", + "requires_approving_reviews", + "required_approving_review_count", + "requires_commit_signatures", + "requires_linear_history", + "blocks_creations", + "allows_force_pushes", + "allows_deletions", + "is_admin_enforced", + "requires_status_checks", + "requires_strict_status_checks", + "requires_code_owner_reviews", + "dismisses_stale_reviews", + "restricts_review_dismissals", + "review_dismissal_actor_ids", + "bypass_pull_request_actor_ids", + "bypass_force_push_actor_ids", + "restricts_pushes", + "push_actor_ids", + "required_status_check_contexts", + "required_status_checks", + "requires_conversation_resolution", + "client_mutation_id", + ) + branch_protection_rule_id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="branchProtectionRuleId") + """The global relay id of the branch protection rule to be updated.""" + + pattern = sgqlc.types.Field(String, graphql_name="pattern") + """The glob-like pattern used to determine matching branches.""" + + requires_approving_reviews = sgqlc.types.Field(Boolean, graphql_name="requiresApprovingReviews") + """Are approving reviews required to update matching branches.""" + + required_approving_review_count = sgqlc.types.Field(Int, 
graphql_name="requiredApprovingReviewCount") + """Number of approving reviews required to update matching branches.""" + + requires_commit_signatures = sgqlc.types.Field(Boolean, graphql_name="requiresCommitSignatures") + """Are commits required to be signed.""" + + requires_linear_history = sgqlc.types.Field(Boolean, graphql_name="requiresLinearHistory") + """Are merge commits prohibited from being pushed to this branch.""" + + blocks_creations = sgqlc.types.Field(Boolean, graphql_name="blocksCreations") + """Is branch creation a protected operation.""" + + allows_force_pushes = sgqlc.types.Field(Boolean, graphql_name="allowsForcePushes") + """Are force pushes allowed on this branch.""" + + allows_deletions = sgqlc.types.Field(Boolean, graphql_name="allowsDeletions") + """Can this branch be deleted.""" + + is_admin_enforced = sgqlc.types.Field(Boolean, graphql_name="isAdminEnforced") + """Can admins overwrite branch protection.""" + + requires_status_checks = sgqlc.types.Field(Boolean, graphql_name="requiresStatusChecks") + """Are status checks required to update matching branches.""" + + requires_strict_status_checks = sgqlc.types.Field(Boolean, graphql_name="requiresStrictStatusChecks") + """Are branches required to be up to date before merging.""" + + requires_code_owner_reviews = sgqlc.types.Field(Boolean, graphql_name="requiresCodeOwnerReviews") + """Are reviews from code owners required to update matching branches.""" + + dismisses_stale_reviews = sgqlc.types.Field(Boolean, graphql_name="dismissesStaleReviews") + """Will new commits pushed to matching branches dismiss pull request + review approvals. 
+ """ + + restricts_review_dismissals = sgqlc.types.Field(Boolean, graphql_name="restrictsReviewDismissals") + """Is dismissal of pull request reviews restricted.""" + + review_dismissal_actor_ids = sgqlc.types.Field(sgqlc.types.list_of(sgqlc.types.non_null(ID)), graphql_name="reviewDismissalActorIds") + """A list of User, Team, or App IDs allowed to dismiss reviews on + pull requests targeting matching branches. + """ + + bypass_pull_request_actor_ids = sgqlc.types.Field( + sgqlc.types.list_of(sgqlc.types.non_null(ID)), graphql_name="bypassPullRequestActorIds" + ) + """A list of User, Team, or App IDs allowed to bypass pull requests + targeting matching branches. + """ + + bypass_force_push_actor_ids = sgqlc.types.Field(sgqlc.types.list_of(sgqlc.types.non_null(ID)), graphql_name="bypassForcePushActorIds") + """A list of User, Team, or App IDs allowed to bypass force push + targeting matching branches. + """ + + restricts_pushes = sgqlc.types.Field(Boolean, graphql_name="restrictsPushes") + """Is pushing to matching branches restricted.""" + + push_actor_ids = sgqlc.types.Field(sgqlc.types.list_of(sgqlc.types.non_null(ID)), graphql_name="pushActorIds") + """A list of User, Team, or App IDs allowed to push to matching + branches. + """ + + required_status_check_contexts = sgqlc.types.Field( + sgqlc.types.list_of(sgqlc.types.non_null(String)), graphql_name="requiredStatusCheckContexts" + ) + """List of required status check contexts that must pass for commits + to be accepted to matching branches. 
+ """ + + required_status_checks = sgqlc.types.Field( + sgqlc.types.list_of(sgqlc.types.non_null(RequiredStatusCheckInput)), graphql_name="requiredStatusChecks" + ) + """The list of required status checks""" + + requires_conversation_resolution = sgqlc.types.Field(Boolean, graphql_name="requiresConversationResolution") + """Are conversations required to be resolved before merging.""" + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class UpdateCheckRunInput(sgqlc.types.Input): + """Autogenerated input type of UpdateCheckRun""" + + __schema__ = github_schema + __field_names__ = ( + "repository_id", + "check_run_id", + "name", + "details_url", + "external_id", + "status", + "started_at", + "conclusion", + "completed_at", + "output", + "actions", + "client_mutation_id", + ) + repository_id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="repositoryId") + """The node ID of the repository.""" + + check_run_id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="checkRunId") + """The node of the check.""" + + name = sgqlc.types.Field(String, graphql_name="name") + """The name of the check.""" + + details_url = sgqlc.types.Field(URI, graphql_name="detailsUrl") + """The URL of the integrator's site that has the full details of the + check. 
+ """ + + external_id = sgqlc.types.Field(String, graphql_name="externalId") + """A reference for the run on the integrator's system.""" + + status = sgqlc.types.Field(RequestableCheckStatusState, graphql_name="status") + """The current status.""" + + started_at = sgqlc.types.Field(DateTime, graphql_name="startedAt") + """The time that the check run began.""" + + conclusion = sgqlc.types.Field(CheckConclusionState, graphql_name="conclusion") + """The final conclusion of the check.""" + + completed_at = sgqlc.types.Field(DateTime, graphql_name="completedAt") + """The time that the check run finished.""" + + output = sgqlc.types.Field(CheckRunOutput, graphql_name="output") + """Descriptive details about the run.""" + + actions = sgqlc.types.Field(sgqlc.types.list_of(sgqlc.types.non_null(CheckRunAction)), graphql_name="actions") + """Possible further actions the integrator can perform, which a user + may trigger. + """ + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class UpdateCheckSuitePreferencesInput(sgqlc.types.Input): + """Autogenerated input type of UpdateCheckSuitePreferences""" + + __schema__ = github_schema + __field_names__ = ("repository_id", "auto_trigger_preferences", "client_mutation_id") + repository_id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="repositoryId") + """The Node ID of the repository.""" + + auto_trigger_preferences = sgqlc.types.Field( + sgqlc.types.non_null(sgqlc.types.list_of(sgqlc.types.non_null(CheckSuiteAutoTriggerPreference))), + graphql_name="autoTriggerPreferences", + ) + """The check suite preferences to modify.""" + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class UpdateDiscussionCommentInput(sgqlc.types.Input): + """Autogenerated input type of UpdateDiscussionComment""" + + __schema__ = 
github_schema + __field_names__ = ("comment_id", "body", "client_mutation_id") + comment_id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="commentId") + """The Node ID of the discussion comment to update.""" + + body = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="body") + """The new contents of the comment body.""" + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class UpdateDiscussionInput(sgqlc.types.Input): + """Autogenerated input type of UpdateDiscussion""" + + __schema__ = github_schema + __field_names__ = ("discussion_id", "title", "body", "category_id", "client_mutation_id") + discussion_id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="discussionId") + """The Node ID of the discussion to update.""" + + title = sgqlc.types.Field(String, graphql_name="title") + """The new discussion title.""" + + body = sgqlc.types.Field(String, graphql_name="body") + """The new contents of the discussion body.""" + + category_id = sgqlc.types.Field(ID, graphql_name="categoryId") + """The Node ID of a discussion category within the same repository to + change this discussion to. 
+ """ + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class UpdateEnterpriseAdministratorRoleInput(sgqlc.types.Input): + """Autogenerated input type of UpdateEnterpriseAdministratorRole""" + + __schema__ = github_schema + __field_names__ = ("enterprise_id", "login", "role", "client_mutation_id") + enterprise_id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="enterpriseId") + """The ID of the Enterprise which the admin belongs to.""" + + login = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="login") + """The login of a administrator whose role is being changed.""" + + role = sgqlc.types.Field(sgqlc.types.non_null(EnterpriseAdministratorRole), graphql_name="role") + """The new role for the Enterprise administrator.""" + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class UpdateEnterpriseAllowPrivateRepositoryForkingSettingInput(sgqlc.types.Input): + """Autogenerated input type of + UpdateEnterpriseAllowPrivateRepositoryForkingSetting + """ + + __schema__ = github_schema + __field_names__ = ("enterprise_id", "setting_value", "client_mutation_id") + enterprise_id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="enterpriseId") + """The ID of the enterprise on which to set the allow private + repository forking setting. + """ + + setting_value = sgqlc.types.Field(sgqlc.types.non_null(EnterpriseEnabledDisabledSettingValue), graphql_name="settingValue") + """The value for the allow private repository forking setting on the + enterprise. 
+ """ + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class UpdateEnterpriseDefaultRepositoryPermissionSettingInput(sgqlc.types.Input): + """Autogenerated input type of + UpdateEnterpriseDefaultRepositoryPermissionSetting + """ + + __schema__ = github_schema + __field_names__ = ("enterprise_id", "setting_value", "client_mutation_id") + enterprise_id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="enterpriseId") + """The ID of the enterprise on which to set the base repository + permission setting. + """ + + setting_value = sgqlc.types.Field(sgqlc.types.non_null(EnterpriseDefaultRepositoryPermissionSettingValue), graphql_name="settingValue") + """The value for the base repository permission setting on the + enterprise. + """ + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class UpdateEnterpriseMembersCanChangeRepositoryVisibilitySettingInput(sgqlc.types.Input): + """Autogenerated input type of + UpdateEnterpriseMembersCanChangeRepositoryVisibilitySetting + """ + + __schema__ = github_schema + __field_names__ = ("enterprise_id", "setting_value", "client_mutation_id") + enterprise_id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="enterpriseId") + """The ID of the enterprise on which to set the members can change + repository visibility setting. + """ + + setting_value = sgqlc.types.Field(sgqlc.types.non_null(EnterpriseEnabledDisabledSettingValue), graphql_name="settingValue") + """The value for the members can change repository visibility setting + on the enterprise. 
+ """ + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class UpdateEnterpriseMembersCanCreateRepositoriesSettingInput(sgqlc.types.Input): + """Autogenerated input type of + UpdateEnterpriseMembersCanCreateRepositoriesSetting + """ + + __schema__ = github_schema + __field_names__ = ( + "enterprise_id", + "setting_value", + "members_can_create_repositories_policy_enabled", + "members_can_create_public_repositories", + "members_can_create_private_repositories", + "members_can_create_internal_repositories", + "client_mutation_id", + ) + enterprise_id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="enterpriseId") + """The ID of the enterprise on which to set the members can create + repositories setting. + """ + + setting_value = sgqlc.types.Field(EnterpriseMembersCanCreateRepositoriesSettingValue, graphql_name="settingValue") + """Value for the members can create repositories setting on the + enterprise. This or the granular public/private/internal allowed + fields (but not both) must be provided. + """ + + members_can_create_repositories_policy_enabled = sgqlc.types.Field(Boolean, graphql_name="membersCanCreateRepositoriesPolicyEnabled") + """When false, allow member organizations to set their own repository + creation member privileges. + """ + + members_can_create_public_repositories = sgqlc.types.Field(Boolean, graphql_name="membersCanCreatePublicRepositories") + """Allow members to create public repositories. Defaults to current + value. + """ + + members_can_create_private_repositories = sgqlc.types.Field(Boolean, graphql_name="membersCanCreatePrivateRepositories") + """Allow members to create private repositories. Defaults to current + value. + """ + + members_can_create_internal_repositories = sgqlc.types.Field(Boolean, graphql_name="membersCanCreateInternalRepositories") + """Allow members to create internal repositories. 
Defaults to current + value. + """ + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class UpdateEnterpriseMembersCanDeleteIssuesSettingInput(sgqlc.types.Input): + """Autogenerated input type of + UpdateEnterpriseMembersCanDeleteIssuesSetting + """ + + __schema__ = github_schema + __field_names__ = ("enterprise_id", "setting_value", "client_mutation_id") + enterprise_id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="enterpriseId") + """The ID of the enterprise on which to set the members can delete + issues setting. + """ + + setting_value = sgqlc.types.Field(sgqlc.types.non_null(EnterpriseEnabledDisabledSettingValue), graphql_name="settingValue") + """The value for the members can delete issues setting on the + enterprise. + """ + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class UpdateEnterpriseMembersCanDeleteRepositoriesSettingInput(sgqlc.types.Input): + """Autogenerated input type of + UpdateEnterpriseMembersCanDeleteRepositoriesSetting + """ + + __schema__ = github_schema + __field_names__ = ("enterprise_id", "setting_value", "client_mutation_id") + enterprise_id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="enterpriseId") + """The ID of the enterprise on which to set the members can delete + repositories setting. + """ + + setting_value = sgqlc.types.Field(sgqlc.types.non_null(EnterpriseEnabledDisabledSettingValue), graphql_name="settingValue") + """The value for the members can delete repositories setting on the + enterprise. 
+ """ + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class UpdateEnterpriseMembersCanInviteCollaboratorsSettingInput(sgqlc.types.Input): + """Autogenerated input type of + UpdateEnterpriseMembersCanInviteCollaboratorsSetting + """ + + __schema__ = github_schema + __field_names__ = ("enterprise_id", "setting_value", "client_mutation_id") + enterprise_id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="enterpriseId") + """The ID of the enterprise on which to set the members can invite + collaborators setting. + """ + + setting_value = sgqlc.types.Field(sgqlc.types.non_null(EnterpriseEnabledDisabledSettingValue), graphql_name="settingValue") + """The value for the members can invite collaborators setting on the + enterprise. + """ + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class UpdateEnterpriseMembersCanMakePurchasesSettingInput(sgqlc.types.Input): + """Autogenerated input type of + UpdateEnterpriseMembersCanMakePurchasesSetting + """ + + __schema__ = github_schema + __field_names__ = ("enterprise_id", "setting_value", "client_mutation_id") + enterprise_id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="enterpriseId") + """The ID of the enterprise on which to set the members can make + purchases setting. + """ + + setting_value = sgqlc.types.Field(sgqlc.types.non_null(EnterpriseMembersCanMakePurchasesSettingValue), graphql_name="settingValue") + """The value for the members can make purchases setting on the + enterprise. 
+ """ + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class UpdateEnterpriseMembersCanUpdateProtectedBranchesSettingInput(sgqlc.types.Input): + """Autogenerated input type of + UpdateEnterpriseMembersCanUpdateProtectedBranchesSetting + """ + + __schema__ = github_schema + __field_names__ = ("enterprise_id", "setting_value", "client_mutation_id") + enterprise_id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="enterpriseId") + """The ID of the enterprise on which to set the members can update + protected branches setting. + """ + + setting_value = sgqlc.types.Field(sgqlc.types.non_null(EnterpriseEnabledDisabledSettingValue), graphql_name="settingValue") + """The value for the members can update protected branches setting on + the enterprise. + """ + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class UpdateEnterpriseMembersCanViewDependencyInsightsSettingInput(sgqlc.types.Input): + """Autogenerated input type of + UpdateEnterpriseMembersCanViewDependencyInsightsSetting + """ + + __schema__ = github_schema + __field_names__ = ("enterprise_id", "setting_value", "client_mutation_id") + enterprise_id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="enterpriseId") + """The ID of the enterprise on which to set the members can view + dependency insights setting. + """ + + setting_value = sgqlc.types.Field(sgqlc.types.non_null(EnterpriseEnabledDisabledSettingValue), graphql_name="settingValue") + """The value for the members can view dependency insights setting on + the enterprise. 
+ """ + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class UpdateEnterpriseOrganizationProjectsSettingInput(sgqlc.types.Input): + """Autogenerated input type of + UpdateEnterpriseOrganizationProjectsSetting + """ + + __schema__ = github_schema + __field_names__ = ("enterprise_id", "setting_value", "client_mutation_id") + enterprise_id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="enterpriseId") + """The ID of the enterprise on which to set the organization projects + setting. + """ + + setting_value = sgqlc.types.Field(sgqlc.types.non_null(EnterpriseEnabledDisabledSettingValue), graphql_name="settingValue") + """The value for the organization projects setting on the enterprise.""" + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class UpdateEnterpriseOwnerOrganizationRoleInput(sgqlc.types.Input): + """Autogenerated input type of UpdateEnterpriseOwnerOrganizationRole""" + + __schema__ = github_schema + __field_names__ = ("enterprise_id", "organization_id", "organization_role", "client_mutation_id") + enterprise_id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="enterpriseId") + """The ID of the Enterprise which the owner belongs to.""" + + organization_id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="organizationId") + """The ID of the organization for membership change.""" + + organization_role = sgqlc.types.Field(sgqlc.types.non_null(RoleInOrganization), graphql_name="organizationRole") + """The role to assume in the organization.""" + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class UpdateEnterpriseProfileInput(sgqlc.types.Input): + """Autogenerated input type of UpdateEnterpriseProfile""" + + 
__schema__ = github_schema + __field_names__ = ("enterprise_id", "name", "description", "website_url", "location", "client_mutation_id") + enterprise_id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="enterpriseId") + """The Enterprise ID to update.""" + + name = sgqlc.types.Field(String, graphql_name="name") + """The name of the enterprise.""" + + description = sgqlc.types.Field(String, graphql_name="description") + """The description of the enterprise.""" + + website_url = sgqlc.types.Field(String, graphql_name="websiteUrl") + """The URL of the enterprise's website.""" + + location = sgqlc.types.Field(String, graphql_name="location") + """The location of the enterprise.""" + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class UpdateEnterpriseRepositoryProjectsSettingInput(sgqlc.types.Input): + """Autogenerated input type of + UpdateEnterpriseRepositoryProjectsSetting + """ + + __schema__ = github_schema + __field_names__ = ("enterprise_id", "setting_value", "client_mutation_id") + enterprise_id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="enterpriseId") + """The ID of the enterprise on which to set the repository projects + setting. 
+ """ + + setting_value = sgqlc.types.Field(sgqlc.types.non_null(EnterpriseEnabledDisabledSettingValue), graphql_name="settingValue") + """The value for the repository projects setting on the enterprise.""" + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class UpdateEnterpriseTeamDiscussionsSettingInput(sgqlc.types.Input): + """Autogenerated input type of UpdateEnterpriseTeamDiscussionsSetting""" + + __schema__ = github_schema + __field_names__ = ("enterprise_id", "setting_value", "client_mutation_id") + enterprise_id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="enterpriseId") + """The ID of the enterprise on which to set the team discussions + setting. + """ + + setting_value = sgqlc.types.Field(sgqlc.types.non_null(EnterpriseEnabledDisabledSettingValue), graphql_name="settingValue") + """The value for the team discussions setting on the enterprise.""" + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class UpdateEnterpriseTwoFactorAuthenticationRequiredSettingInput(sgqlc.types.Input): + """Autogenerated input type of + UpdateEnterpriseTwoFactorAuthenticationRequiredSetting + """ + + __schema__ = github_schema + __field_names__ = ("enterprise_id", "setting_value", "client_mutation_id") + enterprise_id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="enterpriseId") + """The ID of the enterprise on which to set the two factor + authentication required setting. + """ + + setting_value = sgqlc.types.Field(sgqlc.types.non_null(EnterpriseEnabledSettingValue), graphql_name="settingValue") + """The value for the two factor authentication required setting on + the enterprise. 
+ """ + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class UpdateEnvironmentInput(sgqlc.types.Input): + """Autogenerated input type of UpdateEnvironment""" + + __schema__ = github_schema + __field_names__ = ("environment_id", "wait_timer", "reviewers", "client_mutation_id") + environment_id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="environmentId") + """The node ID of the environment.""" + + wait_timer = sgqlc.types.Field(Int, graphql_name="waitTimer") + """The wait timer in minutes.""" + + reviewers = sgqlc.types.Field(sgqlc.types.list_of(sgqlc.types.non_null(ID)), graphql_name="reviewers") + """The ids of users or teams that can approve deployments to this + environment + """ + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class UpdateIpAllowListEnabledSettingInput(sgqlc.types.Input): + """Autogenerated input type of UpdateIpAllowListEnabledSetting""" + + __schema__ = github_schema + __field_names__ = ("owner_id", "setting_value", "client_mutation_id") + owner_id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="ownerId") + """The ID of the owner on which to set the IP allow list enabled + setting. 
+ """ + + setting_value = sgqlc.types.Field(sgqlc.types.non_null(IpAllowListEnabledSettingValue), graphql_name="settingValue") + """The value for the IP allow list enabled setting.""" + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class UpdateIpAllowListEntryInput(sgqlc.types.Input): + """Autogenerated input type of UpdateIpAllowListEntry""" + + __schema__ = github_schema + __field_names__ = ("ip_allow_list_entry_id", "allow_list_value", "name", "is_active", "client_mutation_id") + ip_allow_list_entry_id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="ipAllowListEntryId") + """The ID of the IP allow list entry to update.""" + + allow_list_value = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="allowListValue") + """An IP address or range of addresses in CIDR notation.""" + + name = sgqlc.types.Field(String, graphql_name="name") + """An optional name for the IP allow list entry.""" + + is_active = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="isActive") + """Whether the IP allow list entry is active when an IP allow list is + enabled. + """ + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class UpdateIpAllowListForInstalledAppsEnabledSettingInput(sgqlc.types.Input): + """Autogenerated input type of + UpdateIpAllowListForInstalledAppsEnabledSetting + """ + + __schema__ = github_schema + __field_names__ = ("owner_id", "setting_value", "client_mutation_id") + owner_id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="ownerId") + """The ID of the owner.""" + + setting_value = sgqlc.types.Field(sgqlc.types.non_null(IpAllowListForInstalledAppsEnabledSettingValue), graphql_name="settingValue") + """The value for the IP allow list configuration for installed GitHub + Apps setting. 
+ """ + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class UpdateIssueCommentInput(sgqlc.types.Input): + """Autogenerated input type of UpdateIssueComment""" + + __schema__ = github_schema + __field_names__ = ("id", "body", "client_mutation_id") + id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="id") + """The ID of the IssueComment to modify.""" + + body = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="body") + """The updated text of the comment.""" + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class UpdateIssueInput(sgqlc.types.Input): + """Autogenerated input type of UpdateIssue""" + + __schema__ = github_schema + __field_names__ = ("id", "title", "body", "assignee_ids", "milestone_id", "label_ids", "state", "project_ids", "client_mutation_id") + id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="id") + """The ID of the Issue to modify.""" + + title = sgqlc.types.Field(String, graphql_name="title") + """The title for the issue.""" + + body = sgqlc.types.Field(String, graphql_name="body") + """The body for the issue description.""" + + assignee_ids = sgqlc.types.Field(sgqlc.types.list_of(sgqlc.types.non_null(ID)), graphql_name="assigneeIds") + """An array of Node IDs of users for this issue.""" + + milestone_id = sgqlc.types.Field(ID, graphql_name="milestoneId") + """The Node ID of the milestone for this issue.""" + + label_ids = sgqlc.types.Field(sgqlc.types.list_of(sgqlc.types.non_null(ID)), graphql_name="labelIds") + """An array of Node IDs of labels for this issue.""" + + state = sgqlc.types.Field(IssueState, graphql_name="state") + """The desired issue state.""" + + project_ids = sgqlc.types.Field(sgqlc.types.list_of(sgqlc.types.non_null(ID)), graphql_name="projectIds") + """An array of Node IDs for 
projects associated with this issue.""" + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class UpdateNotificationRestrictionSettingInput(sgqlc.types.Input): + """Autogenerated input type of UpdateNotificationRestrictionSetting""" + + __schema__ = github_schema + __field_names__ = ("owner_id", "setting_value", "client_mutation_id") + owner_id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="ownerId") + """The ID of the owner on which to set the restrict notifications + setting. + """ + + setting_value = sgqlc.types.Field(sgqlc.types.non_null(NotificationRestrictionSettingValue), graphql_name="settingValue") + """The value for the restrict notifications setting.""" + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class UpdateOrganizationAllowPrivateRepositoryForkingSettingInput(sgqlc.types.Input): + """Autogenerated input type of + UpdateOrganizationAllowPrivateRepositoryForkingSetting + """ + + __schema__ = github_schema + __field_names__ = ("organization_id", "forking_enabled", "client_mutation_id") + organization_id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="organizationId") + """The ID of the organization on which to set the allow private + repository forking setting. 
+ """ + + forking_enabled = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="forkingEnabled") + """Enable forking of private repositories in the organization?""" + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class UpdateProjectCardInput(sgqlc.types.Input): + """Autogenerated input type of UpdateProjectCard""" + + __schema__ = github_schema + __field_names__ = ("project_card_id", "is_archived", "note", "client_mutation_id") + project_card_id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="projectCardId") + """The ProjectCard ID to update.""" + + is_archived = sgqlc.types.Field(Boolean, graphql_name="isArchived") + """Whether or not the ProjectCard should be archived""" + + note = sgqlc.types.Field(String, graphql_name="note") + """The note of ProjectCard.""" + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class UpdateProjectColumnInput(sgqlc.types.Input): + """Autogenerated input type of UpdateProjectColumn""" + + __schema__ = github_schema + __field_names__ = ("project_column_id", "name", "client_mutation_id") + project_column_id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="projectColumnId") + """The ProjectColumn ID to update.""" + + name = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="name") + """The name of project column.""" + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class UpdateProjectDraftIssueInput(sgqlc.types.Input): + """Autogenerated input type of UpdateProjectDraftIssue""" + + __schema__ = github_schema + __field_names__ = ("draft_issue_id", "title", "body", "assignee_ids", "client_mutation_id") + draft_issue_id = sgqlc.types.Field(sgqlc.types.non_null(ID), 
graphql_name="draftIssueId") + """The ID of the draft issue to update.""" + + title = sgqlc.types.Field(String, graphql_name="title") + """The title of the draft issue.""" + + body = sgqlc.types.Field(String, graphql_name="body") + """The body of the draft issue.""" + + assignee_ids = sgqlc.types.Field(sgqlc.types.list_of(sgqlc.types.non_null(ID)), graphql_name="assigneeIds") + """The IDs of the assignees of the draft issue.""" + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class UpdateProjectInput(sgqlc.types.Input): + """Autogenerated input type of UpdateProject""" + + __schema__ = github_schema + __field_names__ = ("project_id", "name", "body", "state", "public", "client_mutation_id") + project_id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="projectId") + """The Project ID to update.""" + + name = sgqlc.types.Field(String, graphql_name="name") + """The name of project.""" + + body = sgqlc.types.Field(String, graphql_name="body") + """The description of project.""" + + state = sgqlc.types.Field(ProjectState, graphql_name="state") + """Whether the project is open or closed.""" + + public = sgqlc.types.Field(Boolean, graphql_name="public") + """Whether the project is public or not.""" + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class UpdateProjectNextInput(sgqlc.types.Input): + """Autogenerated input type of UpdateProjectNext""" + + __schema__ = github_schema + __field_names__ = ("project_id", "title", "description", "short_description", "closed", "public", "client_mutation_id") + project_id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="projectId") + """The ID of the Project to update.""" + + title = sgqlc.types.Field(String, graphql_name="title") + """Set the title of the project.""" + + description = 
sgqlc.types.Field(String, graphql_name="description") + """Set the readme description of the project.""" + + short_description = sgqlc.types.Field(String, graphql_name="shortDescription") + """Set the short description of the project.""" + + closed = sgqlc.types.Field(Boolean, graphql_name="closed") + """Set the project to closed or open.""" + + public = sgqlc.types.Field(Boolean, graphql_name="public") + """Set the project to public or private.""" + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class UpdateProjectNextItemFieldInput(sgqlc.types.Input): + """Autogenerated input type of UpdateProjectNextItemField""" + + __schema__ = github_schema + __field_names__ = ("project_id", "item_id", "field_id", "value", "client_mutation_id") + project_id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="projectId") + """The ID of the Project.""" + + item_id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="itemId") + """The id of the item to be updated.""" + + field_id = sgqlc.types.Field(ID, graphql_name="fieldId") + """The id of the field to be updated.""" + + value = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="value") + """The value which will be set on the field.""" + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class UpdatePullRequestBranchInput(sgqlc.types.Input): + """Autogenerated input type of UpdatePullRequestBranch""" + + __schema__ = github_schema + __field_names__ = ("pull_request_id", "expected_head_oid", "client_mutation_id") + pull_request_id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="pullRequestId") + """The Node ID of the pull request.""" + + expected_head_oid = sgqlc.types.Field(GitObjectID, graphql_name="expectedHeadOid") + """The head ref oid for the upstream branch.""" + + 
client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class UpdatePullRequestInput(sgqlc.types.Input): + """Autogenerated input type of UpdatePullRequest""" + + __schema__ = github_schema + __field_names__ = ( + "pull_request_id", + "base_ref_name", + "title", + "body", + "state", + "maintainer_can_modify", + "assignee_ids", + "milestone_id", + "label_ids", + "project_ids", + "client_mutation_id", + ) + pull_request_id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="pullRequestId") + """The Node ID of the pull request.""" + + base_ref_name = sgqlc.types.Field(String, graphql_name="baseRefName") + """The name of the branch you want your changes pulled into. This + should be an existing branch on the current repository. + """ + + title = sgqlc.types.Field(String, graphql_name="title") + """The title of the pull request.""" + + body = sgqlc.types.Field(String, graphql_name="body") + """The contents of the pull request.""" + + state = sgqlc.types.Field(PullRequestUpdateState, graphql_name="state") + """The target state of the pull request.""" + + maintainer_can_modify = sgqlc.types.Field(Boolean, graphql_name="maintainerCanModify") + """Indicates whether maintainers can modify the pull request.""" + + assignee_ids = sgqlc.types.Field(sgqlc.types.list_of(sgqlc.types.non_null(ID)), graphql_name="assigneeIds") + """An array of Node IDs of users for this pull request.""" + + milestone_id = sgqlc.types.Field(ID, graphql_name="milestoneId") + """The Node ID of the milestone for this pull request.""" + + label_ids = sgqlc.types.Field(sgqlc.types.list_of(sgqlc.types.non_null(ID)), graphql_name="labelIds") + """An array of Node IDs of labels for this pull request.""" + + project_ids = sgqlc.types.Field(sgqlc.types.list_of(sgqlc.types.non_null(ID)), graphql_name="projectIds") + """An array of Node IDs for projects associated with this pull + request. 
+ """ + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class UpdatePullRequestReviewCommentInput(sgqlc.types.Input): + """Autogenerated input type of UpdatePullRequestReviewComment""" + + __schema__ = github_schema + __field_names__ = ("pull_request_review_comment_id", "body", "client_mutation_id") + pull_request_review_comment_id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="pullRequestReviewCommentId") + """The Node ID of the comment to modify.""" + + body = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="body") + """The text of the comment.""" + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class UpdatePullRequestReviewInput(sgqlc.types.Input): + """Autogenerated input type of UpdatePullRequestReview""" + + __schema__ = github_schema + __field_names__ = ("pull_request_review_id", "body", "client_mutation_id") + pull_request_review_id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="pullRequestReviewId") + """The Node ID of the pull request review to modify.""" + + body = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="body") + """The contents of the pull request review body.""" + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class UpdateRefInput(sgqlc.types.Input): + """Autogenerated input type of UpdateRef""" + + __schema__ = github_schema + __field_names__ = ("ref_id", "oid", "force", "client_mutation_id") + ref_id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="refId") + """The Node ID of the Ref to be updated.""" + + oid = sgqlc.types.Field(sgqlc.types.non_null(GitObjectID), graphql_name="oid") + """The GitObjectID that the Ref shall be updated to target.""" + + force = 
sgqlc.types.Field(Boolean, graphql_name="force") + """Permit updates of branch Refs that are not fast-forwards?""" + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class UpdateRepositoryInput(sgqlc.types.Input): + """Autogenerated input type of UpdateRepository""" + + __schema__ = github_schema + __field_names__ = ( + "repository_id", + "name", + "description", + "template", + "homepage_url", + "has_wiki_enabled", + "has_issues_enabled", + "has_projects_enabled", + "client_mutation_id", + ) + repository_id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="repositoryId") + """The ID of the repository to update.""" + + name = sgqlc.types.Field(String, graphql_name="name") + """The new name of the repository.""" + + description = sgqlc.types.Field(String, graphql_name="description") + """A new description for the repository. Pass an empty string to + erase the existing description. + """ + + template = sgqlc.types.Field(Boolean, graphql_name="template") + """Whether this repository should be marked as a template such that + anyone who can access it can create new repositories with the same + files and directory structure. + """ + + homepage_url = sgqlc.types.Field(URI, graphql_name="homepageUrl") + """The URL for a web page about this repository. Pass an empty string + to erase the existing URL. + """ + + has_wiki_enabled = sgqlc.types.Field(Boolean, graphql_name="hasWikiEnabled") + """Indicates if the repository should have the wiki feature enabled.""" + + has_issues_enabled = sgqlc.types.Field(Boolean, graphql_name="hasIssuesEnabled") + """Indicates if the repository should have the issues feature + enabled. + """ + + has_projects_enabled = sgqlc.types.Field(Boolean, graphql_name="hasProjectsEnabled") + """Indicates if the repository should have the project boards feature + enabled. 
+ """ + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class UpdateSponsorshipPreferencesInput(sgqlc.types.Input): + """Autogenerated input type of UpdateSponsorshipPreferences""" + + __schema__ = github_schema + __field_names__ = ( + "sponsor_id", + "sponsor_login", + "sponsorable_id", + "sponsorable_login", + "receive_emails", + "privacy_level", + "client_mutation_id", + ) + sponsor_id = sgqlc.types.Field(ID, graphql_name="sponsorId") + """The ID of the user or organization who is acting as the sponsor, + paying for the sponsorship. Required if sponsorLogin is not given. + """ + + sponsor_login = sgqlc.types.Field(String, graphql_name="sponsorLogin") + """The username of the user or organization who is acting as the + sponsor, paying for the sponsorship. Required if sponsorId is not + given. + """ + + sponsorable_id = sgqlc.types.Field(ID, graphql_name="sponsorableId") + """The ID of the user or organization who is receiving the + sponsorship. Required if sponsorableLogin is not given. + """ + + sponsorable_login = sgqlc.types.Field(String, graphql_name="sponsorableLogin") + """The username of the user or organization who is receiving the + sponsorship. Required if sponsorableId is not given. + """ + + receive_emails = sgqlc.types.Field(Boolean, graphql_name="receiveEmails") + """Whether the sponsor should receive email updates from the + sponsorable. + """ + + privacy_level = sgqlc.types.Field(SponsorshipPrivacy, graphql_name="privacyLevel") + """Specify whether others should be able to see that the sponsor is + sponsoring the sponsorable. Public visibility still does not + reveal which tier is used. 
+ """ + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class UpdateSubscriptionInput(sgqlc.types.Input): + """Autogenerated input type of UpdateSubscription""" + + __schema__ = github_schema + __field_names__ = ("subscribable_id", "state", "client_mutation_id") + subscribable_id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="subscribableId") + """The Node ID of the subscribable object to modify.""" + + state = sgqlc.types.Field(sgqlc.types.non_null(SubscriptionState), graphql_name="state") + """The new state of the subscription.""" + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class UpdateTeamDiscussionCommentInput(sgqlc.types.Input): + """Autogenerated input type of UpdateTeamDiscussionComment""" + + __schema__ = github_schema + __field_names__ = ("id", "body", "body_version", "client_mutation_id") + id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="id") + """The ID of the comment to modify.""" + + body = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="body") + """The updated text of the comment.""" + + body_version = sgqlc.types.Field(String, graphql_name="bodyVersion") + """The current version of the body content.""" + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class UpdateTeamDiscussionInput(sgqlc.types.Input): + """Autogenerated input type of UpdateTeamDiscussion""" + + __schema__ = github_schema + __field_names__ = ("id", "title", "body", "body_version", "pinned", "client_mutation_id") + id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="id") + """The Node ID of the discussion to modify.""" + + title = sgqlc.types.Field(String, graphql_name="title") + """The updated title of the 
discussion.""" + + body = sgqlc.types.Field(String, graphql_name="body") + """The updated text of the discussion.""" + + body_version = sgqlc.types.Field(String, graphql_name="bodyVersion") + """The current version of the body content. If provided, this update + operation will be rejected if the given version does not match the + latest version on the server. + """ + + pinned = sgqlc.types.Field(Boolean, graphql_name="pinned") + """If provided, sets the pinned state of the updated discussion.""" + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class UpdateTeamsRepositoryInput(sgqlc.types.Input): + """Autogenerated input type of UpdateTeamsRepository""" + + __schema__ = github_schema + __field_names__ = ("repository_id", "team_ids", "permission", "client_mutation_id") + repository_id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="repositoryId") + """Repository ID being granted access to.""" + + team_ids = sgqlc.types.Field(sgqlc.types.non_null(sgqlc.types.list_of(sgqlc.types.non_null(ID))), graphql_name="teamIds") + """A list of teams being granted access. 
Limit: 10""" + + permission = sgqlc.types.Field(sgqlc.types.non_null(RepositoryPermission), graphql_name="permission") + """Permission that should be granted to the teams.""" + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class UpdateTopicsInput(sgqlc.types.Input): + """Autogenerated input type of UpdateTopics""" + + __schema__ = github_schema + __field_names__ = ("repository_id", "topic_names", "client_mutation_id") + repository_id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="repositoryId") + """The Node ID of the repository.""" + + topic_names = sgqlc.types.Field(sgqlc.types.non_null(sgqlc.types.list_of(sgqlc.types.non_null(String))), graphql_name="topicNames") + """An array of topic names.""" + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class UserStatusOrder(sgqlc.types.Input): + """Ordering options for user status connections.""" + + __schema__ = github_schema + __field_names__ = ("field", "direction") + field = sgqlc.types.Field(sgqlc.types.non_null(UserStatusOrderField), graphql_name="field") + """The field to order user statuses by.""" + + direction = sgqlc.types.Field(sgqlc.types.non_null(OrderDirection), graphql_name="direction") + """The ordering direction.""" + + +class VerifiableDomainOrder(sgqlc.types.Input): + """Ordering options for verifiable domain connections.""" + + __schema__ = github_schema + __field_names__ = ("field", "direction") + field = sgqlc.types.Field(sgqlc.types.non_null(VerifiableDomainOrderField), graphql_name="field") + """The field to order verifiable domains by.""" + + direction = sgqlc.types.Field(sgqlc.types.non_null(OrderDirection), graphql_name="direction") + """The ordering direction.""" + + +class VerifyVerifiableDomainInput(sgqlc.types.Input): + """Autogenerated input type of 
VerifyVerifiableDomain""" + + __schema__ = github_schema + __field_names__ = ("id", "client_mutation_id") + id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="id") + """The ID of the verifiable domain to verify.""" + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +######################################################################## +# Output Objects and Interfaces +######################################################################## +class AbortQueuedMigrationsPayload(sgqlc.types.Type): + """Autogenerated return type of AbortQueuedMigrations""" + + __schema__ = github_schema + __field_names__ = ("client_mutation_id", "success") + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + success = sgqlc.types.Field(Boolean, graphql_name="success") + """Did the operation succeed?""" + + +class AcceptEnterpriseAdministratorInvitationPayload(sgqlc.types.Type): + """Autogenerated return type of + AcceptEnterpriseAdministratorInvitation + """ + + __schema__ = github_schema + __field_names__ = ("client_mutation_id", "invitation", "message") + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + invitation = sgqlc.types.Field("EnterpriseAdministratorInvitation", graphql_name="invitation") + """The invitation that was accepted.""" + + message = sgqlc.types.Field(String, graphql_name="message") + """A message confirming the result of accepting an administrator + invitation. 
+ """ + + +class AcceptTopicSuggestionPayload(sgqlc.types.Type): + """Autogenerated return type of AcceptTopicSuggestion""" + + __schema__ = github_schema + __field_names__ = ("client_mutation_id", "topic") + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + topic = sgqlc.types.Field("Topic", graphql_name="topic") + """The accepted topic.""" + + +class Actor(sgqlc.types.Interface): + """Represents an object which can take actions on GitHub. Typically a + User or Bot. + """ + + __schema__ = github_schema + __field_names__ = ("avatar_url", "login", "resource_path", "url") + avatar_url = sgqlc.types.Field( + sgqlc.types.non_null(URI), + graphql_name="avatarUrl", + args=sgqlc.types.ArgDict((("size", sgqlc.types.Arg(Int, graphql_name="size", default=None)),)), + ) + """A URL pointing to the actor's public avatar. + + Arguments: + + * `size` (`Int`): The size of the resulting square image. + """ + + login = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="login") + """The username of the actor.""" + + resource_path = sgqlc.types.Field(sgqlc.types.non_null(URI), graphql_name="resourcePath") + """The HTTP path for this actor.""" + + url = sgqlc.types.Field(sgqlc.types.non_null(URI), graphql_name="url") + """The HTTP URL for this actor.""" + + +class ActorLocation(sgqlc.types.Type): + """Location information for an actor""" + + __schema__ = github_schema + __field_names__ = ("city", "country", "country_code", "region", "region_code") + city = sgqlc.types.Field(String, graphql_name="city") + """City""" + + country = sgqlc.types.Field(String, graphql_name="country") + """Country name""" + + country_code = sgqlc.types.Field(String, graphql_name="countryCode") + """Country code""" + + region = sgqlc.types.Field(String, graphql_name="region") + """Region name""" + + region_code = sgqlc.types.Field(String, graphql_name="regionCode") + """Region or state code""" + + 
+class AddAssigneesToAssignablePayload(sgqlc.types.Type): + """Autogenerated return type of AddAssigneesToAssignable""" + + __schema__ = github_schema + __field_names__ = ("assignable", "client_mutation_id") + assignable = sgqlc.types.Field("Assignable", graphql_name="assignable") + """The item that was assigned.""" + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class AddCommentPayload(sgqlc.types.Type): + """Autogenerated return type of AddComment""" + + __schema__ = github_schema + __field_names__ = ("client_mutation_id", "comment_edge", "subject", "timeline_edge") + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + comment_edge = sgqlc.types.Field("IssueCommentEdge", graphql_name="commentEdge") + """The edge from the subject's comment connection.""" + + subject = sgqlc.types.Field("Node", graphql_name="subject") + """The subject""" + + timeline_edge = sgqlc.types.Field("IssueTimelineItemEdge", graphql_name="timelineEdge") + """The edge from the subject's timeline connection.""" + + +class AddDiscussionCommentPayload(sgqlc.types.Type): + """Autogenerated return type of AddDiscussionComment""" + + __schema__ = github_schema + __field_names__ = ("client_mutation_id", "comment") + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + comment = sgqlc.types.Field("DiscussionComment", graphql_name="comment") + """The newly created discussion comment.""" + + +class AddDiscussionPollVotePayload(sgqlc.types.Type): + """Autogenerated return type of AddDiscussionPollVote""" + + __schema__ = github_schema + __field_names__ = ("client_mutation_id", "poll_option") + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for 
the client performing the mutation.""" + + poll_option = sgqlc.types.Field("DiscussionPollOption", graphql_name="pollOption") + """The poll option that a vote was added to.""" + + +class AddEnterpriseSupportEntitlementPayload(sgqlc.types.Type): + """Autogenerated return type of AddEnterpriseSupportEntitlement""" + + __schema__ = github_schema + __field_names__ = ("client_mutation_id", "message") + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + message = sgqlc.types.Field(String, graphql_name="message") + """A message confirming the result of adding the support entitlement.""" + + +class AddLabelsToLabelablePayload(sgqlc.types.Type): + """Autogenerated return type of AddLabelsToLabelable""" + + __schema__ = github_schema + __field_names__ = ("client_mutation_id", "labelable") + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + labelable = sgqlc.types.Field("Labelable", graphql_name="labelable") + """The item that was labeled.""" + + +class AddProjectCardPayload(sgqlc.types.Type): + """Autogenerated return type of AddProjectCard""" + + __schema__ = github_schema + __field_names__ = ("card_edge", "client_mutation_id", "project_column") + card_edge = sgqlc.types.Field("ProjectCardEdge", graphql_name="cardEdge") + """The edge from the ProjectColumn's card connection.""" + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + project_column = sgqlc.types.Field("ProjectColumn", graphql_name="projectColumn") + """The ProjectColumn""" + + +class AddProjectColumnPayload(sgqlc.types.Type): + """Autogenerated return type of AddProjectColumn""" + + __schema__ = github_schema + __field_names__ = ("client_mutation_id", "column_edge", "project") + client_mutation_id = 
sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + column_edge = sgqlc.types.Field("ProjectColumnEdge", graphql_name="columnEdge") + """The edge from the project's column connection.""" + + project = sgqlc.types.Field("Project", graphql_name="project") + """The project""" + + +class AddProjectDraftIssuePayload(sgqlc.types.Type): + """Autogenerated return type of AddProjectDraftIssue""" + + __schema__ = github_schema + __field_names__ = ("client_mutation_id", "project_next_item") + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + project_next_item = sgqlc.types.Field("ProjectNextItem", graphql_name="projectNextItem") + """The draft issue added to the project.""" + + +class AddProjectNextItemPayload(sgqlc.types.Type): + """Autogenerated return type of AddProjectNextItem""" + + __schema__ = github_schema + __field_names__ = ("client_mutation_id", "project_next_item") + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + project_next_item = sgqlc.types.Field("ProjectNextItem", graphql_name="projectNextItem") + """The item added to the project.""" + + +class AddPullRequestReviewCommentPayload(sgqlc.types.Type): + """Autogenerated return type of AddPullRequestReviewComment""" + + __schema__ = github_schema + __field_names__ = ("client_mutation_id", "comment", "comment_edge") + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + comment = sgqlc.types.Field("PullRequestReviewComment", graphql_name="comment") + """The newly created comment.""" + + comment_edge = sgqlc.types.Field("PullRequestReviewCommentEdge", graphql_name="commentEdge") + """The edge from the review's comment connection.""" + + 
+class AddPullRequestReviewPayload(sgqlc.types.Type): + """Autogenerated return type of AddPullRequestReview""" + + __schema__ = github_schema + __field_names__ = ("client_mutation_id", "pull_request_review", "review_edge") + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + pull_request_review = sgqlc.types.Field("PullRequestReview", graphql_name="pullRequestReview") + """The newly created pull request review.""" + + review_edge = sgqlc.types.Field("PullRequestReviewEdge", graphql_name="reviewEdge") + """The edge from the pull request's review connection.""" + + +class AddPullRequestReviewThreadPayload(sgqlc.types.Type): + """Autogenerated return type of AddPullRequestReviewThread""" + + __schema__ = github_schema + __field_names__ = ("client_mutation_id", "thread") + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + thread = sgqlc.types.Field("PullRequestReviewThread", graphql_name="thread") + """The newly created thread.""" + + +class AddReactionPayload(sgqlc.types.Type): + """Autogenerated return type of AddReaction""" + + __schema__ = github_schema + __field_names__ = ("client_mutation_id", "reaction", "subject") + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + reaction = sgqlc.types.Field("Reaction", graphql_name="reaction") + """The reaction object.""" + + subject = sgqlc.types.Field("Reactable", graphql_name="subject") + """The reactable subject.""" + + +class AddStarPayload(sgqlc.types.Type): + """Autogenerated return type of AddStar""" + + __schema__ = github_schema + __field_names__ = ("client_mutation_id", "starrable") + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing 
the mutation.""" + + starrable = sgqlc.types.Field("Starrable", graphql_name="starrable") + """The starrable.""" + + +class AddUpvotePayload(sgqlc.types.Type): + """Autogenerated return type of AddUpvote""" + + __schema__ = github_schema + __field_names__ = ("client_mutation_id", "subject") + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + subject = sgqlc.types.Field("Votable", graphql_name="subject") + """The votable subject.""" + + +class AddVerifiableDomainPayload(sgqlc.types.Type): + """Autogenerated return type of AddVerifiableDomain""" + + __schema__ = github_schema + __field_names__ = ("client_mutation_id", "domain") + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + domain = sgqlc.types.Field("VerifiableDomain", graphql_name="domain") + """The verifiable domain that was added.""" + + +class ApproveDeploymentsPayload(sgqlc.types.Type): + """Autogenerated return type of ApproveDeployments""" + + __schema__ = github_schema + __field_names__ = ("client_mutation_id", "deployments") + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + deployments = sgqlc.types.Field(sgqlc.types.list_of(sgqlc.types.non_null("Deployment")), graphql_name="deployments") + """The affected deployments.""" + + +class ApproveVerifiableDomainPayload(sgqlc.types.Type): + """Autogenerated return type of ApproveVerifiableDomain""" + + __schema__ = github_schema + __field_names__ = ("client_mutation_id", "domain") + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + domain = sgqlc.types.Field("VerifiableDomain", graphql_name="domain") + """The verifiable domain that was approved.""" + + 
+class ArchiveRepositoryPayload(sgqlc.types.Type): + """Autogenerated return type of ArchiveRepository""" + + __schema__ = github_schema + __field_names__ = ("client_mutation_id", "repository") + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + repository = sgqlc.types.Field("Repository", graphql_name="repository") + """The repository that was marked as archived.""" + + +class Assignable(sgqlc.types.Interface): + """An object that can have users assigned to it.""" + + __schema__ = github_schema + __field_names__ = ("assignees",) + assignees = sgqlc.types.Field( + sgqlc.types.non_null("UserConnection"), + graphql_name="assignees", + args=sgqlc.types.ArgDict( + ( + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ) + ), + ) + """A list of Users assigned to this object. + + Arguments: + + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. 
+ """ + + +class AuditEntry(sgqlc.types.Interface): + """An entry in the audit log.""" + + __schema__ = github_schema + __field_names__ = ( + "action", + "actor", + "actor_ip", + "actor_location", + "actor_login", + "actor_resource_path", + "actor_url", + "created_at", + "operation_type", + "user", + "user_login", + "user_resource_path", + "user_url", + ) + action = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="action") + """The action name""" + + actor = sgqlc.types.Field("AuditEntryActor", graphql_name="actor") + """The user who initiated the action""" + + actor_ip = sgqlc.types.Field(String, graphql_name="actorIp") + """The IP address of the actor""" + + actor_location = sgqlc.types.Field(ActorLocation, graphql_name="actorLocation") + """A readable representation of the actor's location""" + + actor_login = sgqlc.types.Field(String, graphql_name="actorLogin") + """The username of the user who initiated the action""" + + actor_resource_path = sgqlc.types.Field(URI, graphql_name="actorResourcePath") + """The HTTP path for the actor.""" + + actor_url = sgqlc.types.Field(URI, graphql_name="actorUrl") + """The HTTP URL for the actor.""" + + created_at = sgqlc.types.Field(sgqlc.types.non_null(PreciseDateTime), graphql_name="createdAt") + """The time the action was initiated""" + + operation_type = sgqlc.types.Field(OperationType, graphql_name="operationType") + """The corresponding operation type for the action""" + + user = sgqlc.types.Field("User", graphql_name="user") + """The user affected by the action""" + + user_login = sgqlc.types.Field(String, graphql_name="userLogin") + """For actions involving two users, the actor is the initiator and + the user is the affected user. 
+ """ + + user_resource_path = sgqlc.types.Field(URI, graphql_name="userResourcePath") + """The HTTP path for the user.""" + + user_url = sgqlc.types.Field(URI, graphql_name="userUrl") + """The HTTP URL for the user.""" + + +class AutoMergeRequest(sgqlc.types.Type): + """Represents an auto-merge request for a pull request""" + + __schema__ = github_schema + __field_names__ = ("author_email", "commit_body", "commit_headline", "enabled_at", "enabled_by", "merge_method", "pull_request") + author_email = sgqlc.types.Field(String, graphql_name="authorEmail") + """The email address of the author of this auto-merge request.""" + + commit_body = sgqlc.types.Field(String, graphql_name="commitBody") + """The commit message of the auto-merge request. If a merge queue is + required by the base branch, this value will be set by the merge + queue when merging. + """ + + commit_headline = sgqlc.types.Field(String, graphql_name="commitHeadline") + """The commit title of the auto-merge request. If a merge queue is + required by the base branch, this value will be set by the merge + queue when merging + """ + + enabled_at = sgqlc.types.Field(DateTime, graphql_name="enabledAt") + """When was this auto-merge request was enabled.""" + + enabled_by = sgqlc.types.Field(Actor, graphql_name="enabledBy") + """The actor who created the auto-merge request.""" + + merge_method = sgqlc.types.Field(sgqlc.types.non_null(PullRequestMergeMethod), graphql_name="mergeMethod") + """The merge method of the auto-merge request. If a merge queue is + required by the base branch, this value will be set by the merge + queue when merging. 
+ """ + + pull_request = sgqlc.types.Field(sgqlc.types.non_null("PullRequest"), graphql_name="pullRequest") + """The pull request that this auto-merge request is set against.""" + + +class Blame(sgqlc.types.Type): + """Represents a Git blame.""" + + __schema__ = github_schema + __field_names__ = ("ranges",) + ranges = sgqlc.types.Field(sgqlc.types.non_null(sgqlc.types.list_of(sgqlc.types.non_null("BlameRange"))), graphql_name="ranges") + """The list of ranges from a Git blame.""" + + +class BlameRange(sgqlc.types.Type): + """Represents a range of information from a Git blame.""" + + __schema__ = github_schema + __field_names__ = ("age", "commit", "ending_line", "starting_line") + age = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="age") + """Identifies the recency of the change, from 1 (new) to 10 (old). + This is calculated as a 2-quantile and determines the length of + distance between the median age of all the changes in the file and + the recency of the current range's change. 
+ """ + + commit = sgqlc.types.Field(sgqlc.types.non_null("Commit"), graphql_name="commit") + """Identifies the line author""" + + ending_line = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="endingLine") + """The ending line for the range""" + + starting_line = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="startingLine") + """The starting line for the range""" + + +class BranchProtectionRuleConflict(sgqlc.types.Type): + """A conflict between two branch protection rules.""" + + __schema__ = github_schema + __field_names__ = ("branch_protection_rule", "conflicting_branch_protection_rule", "ref") + branch_protection_rule = sgqlc.types.Field("BranchProtectionRule", graphql_name="branchProtectionRule") + """Identifies the branch protection rule.""" + + conflicting_branch_protection_rule = sgqlc.types.Field("BranchProtectionRule", graphql_name="conflictingBranchProtectionRule") + """Identifies the conflicting branch protection rule.""" + + ref = sgqlc.types.Field("Ref", graphql_name="ref") + """Identifies the branch ref that has conflicting rules""" + + +class BranchProtectionRuleConflictConnection(sgqlc.types.relay.Connection): + """The connection type for BranchProtectionRuleConflict.""" + + __schema__ = github_schema + __field_names__ = ("edges", "nodes", "page_info", "total_count") + edges = sgqlc.types.Field(sgqlc.types.list_of("BranchProtectionRuleConflictEdge"), graphql_name="edges") + """A list of edges.""" + + nodes = sgqlc.types.Field(sgqlc.types.list_of(BranchProtectionRuleConflict), graphql_name="nodes") + """A list of nodes.""" + + page_info = sgqlc.types.Field(sgqlc.types.non_null("PageInfo"), graphql_name="pageInfo") + """Information to aid in pagination.""" + + total_count = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="totalCount") + """Identifies the total count of items in the connection.""" + + +class BranchProtectionRuleConflictEdge(sgqlc.types.Type): + """An edge in a connection.""" + + __schema__ = 
github_schema + __field_names__ = ("cursor", "node") + cursor = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="cursor") + """A cursor for use in pagination.""" + + node = sgqlc.types.Field(BranchProtectionRuleConflict, graphql_name="node") + """The item at the end of the edge.""" + + +class BranchProtectionRuleConnection(sgqlc.types.relay.Connection): + """The connection type for BranchProtectionRule.""" + + __schema__ = github_schema + __field_names__ = ("edges", "nodes", "page_info", "total_count") + edges = sgqlc.types.Field(sgqlc.types.list_of("BranchProtectionRuleEdge"), graphql_name="edges") + """A list of edges.""" + + nodes = sgqlc.types.Field(sgqlc.types.list_of("BranchProtectionRule"), graphql_name="nodes") + """A list of nodes.""" + + page_info = sgqlc.types.Field(sgqlc.types.non_null("PageInfo"), graphql_name="pageInfo") + """Information to aid in pagination.""" + + total_count = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="totalCount") + """Identifies the total count of items in the connection.""" + + +class BranchProtectionRuleEdge(sgqlc.types.Type): + """An edge in a connection.""" + + __schema__ = github_schema + __field_names__ = ("cursor", "node") + cursor = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="cursor") + """A cursor for use in pagination.""" + + node = sgqlc.types.Field("BranchProtectionRule", graphql_name="node") + """The item at the end of the edge.""" + + +class BypassForcePushAllowanceConnection(sgqlc.types.relay.Connection): + """The connection type for BypassForcePushAllowance.""" + + __schema__ = github_schema + __field_names__ = ("edges", "nodes", "page_info", "total_count") + edges = sgqlc.types.Field(sgqlc.types.list_of("BypassForcePushAllowanceEdge"), graphql_name="edges") + """A list of edges.""" + + nodes = sgqlc.types.Field(sgqlc.types.list_of("BypassForcePushAllowance"), graphql_name="nodes") + """A list of nodes.""" + + page_info = 
sgqlc.types.Field(sgqlc.types.non_null("PageInfo"), graphql_name="pageInfo") + """Information to aid in pagination.""" + + total_count = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="totalCount") + """Identifies the total count of items in the connection.""" + + +class BypassForcePushAllowanceEdge(sgqlc.types.Type): + """An edge in a connection.""" + + __schema__ = github_schema + __field_names__ = ("cursor", "node") + cursor = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="cursor") + """A cursor for use in pagination.""" + + node = sgqlc.types.Field("BypassForcePushAllowance", graphql_name="node") + """The item at the end of the edge.""" + + +class BypassPullRequestAllowanceConnection(sgqlc.types.relay.Connection): + """The connection type for BypassPullRequestAllowance.""" + + __schema__ = github_schema + __field_names__ = ("edges", "nodes", "page_info", "total_count") + edges = sgqlc.types.Field(sgqlc.types.list_of("BypassPullRequestAllowanceEdge"), graphql_name="edges") + """A list of edges.""" + + nodes = sgqlc.types.Field(sgqlc.types.list_of("BypassPullRequestAllowance"), graphql_name="nodes") + """A list of nodes.""" + + page_info = sgqlc.types.Field(sgqlc.types.non_null("PageInfo"), graphql_name="pageInfo") + """Information to aid in pagination.""" + + total_count = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="totalCount") + """Identifies the total count of items in the connection.""" + + +class BypassPullRequestAllowanceEdge(sgqlc.types.Type): + """An edge in a connection.""" + + __schema__ = github_schema + __field_names__ = ("cursor", "node") + cursor = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="cursor") + """A cursor for use in pagination.""" + + node = sgqlc.types.Field("BypassPullRequestAllowance", graphql_name="node") + """The item at the end of the edge.""" + + +class CVSS(sgqlc.types.Type): + """The Common Vulnerability Scoring System""" + + __schema__ = github_schema + __field_names__ = 
("score", "vector_string") + score = sgqlc.types.Field(sgqlc.types.non_null(Float), graphql_name="score") + """The CVSS score associated with this advisory""" + + vector_string = sgqlc.types.Field(String, graphql_name="vectorString") + """The CVSS vector string associated with this advisory""" + + +class CWEConnection(sgqlc.types.relay.Connection): + """The connection type for CWE.""" + + __schema__ = github_schema + __field_names__ = ("edges", "nodes", "page_info", "total_count") + edges = sgqlc.types.Field(sgqlc.types.list_of("CWEEdge"), graphql_name="edges") + """A list of edges.""" + + nodes = sgqlc.types.Field(sgqlc.types.list_of("CWE"), graphql_name="nodes") + """A list of nodes.""" + + page_info = sgqlc.types.Field(sgqlc.types.non_null("PageInfo"), graphql_name="pageInfo") + """Information to aid in pagination.""" + + total_count = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="totalCount") + """Identifies the total count of items in the connection.""" + + +class CWEEdge(sgqlc.types.Type): + """An edge in a connection.""" + + __schema__ = github_schema + __field_names__ = ("cursor", "node") + cursor = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="cursor") + """A cursor for use in pagination.""" + + node = sgqlc.types.Field("CWE", graphql_name="node") + """The item at the end of the edge.""" + + +class CancelEnterpriseAdminInvitationPayload(sgqlc.types.Type): + """Autogenerated return type of CancelEnterpriseAdminInvitation""" + + __schema__ = github_schema + __field_names__ = ("client_mutation_id", "invitation", "message") + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + invitation = sgqlc.types.Field("EnterpriseAdministratorInvitation", graphql_name="invitation") + """The invitation that was canceled.""" + + message = sgqlc.types.Field(String, graphql_name="message") + """A message confirming the result of canceling an 
administrator + invitation. + """ + + +class CancelSponsorshipPayload(sgqlc.types.Type): + """Autogenerated return type of CancelSponsorship""" + + __schema__ = github_schema + __field_names__ = ("client_mutation_id", "sponsors_tier") + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + sponsors_tier = sgqlc.types.Field("SponsorsTier", graphql_name="sponsorsTier") + """The tier that was being used at the time of cancellation.""" + + +class ChangeUserStatusPayload(sgqlc.types.Type): + """Autogenerated return type of ChangeUserStatus""" + + __schema__ = github_schema + __field_names__ = ("client_mutation_id", "status") + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + status = sgqlc.types.Field("UserStatus", graphql_name="status") + """Your updated status.""" + + +class CheckAnnotation(sgqlc.types.Type): + """A single check annotation.""" + + __schema__ = github_schema + __field_names__ = ("annotation_level", "blob_url", "database_id", "location", "message", "path", "raw_details", "title") + annotation_level = sgqlc.types.Field(CheckAnnotationLevel, graphql_name="annotationLevel") + """The annotation's severity level.""" + + blob_url = sgqlc.types.Field(sgqlc.types.non_null(URI), graphql_name="blobUrl") + """The path to the file that this annotation was made on.""" + + database_id = sgqlc.types.Field(Int, graphql_name="databaseId") + """Identifies the primary key from the database.""" + + location = sgqlc.types.Field(sgqlc.types.non_null("CheckAnnotationSpan"), graphql_name="location") + """The position of this annotation.""" + + message = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="message") + """The annotation's message.""" + + path = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="path") + """The path that this annotation was made 
on.""" + + raw_details = sgqlc.types.Field(String, graphql_name="rawDetails") + """Additional information about the annotation.""" + + title = sgqlc.types.Field(String, graphql_name="title") + """The annotation's title""" + + +class CheckAnnotationConnection(sgqlc.types.relay.Connection): + """The connection type for CheckAnnotation.""" + + __schema__ = github_schema + __field_names__ = ("edges", "nodes", "page_info", "total_count") + edges = sgqlc.types.Field(sgqlc.types.list_of("CheckAnnotationEdge"), graphql_name="edges") + """A list of edges.""" + + nodes = sgqlc.types.Field(sgqlc.types.list_of(CheckAnnotation), graphql_name="nodes") + """A list of nodes.""" + + page_info = sgqlc.types.Field(sgqlc.types.non_null("PageInfo"), graphql_name="pageInfo") + """Information to aid in pagination.""" + + total_count = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="totalCount") + """Identifies the total count of items in the connection.""" + + +class CheckAnnotationEdge(sgqlc.types.Type): + """An edge in a connection.""" + + __schema__ = github_schema + __field_names__ = ("cursor", "node") + cursor = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="cursor") + """A cursor for use in pagination.""" + + node = sgqlc.types.Field(CheckAnnotation, graphql_name="node") + """The item at the end of the edge.""" + + +class CheckAnnotationPosition(sgqlc.types.Type): + """A character position in a check annotation.""" + + __schema__ = github_schema + __field_names__ = ("column", "line") + column = sgqlc.types.Field(Int, graphql_name="column") + """Column number (1 indexed).""" + + line = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="line") + """Line number (1 indexed).""" + + +class CheckAnnotationSpan(sgqlc.types.Type): + """An inclusive pair of positions for a check annotation.""" + + __schema__ = github_schema + __field_names__ = ("end", "start") + end = sgqlc.types.Field(sgqlc.types.non_null(CheckAnnotationPosition), graphql_name="end") + 
"""End position (inclusive).""" + + start = sgqlc.types.Field(sgqlc.types.non_null(CheckAnnotationPosition), graphql_name="start") + """Start position (inclusive).""" + + +class CheckRunConnection(sgqlc.types.relay.Connection): + """The connection type for CheckRun.""" + + __schema__ = github_schema + __field_names__ = ("edges", "nodes", "page_info", "total_count") + edges = sgqlc.types.Field(sgqlc.types.list_of("CheckRunEdge"), graphql_name="edges") + """A list of edges.""" + + nodes = sgqlc.types.Field(sgqlc.types.list_of("CheckRun"), graphql_name="nodes") + """A list of nodes.""" + + page_info = sgqlc.types.Field(sgqlc.types.non_null("PageInfo"), graphql_name="pageInfo") + """Information to aid in pagination.""" + + total_count = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="totalCount") + """Identifies the total count of items in the connection.""" + + +class CheckRunEdge(sgqlc.types.Type): + """An edge in a connection.""" + + __schema__ = github_schema + __field_names__ = ("cursor", "node") + cursor = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="cursor") + """A cursor for use in pagination.""" + + node = sgqlc.types.Field("CheckRun", graphql_name="node") + """The item at the end of the edge.""" + + +class CheckStep(sgqlc.types.Type): + """A single check step.""" + + __schema__ = github_schema + __field_names__ = ("completed_at", "conclusion", "external_id", "name", "number", "seconds_to_completion", "started_at", "status") + completed_at = sgqlc.types.Field(DateTime, graphql_name="completedAt") + """Identifies the date and time when the check step was completed.""" + + conclusion = sgqlc.types.Field(CheckConclusionState, graphql_name="conclusion") + """The conclusion of the check step.""" + + external_id = sgqlc.types.Field(String, graphql_name="externalId") + """A reference for the check step on the integrator's system.""" + + name = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="name") + """The step's name.""" + + 
number = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="number") + """The index of the step in the list of steps of the parent check + run. + """ + + seconds_to_completion = sgqlc.types.Field(Int, graphql_name="secondsToCompletion") + """Number of seconds to completion.""" + + started_at = sgqlc.types.Field(DateTime, graphql_name="startedAt") + """Identifies the date and time when the check step was started.""" + + status = sgqlc.types.Field(sgqlc.types.non_null(CheckStatusState), graphql_name="status") + """The current status of the check step.""" + + +class CheckStepConnection(sgqlc.types.relay.Connection): + """The connection type for CheckStep.""" + + __schema__ = github_schema + __field_names__ = ("edges", "nodes", "page_info", "total_count") + edges = sgqlc.types.Field(sgqlc.types.list_of("CheckStepEdge"), graphql_name="edges") + """A list of edges.""" + + nodes = sgqlc.types.Field(sgqlc.types.list_of(CheckStep), graphql_name="nodes") + """A list of nodes.""" + + page_info = sgqlc.types.Field(sgqlc.types.non_null("PageInfo"), graphql_name="pageInfo") + """Information to aid in pagination.""" + + total_count = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="totalCount") + """Identifies the total count of items in the connection.""" + + +class CheckStepEdge(sgqlc.types.Type): + """An edge in a connection.""" + + __schema__ = github_schema + __field_names__ = ("cursor", "node") + cursor = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="cursor") + """A cursor for use in pagination.""" + + node = sgqlc.types.Field(CheckStep, graphql_name="node") + """The item at the end of the edge.""" + + +class CheckSuiteConnection(sgqlc.types.relay.Connection): + """The connection type for CheckSuite.""" + + __schema__ = github_schema + __field_names__ = ("edges", "nodes", "page_info", "total_count") + edges = sgqlc.types.Field(sgqlc.types.list_of("CheckSuiteEdge"), graphql_name="edges") + """A list of edges.""" + + nodes = 
sgqlc.types.Field(sgqlc.types.list_of("CheckSuite"), graphql_name="nodes") + """A list of nodes.""" + + page_info = sgqlc.types.Field(sgqlc.types.non_null("PageInfo"), graphql_name="pageInfo") + """Information to aid in pagination.""" + + total_count = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="totalCount") + """Identifies the total count of items in the connection.""" + + +class CheckSuiteEdge(sgqlc.types.Type): + """An edge in a connection.""" + + __schema__ = github_schema + __field_names__ = ("cursor", "node") + cursor = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="cursor") + """A cursor for use in pagination.""" + + node = sgqlc.types.Field("CheckSuite", graphql_name="node") + """The item at the end of the edge.""" + + +class ClearLabelsFromLabelablePayload(sgqlc.types.Type): + """Autogenerated return type of ClearLabelsFromLabelable""" + + __schema__ = github_schema + __field_names__ = ("client_mutation_id", "labelable") + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + labelable = sgqlc.types.Field("Labelable", graphql_name="labelable") + """The item that was unlabeled.""" + + +class CloneProjectPayload(sgqlc.types.Type): + """Autogenerated return type of CloneProject""" + + __schema__ = github_schema + __field_names__ = ("client_mutation_id", "job_status_id", "project") + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + job_status_id = sgqlc.types.Field(String, graphql_name="jobStatusId") + """The id of the JobStatus for populating cloned fields.""" + + project = sgqlc.types.Field("Project", graphql_name="project") + """The new cloned project.""" + + +class CloneTemplateRepositoryPayload(sgqlc.types.Type): + """Autogenerated return type of CloneTemplateRepository""" + + __schema__ = github_schema + __field_names__ = 
("client_mutation_id", "repository") + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + repository = sgqlc.types.Field("Repository", graphql_name="repository") + """The new repository.""" + + +class Closable(sgqlc.types.Interface): + """An object that can be closed""" + + __schema__ = github_schema + __field_names__ = ("closed", "closed_at") + closed = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="closed") + """`true` if the object is closed (definition of closed may depend on + type) + """ + + closed_at = sgqlc.types.Field(DateTime, graphql_name="closedAt") + """Identifies the date and time when the object was closed.""" + + +class CloseIssuePayload(sgqlc.types.Type): + """Autogenerated return type of CloseIssue""" + + __schema__ = github_schema + __field_names__ = ("client_mutation_id", "issue") + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + issue = sgqlc.types.Field("Issue", graphql_name="issue") + """The issue that was closed.""" + + +class ClosePullRequestPayload(sgqlc.types.Type): + """Autogenerated return type of ClosePullRequest""" + + __schema__ = github_schema + __field_names__ = ("client_mutation_id", "pull_request") + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + pull_request = sgqlc.types.Field("PullRequest", graphql_name="pullRequest") + """The pull request that was closed.""" + + +class Comment(sgqlc.types.Interface): + """Represents a comment.""" + + __schema__ = github_schema + __field_names__ = ( + "author", + "author_association", + "body", + "body_html", + "body_text", + "created_at", + "created_via_email", + "editor", + "id", + "includes_created_edit", + "last_edited_at", + "published_at", + "updated_at", + 
"user_content_edits", + "viewer_did_author", + ) + author = sgqlc.types.Field(Actor, graphql_name="author") + """The actor who authored the comment.""" + + author_association = sgqlc.types.Field(sgqlc.types.non_null(CommentAuthorAssociation), graphql_name="authorAssociation") + """Author's association with the subject of the comment.""" + + body = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="body") + """The body as Markdown.""" + + body_html = sgqlc.types.Field(sgqlc.types.non_null(HTML), graphql_name="bodyHTML") + """The body rendered to HTML.""" + + body_text = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="bodyText") + """The body rendered to text.""" + + created_at = sgqlc.types.Field(sgqlc.types.non_null(DateTime), graphql_name="createdAt") + """Identifies the date and time when the object was created.""" + + created_via_email = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="createdViaEmail") + """Check if this comment was created via an email reply.""" + + editor = sgqlc.types.Field(Actor, graphql_name="editor") + """The actor who edited the comment.""" + + id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="id") + + includes_created_edit = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="includesCreatedEdit") + """Check if this comment was edited and includes an edit with the + creation data + """ + + last_edited_at = sgqlc.types.Field(DateTime, graphql_name="lastEditedAt") + """The moment the editor made the last edit""" + + published_at = sgqlc.types.Field(DateTime, graphql_name="publishedAt") + """Identifies when the comment was published at.""" + + updated_at = sgqlc.types.Field(sgqlc.types.non_null(DateTime), graphql_name="updatedAt") + """Identifies the date and time when the object was last updated.""" + + user_content_edits = sgqlc.types.Field( + "UserContentEditConnection", + graphql_name="userContentEdits", + args=sgqlc.types.ArgDict( + ( + ("after", sgqlc.types.Arg(String, 
graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ) + ), + ) + """A list of edits to this content. + + Arguments: + + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. + """ + + viewer_did_author = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="viewerDidAuthor") + """Did the viewer author this comment.""" + + +class CommitCommentConnection(sgqlc.types.relay.Connection): + """The connection type for CommitComment.""" + + __schema__ = github_schema + __field_names__ = ("edges", "nodes", "page_info", "total_count") + edges = sgqlc.types.Field(sgqlc.types.list_of("CommitCommentEdge"), graphql_name="edges") + """A list of edges.""" + + nodes = sgqlc.types.Field(sgqlc.types.list_of("CommitComment"), graphql_name="nodes") + """A list of nodes.""" + + page_info = sgqlc.types.Field(sgqlc.types.non_null("PageInfo"), graphql_name="pageInfo") + """Information to aid in pagination.""" + + total_count = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="totalCount") + """Identifies the total count of items in the connection.""" + + +class CommitCommentEdge(sgqlc.types.Type): + """An edge in a connection.""" + + __schema__ = github_schema + __field_names__ = ("cursor", "node") + cursor = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="cursor") + """A cursor for use in pagination.""" + + node = sgqlc.types.Field("CommitComment", graphql_name="node") + """The item at the end of the edge.""" + + +class CommitConnection(sgqlc.types.relay.Connection): + """The connection 
type for Commit.""" + + __schema__ = github_schema + __field_names__ = ("edges", "nodes", "page_info", "total_count") + edges = sgqlc.types.Field(sgqlc.types.list_of("CommitEdge"), graphql_name="edges") + """A list of edges.""" + + nodes = sgqlc.types.Field(sgqlc.types.list_of("Commit"), graphql_name="nodes") + """A list of nodes.""" + + page_info = sgqlc.types.Field(sgqlc.types.non_null("PageInfo"), graphql_name="pageInfo") + """Information to aid in pagination.""" + + total_count = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="totalCount") + """Identifies the total count of items in the connection.""" + + +class CommitContributionsByRepository(sgqlc.types.Type): + """This aggregates commits made by a user within one repository.""" + + __schema__ = github_schema + __field_names__ = ("contributions", "repository", "resource_path", "url") + contributions = sgqlc.types.Field( + sgqlc.types.non_null("CreatedCommitContributionConnection"), + graphql_name="contributions", + args=sgqlc.types.ArgDict( + ( + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ( + "order_by", + sgqlc.types.Arg(CommitContributionOrder, graphql_name="orderBy", default={"field": "OCCURRED_AT", "direction": "DESC"}), + ), + ) + ), + ) + """The commit contributions, each representing a day. + + Arguments: + + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. + * `order_by` (`CommitContributionOrder`): Ordering options for + commit contributions returned from the connection. 
(default: + `{field: OCCURRED_AT, direction: DESC}`) + """ + + repository = sgqlc.types.Field(sgqlc.types.non_null("Repository"), graphql_name="repository") + """The repository in which the commits were made.""" + + resource_path = sgqlc.types.Field(sgqlc.types.non_null(URI), graphql_name="resourcePath") + """The HTTP path for the user's commits to the repository in this + time range. + """ + + url = sgqlc.types.Field(sgqlc.types.non_null(URI), graphql_name="url") + """The HTTP URL for the user's commits to the repository in this time + range. + """ + + +class CommitEdge(sgqlc.types.Type): + """An edge in a connection.""" + + __schema__ = github_schema + __field_names__ = ("cursor", "node") + cursor = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="cursor") + """A cursor for use in pagination.""" + + node = sgqlc.types.Field("Commit", graphql_name="node") + """The item at the end of the edge.""" + + +class CommitHistoryConnection(sgqlc.types.relay.Connection): + """The connection type for Commit.""" + + __schema__ = github_schema + __field_names__ = ("edges", "nodes", "page_info", "total_count") + edges = sgqlc.types.Field(sgqlc.types.list_of(CommitEdge), graphql_name="edges") + """A list of edges.""" + + nodes = sgqlc.types.Field(sgqlc.types.list_of("Commit"), graphql_name="nodes") + """A list of nodes.""" + + page_info = sgqlc.types.Field(sgqlc.types.non_null("PageInfo"), graphql_name="pageInfo") + """Information to aid in pagination.""" + + total_count = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="totalCount") + """Identifies the total count of items in the connection.""" + + +class Contribution(sgqlc.types.Interface): + """Represents a contribution a user made on GitHub, such as opening + an issue. 
+ """ + + __schema__ = github_schema + __field_names__ = ("is_restricted", "occurred_at", "resource_path", "url", "user") + is_restricted = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="isRestricted") + """Whether this contribution is associated with a record you do not + have access to. For example, your own 'first issue' contribution + may have been made on a repository you can no longer access. + """ + + occurred_at = sgqlc.types.Field(sgqlc.types.non_null(DateTime), graphql_name="occurredAt") + """When this contribution was made.""" + + resource_path = sgqlc.types.Field(sgqlc.types.non_null(URI), graphql_name="resourcePath") + """The HTTP path for this contribution.""" + + url = sgqlc.types.Field(sgqlc.types.non_null(URI), graphql_name="url") + """The HTTP URL for this contribution.""" + + user = sgqlc.types.Field(sgqlc.types.non_null("User"), graphql_name="user") + """The user who made this contribution.""" + + +class ContributionCalendar(sgqlc.types.Type): + """A calendar of contributions made on GitHub by a user.""" + + __schema__ = github_schema + __field_names__ = ("colors", "is_halloween", "months", "total_contributions", "weeks") + colors = sgqlc.types.Field(sgqlc.types.non_null(sgqlc.types.list_of(sgqlc.types.non_null(String))), graphql_name="colors") + """A list of hex color codes used in this calendar. The darker the + color, the more contributions it represents. + """ + + is_halloween = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="isHalloween") + """Determine if the color set was chosen because it's currently + Halloween. 
+ """ + + months = sgqlc.types.Field( + sgqlc.types.non_null(sgqlc.types.list_of(sgqlc.types.non_null("ContributionCalendarMonth"))), graphql_name="months" + ) + """A list of the months of contributions in this calendar.""" + + total_contributions = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="totalContributions") + """The count of total contributions in the calendar.""" + + weeks = sgqlc.types.Field( + sgqlc.types.non_null(sgqlc.types.list_of(sgqlc.types.non_null("ContributionCalendarWeek"))), graphql_name="weeks" + ) + """A list of the weeks of contributions in this calendar.""" + + +class ContributionCalendarDay(sgqlc.types.Type): + """Represents a single day of contributions on GitHub by a user.""" + + __schema__ = github_schema + __field_names__ = ("color", "contribution_count", "contribution_level", "date", "weekday") + color = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="color") + """The hex color code that represents how many contributions were + made on this day compared to others in the calendar. + """ + + contribution_count = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="contributionCount") + """How many contributions were made by the user on this day.""" + + contribution_level = sgqlc.types.Field(sgqlc.types.non_null(ContributionLevel), graphql_name="contributionLevel") + """Indication of contributions, relative to other days. Can be used + to indicate which color to represent this day on a calendar. + """ + + date = sgqlc.types.Field(sgqlc.types.non_null(Date), graphql_name="date") + """The day this square represents.""" + + weekday = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="weekday") + """A number representing which day of the week this square + represents, e.g., 1 is Monday. 
+ """ + + +class ContributionCalendarMonth(sgqlc.types.Type): + """A month of contributions in a user's contribution graph.""" + + __schema__ = github_schema + __field_names__ = ("first_day", "name", "total_weeks", "year") + first_day = sgqlc.types.Field(sgqlc.types.non_null(Date), graphql_name="firstDay") + """The date of the first day of this month.""" + + name = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="name") + """The name of the month.""" + + total_weeks = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="totalWeeks") + """How many weeks started in this month.""" + + year = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="year") + """The year the month occurred in.""" + + +class ContributionCalendarWeek(sgqlc.types.Type): + """A week of contributions in a user's contribution graph.""" + + __schema__ = github_schema + __field_names__ = ("contribution_days", "first_day") + contribution_days = sgqlc.types.Field( + sgqlc.types.non_null(sgqlc.types.list_of(sgqlc.types.non_null(ContributionCalendarDay))), graphql_name="contributionDays" + ) + """The days of contributions in this week.""" + + first_day = sgqlc.types.Field(sgqlc.types.non_null(Date), graphql_name="firstDay") + """The date of the earliest square in this week.""" + + +class ContributionsCollection(sgqlc.types.Type): + """A contributions collection aggregates contributions such as opened + issues and commits created by a user. 
+ """ + + __schema__ = github_schema + __field_names__ = ( + "commit_contributions_by_repository", + "contribution_calendar", + "contribution_years", + "does_end_in_current_month", + "earliest_restricted_contribution_date", + "ended_at", + "first_issue_contribution", + "first_pull_request_contribution", + "first_repository_contribution", + "has_activity_in_the_past", + "has_any_contributions", + "has_any_restricted_contributions", + "is_single_day", + "issue_contributions", + "issue_contributions_by_repository", + "joined_git_hub_contribution", + "latest_restricted_contribution_date", + "most_recent_collection_with_activity", + "most_recent_collection_without_activity", + "popular_issue_contribution", + "popular_pull_request_contribution", + "pull_request_contributions", + "pull_request_contributions_by_repository", + "pull_request_review_contributions", + "pull_request_review_contributions_by_repository", + "repository_contributions", + "restricted_contributions_count", + "started_at", + "total_commit_contributions", + "total_issue_contributions", + "total_pull_request_contributions", + "total_pull_request_review_contributions", + "total_repositories_with_contributed_commits", + "total_repositories_with_contributed_issues", + "total_repositories_with_contributed_pull_request_reviews", + "total_repositories_with_contributed_pull_requests", + "total_repository_contributions", + "user", + ) + commit_contributions_by_repository = sgqlc.types.Field( + sgqlc.types.non_null(sgqlc.types.list_of(sgqlc.types.non_null(CommitContributionsByRepository))), + graphql_name="commitContributionsByRepository", + args=sgqlc.types.ArgDict((("max_repositories", sgqlc.types.Arg(Int, graphql_name="maxRepositories", default=25)),)), + ) + """Commit contributions made by the user, grouped by repository. + + Arguments: + + * `max_repositories` (`Int`): How many repositories should be + included. 
(default: `25`) + """ + + contribution_calendar = sgqlc.types.Field(sgqlc.types.non_null(ContributionCalendar), graphql_name="contributionCalendar") + """A calendar of this user's contributions on GitHub.""" + + contribution_years = sgqlc.types.Field( + sgqlc.types.non_null(sgqlc.types.list_of(sgqlc.types.non_null(Int))), graphql_name="contributionYears" + ) + """The years the user has been making contributions with the most + recent year first. + """ + + does_end_in_current_month = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="doesEndInCurrentMonth") + """Determine if this collection's time span ends in the current + month. + """ + + earliest_restricted_contribution_date = sgqlc.types.Field(Date, graphql_name="earliestRestrictedContributionDate") + """The date of the first restricted contribution the user made in + this time period. Can only be non-null when the user has enabled + private contribution counts. + """ + + ended_at = sgqlc.types.Field(sgqlc.types.non_null(DateTime), graphql_name="endedAt") + """The ending date and time of this collection.""" + + first_issue_contribution = sgqlc.types.Field("CreatedIssueOrRestrictedContribution", graphql_name="firstIssueContribution") + """The first issue the user opened on GitHub. This will be null if + that issue was opened outside the collection's time range and + ignoreTimeRange is false. If the issue is not visible but the user + has opted to show private contributions, a RestrictedContribution + will be returned. + """ + + first_pull_request_contribution = sgqlc.types.Field( + "CreatedPullRequestOrRestrictedContribution", graphql_name="firstPullRequestContribution" + ) + """The first pull request the user opened on GitHub. This will be + null if that pull request was opened outside the collection's time + range and ignoreTimeRange is not true. If the pull request is not + visible but the user has opted to show private contributions, a + RestrictedContribution will be returned. 
+ """ + + first_repository_contribution = sgqlc.types.Field( + "CreatedRepositoryOrRestrictedContribution", graphql_name="firstRepositoryContribution" + ) + """The first repository the user created on GitHub. This will be null + if that first repository was created outside the collection's time + range and ignoreTimeRange is false. If the repository is not + visible, then a RestrictedContribution is returned. + """ + + has_activity_in_the_past = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="hasActivityInThePast") + """Does the user have any more activity in the timeline that occurred + prior to the collection's time range? + """ + + has_any_contributions = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="hasAnyContributions") + """Determine if there are any contributions in this collection.""" + + has_any_restricted_contributions = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="hasAnyRestrictedContributions") + """Determine if the user made any contributions in this time frame + whose details are not visible because they were made in a private + repository. Can only be true if the user enabled private + contribution counts. + """ + + is_single_day = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="isSingleDay") + """Whether or not the collector's time span is all within the same + day. 
+ """ + + issue_contributions = sgqlc.types.Field( + sgqlc.types.non_null("CreatedIssueContributionConnection"), + graphql_name="issueContributions", + args=sgqlc.types.ArgDict( + ( + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ("exclude_first", sgqlc.types.Arg(Boolean, graphql_name="excludeFirst", default=False)), + ("exclude_popular", sgqlc.types.Arg(Boolean, graphql_name="excludePopular", default=False)), + ("order_by", sgqlc.types.Arg(ContributionOrder, graphql_name="orderBy", default={"direction": "DESC"})), + ) + ), + ) + """A list of issues the user opened. + + Arguments: + + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. + * `exclude_first` (`Boolean`): Should the user's first issue ever + be excluded from the result. (default: `false`) + * `exclude_popular` (`Boolean`): Should the user's most commented + issue be excluded from the result. (default: `false`) + * `order_by` (`ContributionOrder`): Ordering options for + contributions returned from the connection. 
(default: + `{direction: DESC}`) + """ + + issue_contributions_by_repository = sgqlc.types.Field( + sgqlc.types.non_null(sgqlc.types.list_of(sgqlc.types.non_null("IssueContributionsByRepository"))), + graphql_name="issueContributionsByRepository", + args=sgqlc.types.ArgDict( + ( + ("max_repositories", sgqlc.types.Arg(Int, graphql_name="maxRepositories", default=25)), + ("exclude_first", sgqlc.types.Arg(Boolean, graphql_name="excludeFirst", default=False)), + ("exclude_popular", sgqlc.types.Arg(Boolean, graphql_name="excludePopular", default=False)), + ) + ), + ) + """Issue contributions made by the user, grouped by repository. + + Arguments: + + * `max_repositories` (`Int`): How many repositories should be + included. (default: `25`) + * `exclude_first` (`Boolean`): Should the user's first issue ever + be excluded from the result. (default: `false`) + * `exclude_popular` (`Boolean`): Should the user's most commented + issue be excluded from the result. (default: `false`) + """ + + joined_git_hub_contribution = sgqlc.types.Field("JoinedGitHubContribution", graphql_name="joinedGitHubContribution") + """When the user signed up for GitHub. This will be null if that sign + up date falls outside the collection's time range and + ignoreTimeRange is false. + """ + + latest_restricted_contribution_date = sgqlc.types.Field(Date, graphql_name="latestRestrictedContributionDate") + """The date of the most recent restricted contribution the user made + in this time period. Can only be non-null when the user has + enabled private contribution counts. + """ + + most_recent_collection_with_activity = sgqlc.types.Field("ContributionsCollection", graphql_name="mostRecentCollectionWithActivity") + """When this collection's time range does not include any activity + from the user, use this to get a different collection from an + earlier time range that does have activity. 
+ """ + + most_recent_collection_without_activity = sgqlc.types.Field( + "ContributionsCollection", graphql_name="mostRecentCollectionWithoutActivity" + ) + """Returns a different contributions collection from an earlier time + range than this one that does not have any contributions. + """ + + popular_issue_contribution = sgqlc.types.Field("CreatedIssueContribution", graphql_name="popularIssueContribution") + """The issue the user opened on GitHub that received the most + comments in the specified time frame. + """ + + popular_pull_request_contribution = sgqlc.types.Field("CreatedPullRequestContribution", graphql_name="popularPullRequestContribution") + """The pull request the user opened on GitHub that received the most + comments in the specified time frame. + """ + + pull_request_contributions = sgqlc.types.Field( + sgqlc.types.non_null("CreatedPullRequestContributionConnection"), + graphql_name="pullRequestContributions", + args=sgqlc.types.ArgDict( + ( + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ("exclude_first", sgqlc.types.Arg(Boolean, graphql_name="excludeFirst", default=False)), + ("exclude_popular", sgqlc.types.Arg(Boolean, graphql_name="excludePopular", default=False)), + ("order_by", sgqlc.types.Arg(ContributionOrder, graphql_name="orderBy", default={"direction": "DESC"})), + ) + ), + ) + """Pull request contributions made by the user. + + Arguments: + + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. 
+ * `exclude_first` (`Boolean`): Should the user's first pull + request ever be excluded from the result. (default: `false`) + * `exclude_popular` (`Boolean`): Should the user's most commented + pull request be excluded from the result. (default: `false`) + * `order_by` (`ContributionOrder`): Ordering options for + contributions returned from the connection. (default: + `{direction: DESC}`) + """ + + pull_request_contributions_by_repository = sgqlc.types.Field( + sgqlc.types.non_null(sgqlc.types.list_of(sgqlc.types.non_null("PullRequestContributionsByRepository"))), + graphql_name="pullRequestContributionsByRepository", + args=sgqlc.types.ArgDict( + ( + ("max_repositories", sgqlc.types.Arg(Int, graphql_name="maxRepositories", default=25)), + ("exclude_first", sgqlc.types.Arg(Boolean, graphql_name="excludeFirst", default=False)), + ("exclude_popular", sgqlc.types.Arg(Boolean, graphql_name="excludePopular", default=False)), + ) + ), + ) + """Pull request contributions made by the user, grouped by + repository. + + Arguments: + + * `max_repositories` (`Int`): How many repositories should be + included. (default: `25`) + * `exclude_first` (`Boolean`): Should the user's first pull + request ever be excluded from the result. (default: `false`) + * `exclude_popular` (`Boolean`): Should the user's most commented + pull request be excluded from the result. 
(default: `false`) + """ + + pull_request_review_contributions = sgqlc.types.Field( + sgqlc.types.non_null("CreatedPullRequestReviewContributionConnection"), + graphql_name="pullRequestReviewContributions", + args=sgqlc.types.ArgDict( + ( + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ("order_by", sgqlc.types.Arg(ContributionOrder, graphql_name="orderBy", default={"direction": "DESC"})), + ) + ), + ) + """Pull request review contributions made by the user. + + Arguments: + + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. + * `order_by` (`ContributionOrder`): Ordering options for + contributions returned from the connection. (default: + `{direction: DESC}`) + """ + + pull_request_review_contributions_by_repository = sgqlc.types.Field( + sgqlc.types.non_null(sgqlc.types.list_of(sgqlc.types.non_null("PullRequestReviewContributionsByRepository"))), + graphql_name="pullRequestReviewContributionsByRepository", + args=sgqlc.types.ArgDict((("max_repositories", sgqlc.types.Arg(Int, graphql_name="maxRepositories", default=25)),)), + ) + """Pull request review contributions made by the user, grouped by + repository. + + Arguments: + + * `max_repositories` (`Int`): How many repositories should be + included. 
(default: `25`) + """ + + repository_contributions = sgqlc.types.Field( + sgqlc.types.non_null("CreatedRepositoryContributionConnection"), + graphql_name="repositoryContributions", + args=sgqlc.types.ArgDict( + ( + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ("exclude_first", sgqlc.types.Arg(Boolean, graphql_name="excludeFirst", default=False)), + ("order_by", sgqlc.types.Arg(ContributionOrder, graphql_name="orderBy", default={"direction": "DESC"})), + ) + ), + ) + """A list of repositories owned by the user that the user created in + this time range. + + Arguments: + + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. + * `exclude_first` (`Boolean`): Should the user's first repository + ever be excluded from the result. (default: `false`) + * `order_by` (`ContributionOrder`): Ordering options for + contributions returned from the connection. (default: + `{direction: DESC}`) + """ + + restricted_contributions_count = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="restrictedContributionsCount") + """A count of contributions made by the user that the viewer cannot + access. Only non-zero when the user has chosen to share their + private contribution counts. 
+ """ + + started_at = sgqlc.types.Field(sgqlc.types.non_null(DateTime), graphql_name="startedAt") + """The beginning date and time of this collection.""" + + total_commit_contributions = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="totalCommitContributions") + """How many commits were made by the user in this time span.""" + + total_issue_contributions = sgqlc.types.Field( + sgqlc.types.non_null(Int), + graphql_name="totalIssueContributions", + args=sgqlc.types.ArgDict( + ( + ("exclude_first", sgqlc.types.Arg(Boolean, graphql_name="excludeFirst", default=False)), + ("exclude_popular", sgqlc.types.Arg(Boolean, graphql_name="excludePopular", default=False)), + ) + ), + ) + """How many issues the user opened. + + Arguments: + + * `exclude_first` (`Boolean`): Should the user's first issue ever + be excluded from this count. (default: `false`) + * `exclude_popular` (`Boolean`): Should the user's most commented + issue be excluded from this count. (default: `false`) + """ + + total_pull_request_contributions = sgqlc.types.Field( + sgqlc.types.non_null(Int), + graphql_name="totalPullRequestContributions", + args=sgqlc.types.ArgDict( + ( + ("exclude_first", sgqlc.types.Arg(Boolean, graphql_name="excludeFirst", default=False)), + ("exclude_popular", sgqlc.types.Arg(Boolean, graphql_name="excludePopular", default=False)), + ) + ), + ) + """How many pull requests the user opened. + + Arguments: + + * `exclude_first` (`Boolean`): Should the user's first pull + request ever be excluded from this count. (default: `false`) + * `exclude_popular` (`Boolean`): Should the user's most commented + pull request be excluded from this count. 
(default: `false`) + """ + + total_pull_request_review_contributions = sgqlc.types.Field( + sgqlc.types.non_null(Int), graphql_name="totalPullRequestReviewContributions" + ) + """How many pull request reviews the user left.""" + + total_repositories_with_contributed_commits = sgqlc.types.Field( + sgqlc.types.non_null(Int), graphql_name="totalRepositoriesWithContributedCommits" + ) + """How many different repositories the user committed to.""" + + total_repositories_with_contributed_issues = sgqlc.types.Field( + sgqlc.types.non_null(Int), + graphql_name="totalRepositoriesWithContributedIssues", + args=sgqlc.types.ArgDict( + ( + ("exclude_first", sgqlc.types.Arg(Boolean, graphql_name="excludeFirst", default=False)), + ("exclude_popular", sgqlc.types.Arg(Boolean, graphql_name="excludePopular", default=False)), + ) + ), + ) + """How many different repositories the user opened issues in. + + Arguments: + + * `exclude_first` (`Boolean`): Should the user's first issue ever + be excluded from this count. (default: `false`) + * `exclude_popular` (`Boolean`): Should the user's most commented + issue be excluded from this count. (default: `false`) + """ + + total_repositories_with_contributed_pull_request_reviews = sgqlc.types.Field( + sgqlc.types.non_null(Int), graphql_name="totalRepositoriesWithContributedPullRequestReviews" + ) + """How many different repositories the user left pull request reviews + in. + """ + + total_repositories_with_contributed_pull_requests = sgqlc.types.Field( + sgqlc.types.non_null(Int), + graphql_name="totalRepositoriesWithContributedPullRequests", + args=sgqlc.types.ArgDict( + ( + ("exclude_first", sgqlc.types.Arg(Boolean, graphql_name="excludeFirst", default=False)), + ("exclude_popular", sgqlc.types.Arg(Boolean, graphql_name="excludePopular", default=False)), + ) + ), + ) + """How many different repositories the user opened pull requests in. 
+ + Arguments: + + * `exclude_first` (`Boolean`): Should the user's first pull + request ever be excluded from this count. (default: `false`) + * `exclude_popular` (`Boolean`): Should the user's most commented + pull request be excluded from this count. (default: `false`) + """ + + total_repository_contributions = sgqlc.types.Field( + sgqlc.types.non_null(Int), + graphql_name="totalRepositoryContributions", + args=sgqlc.types.ArgDict((("exclude_first", sgqlc.types.Arg(Boolean, graphql_name="excludeFirst", default=False)),)), + ) + """How many repositories the user created. + + Arguments: + + * `exclude_first` (`Boolean`): Should the user's first repository + ever be excluded from this count. (default: `false`) + """ + + user = sgqlc.types.Field(sgqlc.types.non_null("User"), graphql_name="user") + """The user who made the contributions in this collection.""" + + +class ConvertProjectCardNoteToIssuePayload(sgqlc.types.Type): + """Autogenerated return type of ConvertProjectCardNoteToIssue""" + + __schema__ = github_schema + __field_names__ = ("client_mutation_id", "project_card") + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + project_card = sgqlc.types.Field("ProjectCard", graphql_name="projectCard") + """The updated ProjectCard.""" + + +class ConvertPullRequestToDraftPayload(sgqlc.types.Type): + """Autogenerated return type of ConvertPullRequestToDraft""" + + __schema__ = github_schema + __field_names__ = ("client_mutation_id", "pull_request") + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + pull_request = sgqlc.types.Field("PullRequest", graphql_name="pullRequest") + """The pull request that is now a draft.""" + + +class CreateBranchProtectionRulePayload(sgqlc.types.Type): + """Autogenerated return type of CreateBranchProtectionRule""" + + __schema__ = 
github_schema + __field_names__ = ("branch_protection_rule", "client_mutation_id") + branch_protection_rule = sgqlc.types.Field("BranchProtectionRule", graphql_name="branchProtectionRule") + """The newly created BranchProtectionRule.""" + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class CreateCheckRunPayload(sgqlc.types.Type): + """Autogenerated return type of CreateCheckRun""" + + __schema__ = github_schema + __field_names__ = ("check_run", "client_mutation_id") + check_run = sgqlc.types.Field("CheckRun", graphql_name="checkRun") + """The newly created check run.""" + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class CreateCheckSuitePayload(sgqlc.types.Type): + """Autogenerated return type of CreateCheckSuite""" + + __schema__ = github_schema + __field_names__ = ("check_suite", "client_mutation_id") + check_suite = sgqlc.types.Field("CheckSuite", graphql_name="checkSuite") + """The newly created check suite.""" + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class CreateCommitOnBranchPayload(sgqlc.types.Type): + """Autogenerated return type of CreateCommitOnBranch""" + + __schema__ = github_schema + __field_names__ = ("client_mutation_id", "commit", "ref") + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + commit = sgqlc.types.Field("Commit", graphql_name="commit") + """The new commit.""" + + ref = sgqlc.types.Field("Ref", graphql_name="ref") + """The ref which has been updated to point to the new commit.""" + + +class CreateDiscussionPayload(sgqlc.types.Type): + """Autogenerated return type of CreateDiscussion""" + + __schema__ = 
github_schema + __field_names__ = ("client_mutation_id", "discussion") + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + discussion = sgqlc.types.Field("Discussion", graphql_name="discussion") + """The discussion that was just created.""" + + +class CreateEnterpriseOrganizationPayload(sgqlc.types.Type): + """Autogenerated return type of CreateEnterpriseOrganization""" + + __schema__ = github_schema + __field_names__ = ("client_mutation_id", "enterprise", "organization") + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + enterprise = sgqlc.types.Field("Enterprise", graphql_name="enterprise") + """The enterprise that owns the created organization.""" + + organization = sgqlc.types.Field("Organization", graphql_name="organization") + """The organization that was created.""" + + +class CreateEnvironmentPayload(sgqlc.types.Type): + """Autogenerated return type of CreateEnvironment""" + + __schema__ = github_schema + __field_names__ = ("client_mutation_id", "environment") + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + environment = sgqlc.types.Field("Environment", graphql_name="environment") + """The new or existing environment.""" + + +class CreateIpAllowListEntryPayload(sgqlc.types.Type): + """Autogenerated return type of CreateIpAllowListEntry""" + + __schema__ = github_schema + __field_names__ = ("client_mutation_id", "ip_allow_list_entry") + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + ip_allow_list_entry = sgqlc.types.Field("IpAllowListEntry", graphql_name="ipAllowListEntry") + """The IP allow list entry that was created.""" + + +class 
CreateIssuePayload(sgqlc.types.Type): + """Autogenerated return type of CreateIssue""" + + __schema__ = github_schema + __field_names__ = ("client_mutation_id", "issue") + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + issue = sgqlc.types.Field("Issue", graphql_name="issue") + """The new issue.""" + + +class CreateMigrationSourcePayload(sgqlc.types.Type): + """Autogenerated return type of CreateMigrationSource""" + + __schema__ = github_schema + __field_names__ = ("client_mutation_id", "migration_source") + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + migration_source = sgqlc.types.Field("MigrationSource", graphql_name="migrationSource") + """The created Octoshift migration source.""" + + +class CreateProjectPayload(sgqlc.types.Type): + """Autogenerated return type of CreateProject""" + + __schema__ = github_schema + __field_names__ = ("client_mutation_id", "project") + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + project = sgqlc.types.Field("Project", graphql_name="project") + """The new project.""" + + +class CreatePullRequestPayload(sgqlc.types.Type): + """Autogenerated return type of CreatePullRequest""" + + __schema__ = github_schema + __field_names__ = ("client_mutation_id", "pull_request") + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + pull_request = sgqlc.types.Field("PullRequest", graphql_name="pullRequest") + """The new pull request.""" + + +class CreateRefPayload(sgqlc.types.Type): + """Autogenerated return type of CreateRef""" + + __schema__ = github_schema + __field_names__ = ("client_mutation_id", "ref") + client_mutation_id = 
sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + ref = sgqlc.types.Field("Ref", graphql_name="ref") + """The newly created ref.""" + + +class CreateRepositoryPayload(sgqlc.types.Type): + """Autogenerated return type of CreateRepository""" + + __schema__ = github_schema + __field_names__ = ("client_mutation_id", "repository") + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + repository = sgqlc.types.Field("Repository", graphql_name="repository") + """The new repository.""" + + +class CreateSponsorsTierPayload(sgqlc.types.Type): + """Autogenerated return type of CreateSponsorsTier""" + + __schema__ = github_schema + __field_names__ = ("client_mutation_id", "sponsors_tier") + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + sponsors_tier = sgqlc.types.Field("SponsorsTier", graphql_name="sponsorsTier") + """The new tier.""" + + +class CreateSponsorshipPayload(sgqlc.types.Type): + """Autogenerated return type of CreateSponsorship""" + + __schema__ = github_schema + __field_names__ = ("client_mutation_id", "sponsorship") + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + sponsorship = sgqlc.types.Field("Sponsorship", graphql_name="sponsorship") + """The sponsorship that was started.""" + + +class CreateTeamDiscussionCommentPayload(sgqlc.types.Type): + """Autogenerated return type of CreateTeamDiscussionComment""" + + __schema__ = github_schema + __field_names__ = ("client_mutation_id", "team_discussion_comment") + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + team_discussion_comment 
= sgqlc.types.Field("TeamDiscussionComment", graphql_name="teamDiscussionComment") + """The new comment.""" + + +class CreateTeamDiscussionPayload(sgqlc.types.Type): + """Autogenerated return type of CreateTeamDiscussion""" + + __schema__ = github_schema + __field_names__ = ("client_mutation_id", "team_discussion") + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + team_discussion = sgqlc.types.Field("TeamDiscussion", graphql_name="teamDiscussion") + """The new discussion.""" + + +class CreatedCommitContributionConnection(sgqlc.types.relay.Connection): + """The connection type for CreatedCommitContribution.""" + + __schema__ = github_schema + __field_names__ = ("edges", "nodes", "page_info", "total_count") + edges = sgqlc.types.Field(sgqlc.types.list_of("CreatedCommitContributionEdge"), graphql_name="edges") + """A list of edges.""" + + nodes = sgqlc.types.Field(sgqlc.types.list_of("CreatedCommitContribution"), graphql_name="nodes") + """A list of nodes.""" + + page_info = sgqlc.types.Field(sgqlc.types.non_null("PageInfo"), graphql_name="pageInfo") + """Information to aid in pagination.""" + + total_count = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="totalCount") + """Identifies the total count of commits across days and repositories + in the connection. 
+ """ + + +class CreatedCommitContributionEdge(sgqlc.types.Type): + """An edge in a connection.""" + + __schema__ = github_schema + __field_names__ = ("cursor", "node") + cursor = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="cursor") + """A cursor for use in pagination.""" + + node = sgqlc.types.Field("CreatedCommitContribution", graphql_name="node") + """The item at the end of the edge.""" + + +class CreatedIssueContributionConnection(sgqlc.types.relay.Connection): + """The connection type for CreatedIssueContribution.""" + + __schema__ = github_schema + __field_names__ = ("edges", "nodes", "page_info", "total_count") + edges = sgqlc.types.Field(sgqlc.types.list_of("CreatedIssueContributionEdge"), graphql_name="edges") + """A list of edges.""" + + nodes = sgqlc.types.Field(sgqlc.types.list_of("CreatedIssueContribution"), graphql_name="nodes") + """A list of nodes.""" + + page_info = sgqlc.types.Field(sgqlc.types.non_null("PageInfo"), graphql_name="pageInfo") + """Information to aid in pagination.""" + + total_count = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="totalCount") + """Identifies the total count of items in the connection.""" + + +class CreatedIssueContributionEdge(sgqlc.types.Type): + """An edge in a connection.""" + + __schema__ = github_schema + __field_names__ = ("cursor", "node") + cursor = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="cursor") + """A cursor for use in pagination.""" + + node = sgqlc.types.Field("CreatedIssueContribution", graphql_name="node") + """The item at the end of the edge.""" + + +class CreatedPullRequestContributionConnection(sgqlc.types.relay.Connection): + """The connection type for CreatedPullRequestContribution.""" + + __schema__ = github_schema + __field_names__ = ("edges", "nodes", "page_info", "total_count") + edges = sgqlc.types.Field(sgqlc.types.list_of("CreatedPullRequestContributionEdge"), graphql_name="edges") + """A list of edges.""" + + nodes = 
sgqlc.types.Field(sgqlc.types.list_of("CreatedPullRequestContribution"), graphql_name="nodes") + """A list of nodes.""" + + page_info = sgqlc.types.Field(sgqlc.types.non_null("PageInfo"), graphql_name="pageInfo") + """Information to aid in pagination.""" + + total_count = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="totalCount") + """Identifies the total count of items in the connection.""" + + +class CreatedPullRequestContributionEdge(sgqlc.types.Type): + """An edge in a connection.""" + + __schema__ = github_schema + __field_names__ = ("cursor", "node") + cursor = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="cursor") + """A cursor for use in pagination.""" + + node = sgqlc.types.Field("CreatedPullRequestContribution", graphql_name="node") + """The item at the end of the edge.""" + + +class CreatedPullRequestReviewContributionConnection(sgqlc.types.relay.Connection): + """The connection type for CreatedPullRequestReviewContribution.""" + + __schema__ = github_schema + __field_names__ = ("edges", "nodes", "page_info", "total_count") + edges = sgqlc.types.Field(sgqlc.types.list_of("CreatedPullRequestReviewContributionEdge"), graphql_name="edges") + """A list of edges.""" + + nodes = sgqlc.types.Field(sgqlc.types.list_of("CreatedPullRequestReviewContribution"), graphql_name="nodes") + """A list of nodes.""" + + page_info = sgqlc.types.Field(sgqlc.types.non_null("PageInfo"), graphql_name="pageInfo") + """Information to aid in pagination.""" + + total_count = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="totalCount") + """Identifies the total count of items in the connection.""" + + +class CreatedPullRequestReviewContributionEdge(sgqlc.types.Type): + """An edge in a connection.""" + + __schema__ = github_schema + __field_names__ = ("cursor", "node") + cursor = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="cursor") + """A cursor for use in pagination.""" + + node = 
sgqlc.types.Field("CreatedPullRequestReviewContribution", graphql_name="node") + """The item at the end of the edge.""" + + +class CreatedRepositoryContributionConnection(sgqlc.types.relay.Connection): + """The connection type for CreatedRepositoryContribution.""" + + __schema__ = github_schema + __field_names__ = ("edges", "nodes", "page_info", "total_count") + edges = sgqlc.types.Field(sgqlc.types.list_of("CreatedRepositoryContributionEdge"), graphql_name="edges") + """A list of edges.""" + + nodes = sgqlc.types.Field(sgqlc.types.list_of("CreatedRepositoryContribution"), graphql_name="nodes") + """A list of nodes.""" + + page_info = sgqlc.types.Field(sgqlc.types.non_null("PageInfo"), graphql_name="pageInfo") + """Information to aid in pagination.""" + + total_count = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="totalCount") + """Identifies the total count of items in the connection.""" + + +class CreatedRepositoryContributionEdge(sgqlc.types.Type): + """An edge in a connection.""" + + __schema__ = github_schema + __field_names__ = ("cursor", "node") + cursor = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="cursor") + """A cursor for use in pagination.""" + + node = sgqlc.types.Field("CreatedRepositoryContribution", graphql_name="node") + """The item at the end of the edge.""" + + +class DeclineTopicSuggestionPayload(sgqlc.types.Type): + """Autogenerated return type of DeclineTopicSuggestion""" + + __schema__ = github_schema + __field_names__ = ("client_mutation_id", "topic") + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + topic = sgqlc.types.Field("Topic", graphql_name="topic") + """The declined topic.""" + + +class Deletable(sgqlc.types.Interface): + """Entities that can be deleted.""" + + __schema__ = github_schema + __field_names__ = ("viewer_can_delete",) + viewer_can_delete = sgqlc.types.Field(sgqlc.types.non_null(Boolean), 
graphql_name="viewerCanDelete") + """Check if the current viewer can delete this object.""" + + +class DeleteBranchProtectionRulePayload(sgqlc.types.Type): + """Autogenerated return type of DeleteBranchProtectionRule""" + + __schema__ = github_schema + __field_names__ = ("client_mutation_id",) + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class DeleteDeploymentPayload(sgqlc.types.Type): + """Autogenerated return type of DeleteDeployment""" + + __schema__ = github_schema + __field_names__ = ("client_mutation_id",) + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class DeleteDiscussionCommentPayload(sgqlc.types.Type): + """Autogenerated return type of DeleteDiscussionComment""" + + __schema__ = github_schema + __field_names__ = ("client_mutation_id", "comment") + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + comment = sgqlc.types.Field("DiscussionComment", graphql_name="comment") + """The discussion comment that was just deleted.""" + + +class DeleteDiscussionPayload(sgqlc.types.Type): + """Autogenerated return type of DeleteDiscussion""" + + __schema__ = github_schema + __field_names__ = ("client_mutation_id", "discussion") + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + discussion = sgqlc.types.Field("Discussion", graphql_name="discussion") + """The discussion that was just deleted.""" + + +class DeleteEnvironmentPayload(sgqlc.types.Type): + """Autogenerated return type of DeleteEnvironment""" + + __schema__ = github_schema + __field_names__ = ("client_mutation_id",) + client_mutation_id = sgqlc.types.Field(String, 
graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class DeleteIpAllowListEntryPayload(sgqlc.types.Type): + """Autogenerated return type of DeleteIpAllowListEntry""" + + __schema__ = github_schema + __field_names__ = ("client_mutation_id", "ip_allow_list_entry") + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + ip_allow_list_entry = sgqlc.types.Field("IpAllowListEntry", graphql_name="ipAllowListEntry") + """The IP allow list entry that was deleted.""" + + +class DeleteIssueCommentPayload(sgqlc.types.Type): + """Autogenerated return type of DeleteIssueComment""" + + __schema__ = github_schema + __field_names__ = ("client_mutation_id",) + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class DeleteIssuePayload(sgqlc.types.Type): + """Autogenerated return type of DeleteIssue""" + + __schema__ = github_schema + __field_names__ = ("client_mutation_id", "repository") + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + repository = sgqlc.types.Field("Repository", graphql_name="repository") + """The repository the issue belonged to""" + + +class DeleteProjectCardPayload(sgqlc.types.Type): + """Autogenerated return type of DeleteProjectCard""" + + __schema__ = github_schema + __field_names__ = ("client_mutation_id", "column", "deleted_card_id") + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + column = sgqlc.types.Field("ProjectColumn", graphql_name="column") + """The column the deleted card was in.""" + + deleted_card_id = sgqlc.types.Field(ID, graphql_name="deletedCardId") + """The deleted card ID.""" + + 
+class DeleteProjectColumnPayload(sgqlc.types.Type): + """Autogenerated return type of DeleteProjectColumn""" + + __schema__ = github_schema + __field_names__ = ("client_mutation_id", "deleted_column_id", "project") + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + deleted_column_id = sgqlc.types.Field(ID, graphql_name="deletedColumnId") + """The deleted column ID.""" + + project = sgqlc.types.Field("Project", graphql_name="project") + """The project the deleted column was in.""" + + +class DeleteProjectNextItemPayload(sgqlc.types.Type): + """Autogenerated return type of DeleteProjectNextItem""" + + __schema__ = github_schema + __field_names__ = ("client_mutation_id", "deleted_item_id") + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + deleted_item_id = sgqlc.types.Field(ID, graphql_name="deletedItemId") + """The ID of the deleted item.""" + + +class DeleteProjectPayload(sgqlc.types.Type): + """Autogenerated return type of DeleteProject""" + + __schema__ = github_schema + __field_names__ = ("client_mutation_id", "owner") + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + owner = sgqlc.types.Field("ProjectOwner", graphql_name="owner") + """The repository or organization the project was removed from.""" + + +class DeletePullRequestReviewCommentPayload(sgqlc.types.Type): + """Autogenerated return type of DeletePullRequestReviewComment""" + + __schema__ = github_schema + __field_names__ = ("client_mutation_id", "pull_request_review") + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + pull_request_review = sgqlc.types.Field("PullRequestReview", 
graphql_name="pullRequestReview") + """The pull request review the deleted comment belonged to.""" + + +class DeletePullRequestReviewPayload(sgqlc.types.Type): + """Autogenerated return type of DeletePullRequestReview""" + + __schema__ = github_schema + __field_names__ = ("client_mutation_id", "pull_request_review") + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + pull_request_review = sgqlc.types.Field("PullRequestReview", graphql_name="pullRequestReview") + """The deleted pull request review.""" + + +class DeleteRefPayload(sgqlc.types.Type): + """Autogenerated return type of DeleteRef""" + + __schema__ = github_schema + __field_names__ = ("client_mutation_id",) + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class DeleteTeamDiscussionCommentPayload(sgqlc.types.Type): + """Autogenerated return type of DeleteTeamDiscussionComment""" + + __schema__ = github_schema + __field_names__ = ("client_mutation_id",) + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class DeleteTeamDiscussionPayload(sgqlc.types.Type): + """Autogenerated return type of DeleteTeamDiscussion""" + + __schema__ = github_schema + __field_names__ = ("client_mutation_id",) + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class DeleteVerifiableDomainPayload(sgqlc.types.Type): + """Autogenerated return type of DeleteVerifiableDomain""" + + __schema__ = github_schema + __field_names__ = ("client_mutation_id", "owner") + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + owner = 
sgqlc.types.Field("VerifiableDomainOwner", graphql_name="owner") + """The owning account from which the domain was deleted.""" + + +class DependabotUpdateError(sgqlc.types.Type): + """An error produced from a Dependabot Update""" + + __schema__ = github_schema + __field_names__ = ("body", "error_type", "title") + body = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="body") + """The body of the error""" + + error_type = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="errorType") + """The error code""" + + title = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="title") + """The title of the error""" + + +class DeployKeyConnection(sgqlc.types.relay.Connection): + """The connection type for DeployKey.""" + + __schema__ = github_schema + __field_names__ = ("edges", "nodes", "page_info", "total_count") + edges = sgqlc.types.Field(sgqlc.types.list_of("DeployKeyEdge"), graphql_name="edges") + """A list of edges.""" + + nodes = sgqlc.types.Field(sgqlc.types.list_of("DeployKey"), graphql_name="nodes") + """A list of nodes.""" + + page_info = sgqlc.types.Field(sgqlc.types.non_null("PageInfo"), graphql_name="pageInfo") + """Information to aid in pagination.""" + + total_count = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="totalCount") + """Identifies the total count of items in the connection.""" + + +class DeployKeyEdge(sgqlc.types.Type): + """An edge in a connection.""" + + __schema__ = github_schema + __field_names__ = ("cursor", "node") + cursor = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="cursor") + """A cursor for use in pagination.""" + + node = sgqlc.types.Field("DeployKey", graphql_name="node") + """The item at the end of the edge.""" + + +class DeploymentConnection(sgqlc.types.relay.Connection): + """The connection type for Deployment.""" + + __schema__ = github_schema + __field_names__ = ("edges", "nodes", "page_info", "total_count") + edges = 
sgqlc.types.Field(sgqlc.types.list_of("DeploymentEdge"), graphql_name="edges") + """A list of edges.""" + + nodes = sgqlc.types.Field(sgqlc.types.list_of("Deployment"), graphql_name="nodes") + """A list of nodes.""" + + page_info = sgqlc.types.Field(sgqlc.types.non_null("PageInfo"), graphql_name="pageInfo") + """Information to aid in pagination.""" + + total_count = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="totalCount") + """Identifies the total count of items in the connection.""" + + +class DeploymentEdge(sgqlc.types.Type): + """An edge in a connection.""" + + __schema__ = github_schema + __field_names__ = ("cursor", "node") + cursor = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="cursor") + """A cursor for use in pagination.""" + + node = sgqlc.types.Field("Deployment", graphql_name="node") + """The item at the end of the edge.""" + + +class DeploymentProtectionRule(sgqlc.types.Type): + """A protection rule.""" + + __schema__ = github_schema + __field_names__ = ("database_id", "reviewers", "timeout", "type") + database_id = sgqlc.types.Field(Int, graphql_name="databaseId") + """Identifies the primary key from the database.""" + + reviewers = sgqlc.types.Field( + sgqlc.types.non_null("DeploymentReviewerConnection"), + graphql_name="reviewers", + args=sgqlc.types.ArgDict( + ( + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ) + ), + ) + """The teams or users that can review the deployment + + Arguments: + + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. 
+ * `last` (`Int`): Returns the last _n_ elements from the list. + """ + + timeout = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="timeout") + """The timeout in minutes for this protection rule.""" + + type = sgqlc.types.Field(sgqlc.types.non_null(DeploymentProtectionRuleType), graphql_name="type") + """The type of protection rule.""" + + +class DeploymentProtectionRuleConnection(sgqlc.types.relay.Connection): + """The connection type for DeploymentProtectionRule.""" + + __schema__ = github_schema + __field_names__ = ("edges", "nodes", "page_info", "total_count") + edges = sgqlc.types.Field(sgqlc.types.list_of("DeploymentProtectionRuleEdge"), graphql_name="edges") + """A list of edges.""" + + nodes = sgqlc.types.Field(sgqlc.types.list_of(DeploymentProtectionRule), graphql_name="nodes") + """A list of nodes.""" + + page_info = sgqlc.types.Field(sgqlc.types.non_null("PageInfo"), graphql_name="pageInfo") + """Information to aid in pagination.""" + + total_count = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="totalCount") + """Identifies the total count of items in the connection.""" + + +class DeploymentProtectionRuleEdge(sgqlc.types.Type): + """An edge in a connection.""" + + __schema__ = github_schema + __field_names__ = ("cursor", "node") + cursor = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="cursor") + """A cursor for use in pagination.""" + + node = sgqlc.types.Field(DeploymentProtectionRule, graphql_name="node") + """The item at the end of the edge.""" + + +class DeploymentRequest(sgqlc.types.Type): + """A request to deploy a workflow run to an environment.""" + + __schema__ = github_schema + __field_names__ = ("current_user_can_approve", "environment", "reviewers", "wait_timer", "wait_timer_started_at") + current_user_can_approve = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="currentUserCanApprove") + """Whether or not the current user can approve the deployment""" + + environment = 
sgqlc.types.Field(sgqlc.types.non_null("Environment"), graphql_name="environment") + """The target environment of the deployment""" + + reviewers = sgqlc.types.Field( + sgqlc.types.non_null("DeploymentReviewerConnection"), + graphql_name="reviewers", + args=sgqlc.types.ArgDict( + ( + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ) + ), + ) + """The teams or users that can review the deployment + + Arguments: + + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. + """ + + wait_timer = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="waitTimer") + """The wait timer in minutes configured in the environment""" + + wait_timer_started_at = sgqlc.types.Field(DateTime, graphql_name="waitTimerStartedAt") + """The wait timer in minutes configured in the environment""" + + +class DeploymentRequestConnection(sgqlc.types.relay.Connection): + """The connection type for DeploymentRequest.""" + + __schema__ = github_schema + __field_names__ = ("edges", "nodes", "page_info", "total_count") + edges = sgqlc.types.Field(sgqlc.types.list_of("DeploymentRequestEdge"), graphql_name="edges") + """A list of edges.""" + + nodes = sgqlc.types.Field(sgqlc.types.list_of(DeploymentRequest), graphql_name="nodes") + """A list of nodes.""" + + page_info = sgqlc.types.Field(sgqlc.types.non_null("PageInfo"), graphql_name="pageInfo") + """Information to aid in pagination.""" + + total_count = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="totalCount") + """Identifies 
the total count of items in the connection.""" + + +class DeploymentRequestEdge(sgqlc.types.Type): + """An edge in a connection.""" + + __schema__ = github_schema + __field_names__ = ("cursor", "node") + cursor = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="cursor") + """A cursor for use in pagination.""" + + node = sgqlc.types.Field(DeploymentRequest, graphql_name="node") + """The item at the end of the edge.""" + + +class DeploymentReviewConnection(sgqlc.types.relay.Connection): + """The connection type for DeploymentReview.""" + + __schema__ = github_schema + __field_names__ = ("edges", "nodes", "page_info", "total_count") + edges = sgqlc.types.Field(sgqlc.types.list_of("DeploymentReviewEdge"), graphql_name="edges") + """A list of edges.""" + + nodes = sgqlc.types.Field(sgqlc.types.list_of("DeploymentReview"), graphql_name="nodes") + """A list of nodes.""" + + page_info = sgqlc.types.Field(sgqlc.types.non_null("PageInfo"), graphql_name="pageInfo") + """Information to aid in pagination.""" + + total_count = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="totalCount") + """Identifies the total count of items in the connection.""" + + +class DeploymentReviewEdge(sgqlc.types.Type): + """An edge in a connection.""" + + __schema__ = github_schema + __field_names__ = ("cursor", "node") + cursor = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="cursor") + """A cursor for use in pagination.""" + + node = sgqlc.types.Field("DeploymentReview", graphql_name="node") + """The item at the end of the edge.""" + + +class DeploymentReviewerConnection(sgqlc.types.relay.Connection): + """The connection type for DeploymentReviewer.""" + + __schema__ = github_schema + __field_names__ = ("edges", "nodes", "page_info", "total_count") + edges = sgqlc.types.Field(sgqlc.types.list_of("DeploymentReviewerEdge"), graphql_name="edges") + """A list of edges.""" + + nodes = sgqlc.types.Field(sgqlc.types.list_of("DeploymentReviewer"), 
graphql_name="nodes") + """A list of nodes.""" + + page_info = sgqlc.types.Field(sgqlc.types.non_null("PageInfo"), graphql_name="pageInfo") + """Information to aid in pagination.""" + + total_count = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="totalCount") + """Identifies the total count of items in the connection.""" + + +class DeploymentReviewerEdge(sgqlc.types.Type): + """An edge in a connection.""" + + __schema__ = github_schema + __field_names__ = ("cursor", "node") + cursor = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="cursor") + """A cursor for use in pagination.""" + + node = sgqlc.types.Field("DeploymentReviewer", graphql_name="node") + """The item at the end of the edge.""" + + +class DeploymentStatusConnection(sgqlc.types.relay.Connection): + """The connection type for DeploymentStatus.""" + + __schema__ = github_schema + __field_names__ = ("edges", "nodes", "page_info", "total_count") + edges = sgqlc.types.Field(sgqlc.types.list_of("DeploymentStatusEdge"), graphql_name="edges") + """A list of edges.""" + + nodes = sgqlc.types.Field(sgqlc.types.list_of("DeploymentStatus"), graphql_name="nodes") + """A list of nodes.""" + + page_info = sgqlc.types.Field(sgqlc.types.non_null("PageInfo"), graphql_name="pageInfo") + """Information to aid in pagination.""" + + total_count = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="totalCount") + """Identifies the total count of items in the connection.""" + + +class DeploymentStatusEdge(sgqlc.types.Type): + """An edge in a connection.""" + + __schema__ = github_schema + __field_names__ = ("cursor", "node") + cursor = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="cursor") + """A cursor for use in pagination.""" + + node = sgqlc.types.Field("DeploymentStatus", graphql_name="node") + """The item at the end of the edge.""" + + +class DisablePullRequestAutoMergePayload(sgqlc.types.Type): + """Autogenerated return type of DisablePullRequestAutoMerge""" + + 
__schema__ = github_schema + __field_names__ = ("actor", "client_mutation_id", "pull_request") + actor = sgqlc.types.Field(Actor, graphql_name="actor") + """Identifies the actor who performed the event.""" + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + pull_request = sgqlc.types.Field("PullRequest", graphql_name="pullRequest") + """The pull request auto merge was disabled on.""" + + +class DiscussionCategoryConnection(sgqlc.types.relay.Connection): + """The connection type for DiscussionCategory.""" + + __schema__ = github_schema + __field_names__ = ("edges", "nodes", "page_info", "total_count") + edges = sgqlc.types.Field(sgqlc.types.list_of("DiscussionCategoryEdge"), graphql_name="edges") + """A list of edges.""" + + nodes = sgqlc.types.Field(sgqlc.types.list_of("DiscussionCategory"), graphql_name="nodes") + """A list of nodes.""" + + page_info = sgqlc.types.Field(sgqlc.types.non_null("PageInfo"), graphql_name="pageInfo") + """Information to aid in pagination.""" + + total_count = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="totalCount") + """Identifies the total count of items in the connection.""" + + +class DiscussionCategoryEdge(sgqlc.types.Type): + """An edge in a connection.""" + + __schema__ = github_schema + __field_names__ = ("cursor", "node") + cursor = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="cursor") + """A cursor for use in pagination.""" + + node = sgqlc.types.Field("DiscussionCategory", graphql_name="node") + """The item at the end of the edge.""" + + +class DiscussionCommentConnection(sgqlc.types.relay.Connection): + """The connection type for DiscussionComment.""" + + __schema__ = github_schema + __field_names__ = ("edges", "nodes", "page_info", "total_count") + edges = sgqlc.types.Field(sgqlc.types.list_of("DiscussionCommentEdge"), graphql_name="edges") + """A list of edges.""" + + nodes = 
sgqlc.types.Field(sgqlc.types.list_of("DiscussionComment"), graphql_name="nodes") + """A list of nodes.""" + + page_info = sgqlc.types.Field(sgqlc.types.non_null("PageInfo"), graphql_name="pageInfo") + """Information to aid in pagination.""" + + total_count = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="totalCount") + """Identifies the total count of items in the connection.""" + + +class DiscussionCommentEdge(sgqlc.types.Type): + """An edge in a connection.""" + + __schema__ = github_schema + __field_names__ = ("cursor", "node") + cursor = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="cursor") + """A cursor for use in pagination.""" + + node = sgqlc.types.Field("DiscussionComment", graphql_name="node") + """The item at the end of the edge.""" + + +class DiscussionConnection(sgqlc.types.relay.Connection): + """The connection type for Discussion.""" + + __schema__ = github_schema + __field_names__ = ("edges", "nodes", "page_info", "total_count") + edges = sgqlc.types.Field(sgqlc.types.list_of("DiscussionEdge"), graphql_name="edges") + """A list of edges.""" + + nodes = sgqlc.types.Field(sgqlc.types.list_of("Discussion"), graphql_name="nodes") + """A list of nodes.""" + + page_info = sgqlc.types.Field(sgqlc.types.non_null("PageInfo"), graphql_name="pageInfo") + """Information to aid in pagination.""" + + total_count = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="totalCount") + """Identifies the total count of items in the connection.""" + + +class DiscussionEdge(sgqlc.types.Type): + """An edge in a connection.""" + + __schema__ = github_schema + __field_names__ = ("cursor", "node") + cursor = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="cursor") + """A cursor for use in pagination.""" + + node = sgqlc.types.Field("Discussion", graphql_name="node") + """The item at the end of the edge.""" + + +class DiscussionPollOptionConnection(sgqlc.types.relay.Connection): + """The connection type for 
DiscussionPollOption.""" + + __schema__ = github_schema + __field_names__ = ("edges", "nodes", "page_info", "total_count") + edges = sgqlc.types.Field(sgqlc.types.list_of("DiscussionPollOptionEdge"), graphql_name="edges") + """A list of edges.""" + + nodes = sgqlc.types.Field(sgqlc.types.list_of("DiscussionPollOption"), graphql_name="nodes") + """A list of nodes.""" + + page_info = sgqlc.types.Field(sgqlc.types.non_null("PageInfo"), graphql_name="pageInfo") + """Information to aid in pagination.""" + + total_count = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="totalCount") + """Identifies the total count of items in the connection.""" + + +class DiscussionPollOptionEdge(sgqlc.types.Type): + """An edge in a connection.""" + + __schema__ = github_schema + __field_names__ = ("cursor", "node") + cursor = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="cursor") + """A cursor for use in pagination.""" + + node = sgqlc.types.Field("DiscussionPollOption", graphql_name="node") + """The item at the end of the edge.""" + + +class DismissPullRequestReviewPayload(sgqlc.types.Type): + """Autogenerated return type of DismissPullRequestReview""" + + __schema__ = github_schema + __field_names__ = ("client_mutation_id", "pull_request_review") + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + pull_request_review = sgqlc.types.Field("PullRequestReview", graphql_name="pullRequestReview") + """The dismissed pull request review.""" + + +class DismissRepositoryVulnerabilityAlertPayload(sgqlc.types.Type): + """Autogenerated return type of DismissRepositoryVulnerabilityAlert""" + + __schema__ = github_schema + __field_names__ = ("client_mutation_id", "repository_vulnerability_alert") + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + 
repository_vulnerability_alert = sgqlc.types.Field("RepositoryVulnerabilityAlert", graphql_name="repositoryVulnerabilityAlert") + """The Dependabot alert that was dismissed""" + + +class EnablePullRequestAutoMergePayload(sgqlc.types.Type): + """Autogenerated return type of EnablePullRequestAutoMerge""" + + __schema__ = github_schema + __field_names__ = ("actor", "client_mutation_id", "pull_request") + actor = sgqlc.types.Field(Actor, graphql_name="actor") + """Identifies the actor who performed the event.""" + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + pull_request = sgqlc.types.Field("PullRequest", graphql_name="pullRequest") + """The pull request auto-merge was enabled on.""" + + +class EnterpriseAdministratorConnection(sgqlc.types.relay.Connection): + """The connection type for User.""" + + __schema__ = github_schema + __field_names__ = ("edges", "nodes", "page_info", "total_count") + edges = sgqlc.types.Field(sgqlc.types.list_of("EnterpriseAdministratorEdge"), graphql_name="edges") + """A list of edges.""" + + nodes = sgqlc.types.Field(sgqlc.types.list_of("User"), graphql_name="nodes") + """A list of nodes.""" + + page_info = sgqlc.types.Field(sgqlc.types.non_null("PageInfo"), graphql_name="pageInfo") + """Information to aid in pagination.""" + + total_count = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="totalCount") + """Identifies the total count of items in the connection.""" + + +class EnterpriseAdministratorEdge(sgqlc.types.Type): + """A User who is an administrator of an enterprise.""" + + __schema__ = github_schema + __field_names__ = ("cursor", "node", "role") + cursor = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="cursor") + """A cursor for use in pagination.""" + + node = sgqlc.types.Field("User", graphql_name="node") + """The item at the end of the edge.""" + + role = 
sgqlc.types.Field(sgqlc.types.non_null(EnterpriseAdministratorRole), graphql_name="role") + """The role of the administrator.""" + + +class EnterpriseAdministratorInvitationConnection(sgqlc.types.relay.Connection): + """The connection type for EnterpriseAdministratorInvitation.""" + + __schema__ = github_schema + __field_names__ = ("edges", "nodes", "page_info", "total_count") + edges = sgqlc.types.Field(sgqlc.types.list_of("EnterpriseAdministratorInvitationEdge"), graphql_name="edges") + """A list of edges.""" + + nodes = sgqlc.types.Field(sgqlc.types.list_of("EnterpriseAdministratorInvitation"), graphql_name="nodes") + """A list of nodes.""" + + page_info = sgqlc.types.Field(sgqlc.types.non_null("PageInfo"), graphql_name="pageInfo") + """Information to aid in pagination.""" + + total_count = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="totalCount") + """Identifies the total count of items in the connection.""" + + +class EnterpriseAdministratorInvitationEdge(sgqlc.types.Type): + """An edge in a connection.""" + + __schema__ = github_schema + __field_names__ = ("cursor", "node") + cursor = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="cursor") + """A cursor for use in pagination.""" + + node = sgqlc.types.Field("EnterpriseAdministratorInvitation", graphql_name="node") + """The item at the end of the edge.""" + + +class EnterpriseAuditEntryData(sgqlc.types.Interface): + """Metadata for an audit entry containing enterprise account + information. 
+ """ + + __schema__ = github_schema + __field_names__ = ("enterprise_resource_path", "enterprise_slug", "enterprise_url") + enterprise_resource_path = sgqlc.types.Field(URI, graphql_name="enterpriseResourcePath") + """The HTTP path for this enterprise.""" + + enterprise_slug = sgqlc.types.Field(String, graphql_name="enterpriseSlug") + """The slug of the enterprise.""" + + enterprise_url = sgqlc.types.Field(URI, graphql_name="enterpriseUrl") + """The HTTP URL for this enterprise.""" + + +class EnterpriseBillingInfo(sgqlc.types.Type): + """Enterprise billing information visible to enterprise billing + managers and owners. + """ + + __schema__ = github_schema + __field_names__ = ( + "all_licensable_users_count", + "asset_packs", + "bandwidth_quota", + "bandwidth_usage", + "bandwidth_usage_percentage", + "storage_quota", + "storage_usage", + "storage_usage_percentage", + "total_available_licenses", + "total_licenses", + ) + all_licensable_users_count = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="allLicensableUsersCount") + """The number of licenseable users/emails across the enterprise.""" + + asset_packs = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="assetPacks") + """The number of data packs used by all organizations owned by the + enterprise. + """ + + bandwidth_quota = sgqlc.types.Field(sgqlc.types.non_null(Float), graphql_name="bandwidthQuota") + """The bandwidth quota in GB for all organizations owned by the + enterprise. + """ + + bandwidth_usage = sgqlc.types.Field(sgqlc.types.non_null(Float), graphql_name="bandwidthUsage") + """The bandwidth usage in GB for all organizations owned by the + enterprise. 
+ """ + + bandwidth_usage_percentage = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="bandwidthUsagePercentage") + """The bandwidth usage as a percentage of the bandwidth quota.""" + + storage_quota = sgqlc.types.Field(sgqlc.types.non_null(Float), graphql_name="storageQuota") + """The storage quota in GB for all organizations owned by the + enterprise. + """ + + storage_usage = sgqlc.types.Field(sgqlc.types.non_null(Float), graphql_name="storageUsage") + """The storage usage in GB for all organizations owned by the + enterprise. + """ + + storage_usage_percentage = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="storageUsagePercentage") + """The storage usage as a percentage of the storage quota.""" + + total_available_licenses = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="totalAvailableLicenses") + """The number of available licenses across all owned organizations + based on the unique number of billable users. + """ + + total_licenses = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="totalLicenses") + """The total number of licenses allocated.""" + + +class EnterpriseMemberConnection(sgqlc.types.relay.Connection): + """The connection type for EnterpriseMember.""" + + __schema__ = github_schema + __field_names__ = ("edges", "nodes", "page_info", "total_count") + edges = sgqlc.types.Field(sgqlc.types.list_of("EnterpriseMemberEdge"), graphql_name="edges") + """A list of edges.""" + + nodes = sgqlc.types.Field(sgqlc.types.list_of("EnterpriseMember"), graphql_name="nodes") + """A list of nodes.""" + + page_info = sgqlc.types.Field(sgqlc.types.non_null("PageInfo"), graphql_name="pageInfo") + """Information to aid in pagination.""" + + total_count = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="totalCount") + """Identifies the total count of items in the connection.""" + + +class EnterpriseMemberEdge(sgqlc.types.Type): + """A User who is a member of an enterprise through one or more + organizations. 
+ """ + + __schema__ = github_schema + __field_names__ = ("cursor", "node") + cursor = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="cursor") + """A cursor for use in pagination.""" + + node = sgqlc.types.Field("EnterpriseMember", graphql_name="node") + """The item at the end of the edge.""" + + +class EnterpriseOrganizationMembershipConnection(sgqlc.types.relay.Connection): + """The connection type for Organization.""" + + __schema__ = github_schema + __field_names__ = ("edges", "nodes", "page_info", "total_count") + edges = sgqlc.types.Field(sgqlc.types.list_of("EnterpriseOrganizationMembershipEdge"), graphql_name="edges") + """A list of edges.""" + + nodes = sgqlc.types.Field(sgqlc.types.list_of("Organization"), graphql_name="nodes") + """A list of nodes.""" + + page_info = sgqlc.types.Field(sgqlc.types.non_null("PageInfo"), graphql_name="pageInfo") + """Information to aid in pagination.""" + + total_count = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="totalCount") + """Identifies the total count of items in the connection.""" + + +class EnterpriseOrganizationMembershipEdge(sgqlc.types.Type): + """An enterprise organization that a user is a member of.""" + + __schema__ = github_schema + __field_names__ = ("cursor", "node", "role") + cursor = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="cursor") + """A cursor for use in pagination.""" + + node = sgqlc.types.Field("Organization", graphql_name="node") + """The item at the end of the edge.""" + + role = sgqlc.types.Field(sgqlc.types.non_null(EnterpriseUserAccountMembershipRole), graphql_name="role") + """The role of the user in the enterprise membership.""" + + +class EnterpriseOutsideCollaboratorConnection(sgqlc.types.relay.Connection): + """The connection type for User.""" + + __schema__ = github_schema + __field_names__ = ("edges", "nodes", "page_info", "total_count") + edges = sgqlc.types.Field(sgqlc.types.list_of("EnterpriseOutsideCollaboratorEdge"), 
graphql_name="edges") + """A list of edges.""" + + nodes = sgqlc.types.Field(sgqlc.types.list_of("User"), graphql_name="nodes") + """A list of nodes.""" + + page_info = sgqlc.types.Field(sgqlc.types.non_null("PageInfo"), graphql_name="pageInfo") + """Information to aid in pagination.""" + + total_count = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="totalCount") + """Identifies the total count of items in the connection.""" + + +class EnterpriseOutsideCollaboratorEdge(sgqlc.types.Type): + """A User who is an outside collaborator of an enterprise through one + or more organizations. + """ + + __schema__ = github_schema + __field_names__ = ("cursor", "node", "repositories") + cursor = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="cursor") + """A cursor for use in pagination.""" + + node = sgqlc.types.Field("User", graphql_name="node") + """The item at the end of the edge.""" + + repositories = sgqlc.types.Field( + sgqlc.types.non_null("EnterpriseRepositoryInfoConnection"), + graphql_name="repositories", + args=sgqlc.types.ArgDict( + ( + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ("order_by", sgqlc.types.Arg(RepositoryOrder, graphql_name="orderBy", default={"field": "NAME", "direction": "ASC"})), + ) + ), + ) + """The enterprise organization repositories this user is a member of. + + Arguments: + + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. + * `order_by` (`RepositoryOrder`): Ordering options for + repositories. 
(default: `{field: NAME, direction: ASC}`) + """ + + +class EnterpriseOwnerInfo(sgqlc.types.Type): + """Enterprise information only visible to enterprise owners.""" + + __schema__ = github_schema + __field_names__ = ( + "admins", + "affiliated_users_with_two_factor_disabled", + "affiliated_users_with_two_factor_disabled_exist", + "allow_private_repository_forking_setting", + "allow_private_repository_forking_setting_organizations", + "default_repository_permission_setting", + "default_repository_permission_setting_organizations", + "domains", + "enterprise_server_installations", + "ip_allow_list_enabled_setting", + "ip_allow_list_entries", + "ip_allow_list_for_installed_apps_enabled_setting", + "is_updating_default_repository_permission", + "is_updating_two_factor_requirement", + "members_can_change_repository_visibility_setting", + "members_can_change_repository_visibility_setting_organizations", + "members_can_create_internal_repositories_setting", + "members_can_create_private_repositories_setting", + "members_can_create_public_repositories_setting", + "members_can_create_repositories_setting", + "members_can_create_repositories_setting_organizations", + "members_can_delete_issues_setting", + "members_can_delete_issues_setting_organizations", + "members_can_delete_repositories_setting", + "members_can_delete_repositories_setting_organizations", + "members_can_invite_collaborators_setting", + "members_can_invite_collaborators_setting_organizations", + "members_can_make_purchases_setting", + "members_can_update_protected_branches_setting", + "members_can_update_protected_branches_setting_organizations", + "members_can_view_dependency_insights_setting", + "members_can_view_dependency_insights_setting_organizations", + "notification_delivery_restriction_enabled_setting", + "oidc_provider", + "organization_projects_setting", + "organization_projects_setting_organizations", + "outside_collaborators", + "pending_admin_invitations", + "pending_collaborator_invitations", 
+ "pending_member_invitations", + "repository_projects_setting", + "repository_projects_setting_organizations", + "saml_identity_provider", + "saml_identity_provider_setting_organizations", + "support_entitlements", + "team_discussions_setting", + "team_discussions_setting_organizations", + "two_factor_required_setting", + "two_factor_required_setting_organizations", + ) + admins = sgqlc.types.Field( + sgqlc.types.non_null(EnterpriseAdministratorConnection), + graphql_name="admins", + args=sgqlc.types.ArgDict( + ( + ( + "organization_logins", + sgqlc.types.Arg(sgqlc.types.list_of(sgqlc.types.non_null(String)), graphql_name="organizationLogins", default=None), + ), + ("query", sgqlc.types.Arg(String, graphql_name="query", default=None)), + ("role", sgqlc.types.Arg(EnterpriseAdministratorRole, graphql_name="role", default=None)), + ( + "order_by", + sgqlc.types.Arg(EnterpriseMemberOrder, graphql_name="orderBy", default={"field": "LOGIN", "direction": "ASC"}), + ), + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ) + ), + ) + """A list of all of the administrators for this enterprise. + + Arguments: + + * `organization_logins` (`[String!]`): Only return members within + the organizations with these logins + * `query` (`String`): The search string to look for. + * `role` (`EnterpriseAdministratorRole`): The role to filter by. + * `order_by` (`EnterpriseMemberOrder`): Ordering options for + administrators returned from the connection. (default: `{field: + LOGIN, direction: ASC}`) + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. 
+ * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. + """ + + affiliated_users_with_two_factor_disabled = sgqlc.types.Field( + sgqlc.types.non_null("UserConnection"), + graphql_name="affiliatedUsersWithTwoFactorDisabled", + args=sgqlc.types.ArgDict( + ( + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ) + ), + ) + """A list of users in the enterprise who currently have two-factor + authentication disabled. + + Arguments: + + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. + """ + + affiliated_users_with_two_factor_disabled_exist = sgqlc.types.Field( + sgqlc.types.non_null(Boolean), graphql_name="affiliatedUsersWithTwoFactorDisabledExist" + ) + """Whether or not affiliated users with two-factor authentication + disabled exist in the enterprise. + """ + + allow_private_repository_forking_setting = sgqlc.types.Field( + sgqlc.types.non_null(EnterpriseEnabledDisabledSettingValue), graphql_name="allowPrivateRepositoryForkingSetting" + ) + """The setting value for whether private repository forking is + enabled for repositories in organizations in this enterprise. 
+ """ + + allow_private_repository_forking_setting_organizations = sgqlc.types.Field( + sgqlc.types.non_null("OrganizationConnection"), + graphql_name="allowPrivateRepositoryForkingSettingOrganizations", + args=sgqlc.types.ArgDict( + ( + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ("value", sgqlc.types.Arg(sgqlc.types.non_null(Boolean), graphql_name="value", default=None)), + ("order_by", sgqlc.types.Arg(OrganizationOrder, graphql_name="orderBy", default={"field": "LOGIN", "direction": "ASC"})), + ) + ), + ) + """A list of enterprise organizations configured with the provided + private repository forking setting value. + + Arguments: + + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. + * `value` (`Boolean!`): The setting value to find organizations + for. + * `order_by` (`OrganizationOrder`): Ordering options for + organizations with this setting. (default: `{field: LOGIN, + direction: ASC}`) + """ + + default_repository_permission_setting = sgqlc.types.Field( + sgqlc.types.non_null(EnterpriseDefaultRepositoryPermissionSettingValue), graphql_name="defaultRepositoryPermissionSetting" + ) + """The setting value for base repository permissions for + organizations in this enterprise. 
+ """ + + default_repository_permission_setting_organizations = sgqlc.types.Field( + sgqlc.types.non_null("OrganizationConnection"), + graphql_name="defaultRepositoryPermissionSettingOrganizations", + args=sgqlc.types.ArgDict( + ( + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ("value", sgqlc.types.Arg(sgqlc.types.non_null(DefaultRepositoryPermissionField), graphql_name="value", default=None)), + ("order_by", sgqlc.types.Arg(OrganizationOrder, graphql_name="orderBy", default={"field": "LOGIN", "direction": "ASC"})), + ) + ), + ) + """A list of enterprise organizations configured with the provided + base repository permission. + + Arguments: + + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. + * `value` (`DefaultRepositoryPermissionField!`): The permission to + find organizations for. + * `order_by` (`OrganizationOrder`): Ordering options for + organizations with this setting. 
(default: `{field: LOGIN, + direction: ASC}`) + """ + + domains = sgqlc.types.Field( + sgqlc.types.non_null("VerifiableDomainConnection"), + graphql_name="domains", + args=sgqlc.types.ArgDict( + ( + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ("is_verified", sgqlc.types.Arg(Boolean, graphql_name="isVerified", default=None)), + ("is_approved", sgqlc.types.Arg(Boolean, graphql_name="isApproved", default=None)), + ( + "order_by", + sgqlc.types.Arg(VerifiableDomainOrder, graphql_name="orderBy", default={"field": "DOMAIN", "direction": "ASC"}), + ), + ) + ), + ) + """A list of domains owned by the enterprise. + + Arguments: + + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. + * `is_verified` (`Boolean`): Filter whether or not the domain is + verified. (default: `null`) + * `is_approved` (`Boolean`): Filter whether or not the domain is + approved. (default: `null`) + * `order_by` (`VerifiableDomainOrder`): Ordering options for + verifiable domains returned. 
(default: `{field: DOMAIN, + direction: ASC}`) + """ + + enterprise_server_installations = sgqlc.types.Field( + sgqlc.types.non_null("EnterpriseServerInstallationConnection"), + graphql_name="enterpriseServerInstallations", + args=sgqlc.types.ArgDict( + ( + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ("connected_only", sgqlc.types.Arg(Boolean, graphql_name="connectedOnly", default=False)), + ( + "order_by", + sgqlc.types.Arg( + EnterpriseServerInstallationOrder, graphql_name="orderBy", default={"field": "HOST_NAME", "direction": "ASC"} + ), + ), + ) + ), + ) + """Enterprise Server installations owned by the enterprise. + + Arguments: + + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. + * `connected_only` (`Boolean`): Whether or not to only return + installations discovered via GitHub Connect. (default: `false`) + * `order_by` (`EnterpriseServerInstallationOrder`): Ordering + options for Enterprise Server installations returned. (default: + `{field: HOST_NAME, direction: ASC}`) + """ + + ip_allow_list_enabled_setting = sgqlc.types.Field( + sgqlc.types.non_null(IpAllowListEnabledSettingValue), graphql_name="ipAllowListEnabledSetting" + ) + """The setting value for whether the enterprise has an IP allow list + enabled. 
+ """ + + ip_allow_list_entries = sgqlc.types.Field( + sgqlc.types.non_null("IpAllowListEntryConnection"), + graphql_name="ipAllowListEntries", + args=sgqlc.types.ArgDict( + ( + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ( + "order_by", + sgqlc.types.Arg( + IpAllowListEntryOrder, graphql_name="orderBy", default={"field": "ALLOW_LIST_VALUE", "direction": "ASC"} + ), + ), + ) + ), + ) + """The IP addresses that are allowed to access resources owned by the + enterprise. + + Arguments: + + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. + * `order_by` (`IpAllowListEntryOrder`): Ordering options for IP + allow list entries returned. (default: `{field: + ALLOW_LIST_VALUE, direction: ASC}`) + """ + + ip_allow_list_for_installed_apps_enabled_setting = sgqlc.types.Field( + sgqlc.types.non_null(IpAllowListForInstalledAppsEnabledSettingValue), graphql_name="ipAllowListForInstalledAppsEnabledSetting" + ) + """The setting value for whether the enterprise has IP allow list + configuration for installed GitHub Apps enabled. + """ + + is_updating_default_repository_permission = sgqlc.types.Field( + sgqlc.types.non_null(Boolean), graphql_name="isUpdatingDefaultRepositoryPermission" + ) + """Whether or not the base repository permission is currently being + updated. 
+ """ + + is_updating_two_factor_requirement = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="isUpdatingTwoFactorRequirement") + """Whether the two-factor authentication requirement is currently + being enforced. + """ + + members_can_change_repository_visibility_setting = sgqlc.types.Field( + sgqlc.types.non_null(EnterpriseEnabledDisabledSettingValue), graphql_name="membersCanChangeRepositoryVisibilitySetting" + ) + """The setting value for whether organization members with admin + permissions on a repository can change repository visibility. + """ + + members_can_change_repository_visibility_setting_organizations = sgqlc.types.Field( + sgqlc.types.non_null("OrganizationConnection"), + graphql_name="membersCanChangeRepositoryVisibilitySettingOrganizations", + args=sgqlc.types.ArgDict( + ( + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ("value", sgqlc.types.Arg(sgqlc.types.non_null(Boolean), graphql_name="value", default=None)), + ("order_by", sgqlc.types.Arg(OrganizationOrder, graphql_name="orderBy", default={"field": "LOGIN", "direction": "ASC"})), + ) + ), + ) + """A list of enterprise organizations configured with the provided + can change repository visibility setting value. + + Arguments: + + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. + * `value` (`Boolean!`): The setting value to find organizations + for. + * `order_by` (`OrganizationOrder`): Ordering options for + organizations with this setting. 
(default: `{field: LOGIN, + direction: ASC}`) + """ + + members_can_create_internal_repositories_setting = sgqlc.types.Field( + Boolean, graphql_name="membersCanCreateInternalRepositoriesSetting" + ) + """The setting value for whether members of organizations in the + enterprise can create internal repositories. + """ + + members_can_create_private_repositories_setting = sgqlc.types.Field(Boolean, graphql_name="membersCanCreatePrivateRepositoriesSetting") + """The setting value for whether members of organizations in the + enterprise can create private repositories. + """ + + members_can_create_public_repositories_setting = sgqlc.types.Field(Boolean, graphql_name="membersCanCreatePublicRepositoriesSetting") + """The setting value for whether members of organizations in the + enterprise can create public repositories. + """ + + members_can_create_repositories_setting = sgqlc.types.Field( + EnterpriseMembersCanCreateRepositoriesSettingValue, graphql_name="membersCanCreateRepositoriesSetting" + ) + """The setting value for whether members of organizations in the + enterprise can create repositories. 
+ """ + + members_can_create_repositories_setting_organizations = sgqlc.types.Field( + sgqlc.types.non_null("OrganizationConnection"), + graphql_name="membersCanCreateRepositoriesSettingOrganizations", + args=sgqlc.types.ArgDict( + ( + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ( + "value", + sgqlc.types.Arg( + sgqlc.types.non_null(OrganizationMembersCanCreateRepositoriesSettingValue), graphql_name="value", default=None + ), + ), + ("order_by", sgqlc.types.Arg(OrganizationOrder, graphql_name="orderBy", default={"field": "LOGIN", "direction": "ASC"})), + ) + ), + ) + """A list of enterprise organizations configured with the provided + repository creation setting value. + + Arguments: + + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. + * `value` + (`OrganizationMembersCanCreateRepositoriesSettingValue!`): The + setting to find organizations for. + * `order_by` (`OrganizationOrder`): Ordering options for + organizations with this setting. (default: `{field: LOGIN, + direction: ASC}`) + """ + + members_can_delete_issues_setting = sgqlc.types.Field( + sgqlc.types.non_null(EnterpriseEnabledDisabledSettingValue), graphql_name="membersCanDeleteIssuesSetting" + ) + """The setting value for whether members with admin permissions for + repositories can delete issues. 
+ """ + + members_can_delete_issues_setting_organizations = sgqlc.types.Field( + sgqlc.types.non_null("OrganizationConnection"), + graphql_name="membersCanDeleteIssuesSettingOrganizations", + args=sgqlc.types.ArgDict( + ( + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ("value", sgqlc.types.Arg(sgqlc.types.non_null(Boolean), graphql_name="value", default=None)), + ("order_by", sgqlc.types.Arg(OrganizationOrder, graphql_name="orderBy", default={"field": "LOGIN", "direction": "ASC"})), + ) + ), + ) + """A list of enterprise organizations configured with the provided + members can delete issues setting value. + + Arguments: + + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. + * `value` (`Boolean!`): The setting value to find organizations + for. + * `order_by` (`OrganizationOrder`): Ordering options for + organizations with this setting. (default: `{field: LOGIN, + direction: ASC}`) + """ + + members_can_delete_repositories_setting = sgqlc.types.Field( + sgqlc.types.non_null(EnterpriseEnabledDisabledSettingValue), graphql_name="membersCanDeleteRepositoriesSetting" + ) + """The setting value for whether members with admin permissions for + repositories can delete or transfer repositories. 
+ """ + + members_can_delete_repositories_setting_organizations = sgqlc.types.Field( + sgqlc.types.non_null("OrganizationConnection"), + graphql_name="membersCanDeleteRepositoriesSettingOrganizations", + args=sgqlc.types.ArgDict( + ( + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ("value", sgqlc.types.Arg(sgqlc.types.non_null(Boolean), graphql_name="value", default=None)), + ("order_by", sgqlc.types.Arg(OrganizationOrder, graphql_name="orderBy", default={"field": "LOGIN", "direction": "ASC"})), + ) + ), + ) + """A list of enterprise organizations configured with the provided + members can delete repositories setting value. + + Arguments: + + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. + * `value` (`Boolean!`): The setting value to find organizations + for. + * `order_by` (`OrganizationOrder`): Ordering options for + organizations with this setting. (default: `{field: LOGIN, + direction: ASC}`) + """ + + members_can_invite_collaborators_setting = sgqlc.types.Field( + sgqlc.types.non_null(EnterpriseEnabledDisabledSettingValue), graphql_name="membersCanInviteCollaboratorsSetting" + ) + """The setting value for whether members of organizations in the + enterprise can invite outside collaborators. 
+ """ + + members_can_invite_collaborators_setting_organizations = sgqlc.types.Field( + sgqlc.types.non_null("OrganizationConnection"), + graphql_name="membersCanInviteCollaboratorsSettingOrganizations", + args=sgqlc.types.ArgDict( + ( + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ("value", sgqlc.types.Arg(sgqlc.types.non_null(Boolean), graphql_name="value", default=None)), + ("order_by", sgqlc.types.Arg(OrganizationOrder, graphql_name="orderBy", default={"field": "LOGIN", "direction": "ASC"})), + ) + ), + ) + """A list of enterprise organizations configured with the provided + members can invite collaborators setting value. + + Arguments: + + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. + * `value` (`Boolean!`): The setting value to find organizations + for. + * `order_by` (`OrganizationOrder`): Ordering options for + organizations with this setting. (default: `{field: LOGIN, + direction: ASC}`) + """ + + members_can_make_purchases_setting = sgqlc.types.Field( + sgqlc.types.non_null(EnterpriseMembersCanMakePurchasesSettingValue), graphql_name="membersCanMakePurchasesSetting" + ) + """Indicates whether members of this enterprise's organizations can + purchase additional services for those organizations. 
+ """ + + members_can_update_protected_branches_setting = sgqlc.types.Field( + sgqlc.types.non_null(EnterpriseEnabledDisabledSettingValue), graphql_name="membersCanUpdateProtectedBranchesSetting" + ) + """The setting value for whether members with admin permissions for + repositories can update protected branches. + """ + + members_can_update_protected_branches_setting_organizations = sgqlc.types.Field( + sgqlc.types.non_null("OrganizationConnection"), + graphql_name="membersCanUpdateProtectedBranchesSettingOrganizations", + args=sgqlc.types.ArgDict( + ( + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ("value", sgqlc.types.Arg(sgqlc.types.non_null(Boolean), graphql_name="value", default=None)), + ("order_by", sgqlc.types.Arg(OrganizationOrder, graphql_name="orderBy", default={"field": "LOGIN", "direction": "ASC"})), + ) + ), + ) + """A list of enterprise organizations configured with the provided + members can update protected branches setting value. + + Arguments: + + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. + * `value` (`Boolean!`): The setting value to find organizations + for. + * `order_by` (`OrganizationOrder`): Ordering options for + organizations with this setting. 
(default: `{field: LOGIN, + direction: ASC}`) + """ + + members_can_view_dependency_insights_setting = sgqlc.types.Field( + sgqlc.types.non_null(EnterpriseEnabledDisabledSettingValue), graphql_name="membersCanViewDependencyInsightsSetting" + ) + """The setting value for whether members can view dependency + insights. + """ + + members_can_view_dependency_insights_setting_organizations = sgqlc.types.Field( + sgqlc.types.non_null("OrganizationConnection"), + graphql_name="membersCanViewDependencyInsightsSettingOrganizations", + args=sgqlc.types.ArgDict( + ( + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ("value", sgqlc.types.Arg(sgqlc.types.non_null(Boolean), graphql_name="value", default=None)), + ("order_by", sgqlc.types.Arg(OrganizationOrder, graphql_name="orderBy", default={"field": "LOGIN", "direction": "ASC"})), + ) + ), + ) + """A list of enterprise organizations configured with the provided + members can view dependency insights setting value. + + Arguments: + + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. + * `value` (`Boolean!`): The setting value to find organizations + for. + * `order_by` (`OrganizationOrder`): Ordering options for + organizations with this setting. 
(default: `{field: LOGIN, + direction: ASC}`) + """ + + notification_delivery_restriction_enabled_setting = sgqlc.types.Field( + sgqlc.types.non_null(NotificationRestrictionSettingValue), graphql_name="notificationDeliveryRestrictionEnabledSetting" + ) + """Indicates if email notification delivery for this enterprise is + restricted to verified or approved domains. + """ + + oidc_provider = sgqlc.types.Field("OIDCProvider", graphql_name="oidcProvider") + """The OIDC Identity Provider for the enterprise.""" + + organization_projects_setting = sgqlc.types.Field( + sgqlc.types.non_null(EnterpriseEnabledDisabledSettingValue), graphql_name="organizationProjectsSetting" + ) + """The setting value for whether organization projects are enabled + for organizations in this enterprise. + """ + + organization_projects_setting_organizations = sgqlc.types.Field( + sgqlc.types.non_null("OrganizationConnection"), + graphql_name="organizationProjectsSettingOrganizations", + args=sgqlc.types.ArgDict( + ( + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ("value", sgqlc.types.Arg(sgqlc.types.non_null(Boolean), graphql_name="value", default=None)), + ("order_by", sgqlc.types.Arg(OrganizationOrder, graphql_name="orderBy", default={"field": "LOGIN", "direction": "ASC"})), + ) + ), + ) + """A list of enterprise organizations configured with the provided + organization projects setting value. + + Arguments: + + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. 
+ * `value` (`Boolean!`): The setting value to find organizations + for. + * `order_by` (`OrganizationOrder`): Ordering options for + organizations with this setting. (default: `{field: LOGIN, + direction: ASC}`) + """ + + outside_collaborators = sgqlc.types.Field( + sgqlc.types.non_null(EnterpriseOutsideCollaboratorConnection), + graphql_name="outsideCollaborators", + args=sgqlc.types.ArgDict( + ( + ("login", sgqlc.types.Arg(String, graphql_name="login", default=None)), + ("query", sgqlc.types.Arg(String, graphql_name="query", default=None)), + ( + "order_by", + sgqlc.types.Arg(EnterpriseMemberOrder, graphql_name="orderBy", default={"field": "LOGIN", "direction": "ASC"}), + ), + ("visibility", sgqlc.types.Arg(RepositoryVisibility, graphql_name="visibility", default=None)), + ("has_two_factor_enabled", sgqlc.types.Arg(Boolean, graphql_name="hasTwoFactorEnabled", default=None)), + ( + "organization_logins", + sgqlc.types.Arg(sgqlc.types.list_of(sgqlc.types.non_null(String)), graphql_name="organizationLogins", default=None), + ), + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ) + ), + ) + """A list of outside collaborators across the repositories in the + enterprise. + + Arguments: + + * `login` (`String`): The login of one specific outside + collaborator. + * `query` (`String`): The search string to look for. + * `order_by` (`EnterpriseMemberOrder`): Ordering options for + outside collaborators returned from the connection. (default: + `{field: LOGIN, direction: ASC}`) + * `visibility` (`RepositoryVisibility`): Only return outside + collaborators on repositories with this visibility. + * `has_two_factor_enabled` (`Boolean`): Only return outside + collaborators with this two-factor authentication status. 
+ (default: `null`) + * `organization_logins` (`[String!]`): Only return outside + collaborators within the organizations with these logins + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. + """ + + pending_admin_invitations = sgqlc.types.Field( + sgqlc.types.non_null(EnterpriseAdministratorInvitationConnection), + graphql_name="pendingAdminInvitations", + args=sgqlc.types.ArgDict( + ( + ("query", sgqlc.types.Arg(String, graphql_name="query", default=None)), + ( + "order_by", + sgqlc.types.Arg( + EnterpriseAdministratorInvitationOrder, graphql_name="orderBy", default={"field": "CREATED_AT", "direction": "DESC"} + ), + ), + ("role", sgqlc.types.Arg(EnterpriseAdministratorRole, graphql_name="role", default=None)), + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ) + ), + ) + """A list of pending administrator invitations for the enterprise. + + Arguments: + + * `query` (`String`): The search string to look for. + * `order_by` (`EnterpriseAdministratorInvitationOrder`): Ordering + options for pending enterprise administrator invitations + returned from the connection. (default: `{field: CREATED_AT, + direction: DESC}`) + * `role` (`EnterpriseAdministratorRole`): The role to filter by. + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. 
+ * `last` (`Int`): Returns the last _n_ elements from the list. + """ + + pending_collaborator_invitations = sgqlc.types.Field( + sgqlc.types.non_null("RepositoryInvitationConnection"), + graphql_name="pendingCollaboratorInvitations", + args=sgqlc.types.ArgDict( + ( + ("query", sgqlc.types.Arg(String, graphql_name="query", default=None)), + ( + "order_by", + sgqlc.types.Arg( + RepositoryInvitationOrder, graphql_name="orderBy", default={"field": "CREATED_AT", "direction": "DESC"} + ), + ), + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ) + ), + ) + """A list of pending collaborator invitations across the repositories + in the enterprise. + + Arguments: + + * `query` (`String`): The search string to look for. + * `order_by` (`RepositoryInvitationOrder`): Ordering options for + pending repository collaborator invitations returned from the + connection. (default: `{field: CREATED_AT, direction: DESC}`) + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. 
+ """ + + pending_member_invitations = sgqlc.types.Field( + sgqlc.types.non_null("EnterprisePendingMemberInvitationConnection"), + graphql_name="pendingMemberInvitations", + args=sgqlc.types.ArgDict( + ( + ("query", sgqlc.types.Arg(String, graphql_name="query", default=None)), + ( + "organization_logins", + sgqlc.types.Arg(sgqlc.types.list_of(sgqlc.types.non_null(String)), graphql_name="organizationLogins", default=None), + ), + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ) + ), + ) + """A list of pending member invitations for organizations in the + enterprise. + + Arguments: + + * `query` (`String`): The search string to look for. + * `organization_logins` (`[String!]`): Only return invitations + within the organizations with these logins + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. + """ + + repository_projects_setting = sgqlc.types.Field( + sgqlc.types.non_null(EnterpriseEnabledDisabledSettingValue), graphql_name="repositoryProjectsSetting" + ) + """The setting value for whether repository projects are enabled in + this enterprise. 
+ """ + + repository_projects_setting_organizations = sgqlc.types.Field( + sgqlc.types.non_null("OrganizationConnection"), + graphql_name="repositoryProjectsSettingOrganizations", + args=sgqlc.types.ArgDict( + ( + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ("value", sgqlc.types.Arg(sgqlc.types.non_null(Boolean), graphql_name="value", default=None)), + ("order_by", sgqlc.types.Arg(OrganizationOrder, graphql_name="orderBy", default={"field": "LOGIN", "direction": "ASC"})), + ) + ), + ) + """A list of enterprise organizations configured with the provided + repository projects setting value. + + Arguments: + + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. + * `value` (`Boolean!`): The setting value to find organizations + for. + * `order_by` (`OrganizationOrder`): Ordering options for + organizations with this setting. (default: `{field: LOGIN, + direction: ASC}`) + """ + + saml_identity_provider = sgqlc.types.Field("EnterpriseIdentityProvider", graphql_name="samlIdentityProvider") + """The SAML Identity Provider for the enterprise. When used by a + GitHub App, requires an installation token with read and write + access to members. 
+ """ + + saml_identity_provider_setting_organizations = sgqlc.types.Field( + sgqlc.types.non_null("OrganizationConnection"), + graphql_name="samlIdentityProviderSettingOrganizations", + args=sgqlc.types.ArgDict( + ( + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ("value", sgqlc.types.Arg(sgqlc.types.non_null(IdentityProviderConfigurationState), graphql_name="value", default=None)), + ("order_by", sgqlc.types.Arg(OrganizationOrder, graphql_name="orderBy", default={"field": "LOGIN", "direction": "ASC"})), + ) + ), + ) + """A list of enterprise organizations configured with the SAML single + sign-on setting value. + + Arguments: + + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. + * `value` (`IdentityProviderConfigurationState!`): The setting + value to find organizations for. + * `order_by` (`OrganizationOrder`): Ordering options for + organizations with this setting. 
(default: `{field: LOGIN, + direction: ASC}`) + """ + + support_entitlements = sgqlc.types.Field( + sgqlc.types.non_null(EnterpriseMemberConnection), + graphql_name="supportEntitlements", + args=sgqlc.types.ArgDict( + ( + ( + "order_by", + sgqlc.types.Arg(EnterpriseMemberOrder, graphql_name="orderBy", default={"field": "LOGIN", "direction": "ASC"}), + ), + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ) + ), + ) + """A list of members with a support entitlement. + + Arguments: + + * `order_by` (`EnterpriseMemberOrder`): Ordering options for + support entitlement users returned from the connection. + (default: `{field: LOGIN, direction: ASC}`) + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. + """ + + team_discussions_setting = sgqlc.types.Field( + sgqlc.types.non_null(EnterpriseEnabledDisabledSettingValue), graphql_name="teamDiscussionsSetting" + ) + """The setting value for whether team discussions are enabled for + organizations in this enterprise. 
+ """ + + team_discussions_setting_organizations = sgqlc.types.Field( + sgqlc.types.non_null("OrganizationConnection"), + graphql_name="teamDiscussionsSettingOrganizations", + args=sgqlc.types.ArgDict( + ( + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ("value", sgqlc.types.Arg(sgqlc.types.non_null(Boolean), graphql_name="value", default=None)), + ("order_by", sgqlc.types.Arg(OrganizationOrder, graphql_name="orderBy", default={"field": "LOGIN", "direction": "ASC"})), + ) + ), + ) + """A list of enterprise organizations configured with the provided + team discussions setting value. + + Arguments: + + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. + * `value` (`Boolean!`): The setting value to find organizations + for. + * `order_by` (`OrganizationOrder`): Ordering options for + organizations with this setting. (default: `{field: LOGIN, + direction: ASC}`) + """ + + two_factor_required_setting = sgqlc.types.Field( + sgqlc.types.non_null(EnterpriseEnabledSettingValue), graphql_name="twoFactorRequiredSetting" + ) + """The setting value for whether the enterprise requires two-factor + authentication for its organizations and users. 
+ """ + + two_factor_required_setting_organizations = sgqlc.types.Field( + sgqlc.types.non_null("OrganizationConnection"), + graphql_name="twoFactorRequiredSettingOrganizations", + args=sgqlc.types.ArgDict( + ( + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ("value", sgqlc.types.Arg(sgqlc.types.non_null(Boolean), graphql_name="value", default=None)), + ("order_by", sgqlc.types.Arg(OrganizationOrder, graphql_name="orderBy", default={"field": "LOGIN", "direction": "ASC"})), + ) + ), + ) + """A list of enterprise organizations configured with the two-factor + authentication setting value. + + Arguments: + + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. + * `value` (`Boolean!`): The setting value to find organizations + for. + * `order_by` (`OrganizationOrder`): Ordering options for + organizations with this setting. 
(default: `{field: LOGIN, + direction: ASC}`) + """ + + +class EnterprisePendingMemberInvitationConnection(sgqlc.types.relay.Connection): + """The connection type for OrganizationInvitation.""" + + __schema__ = github_schema + __field_names__ = ("edges", "nodes", "page_info", "total_count", "total_unique_user_count") + edges = sgqlc.types.Field(sgqlc.types.list_of("EnterprisePendingMemberInvitationEdge"), graphql_name="edges") + """A list of edges.""" + + nodes = sgqlc.types.Field(sgqlc.types.list_of("OrganizationInvitation"), graphql_name="nodes") + """A list of nodes.""" + + page_info = sgqlc.types.Field(sgqlc.types.non_null("PageInfo"), graphql_name="pageInfo") + """Information to aid in pagination.""" + + total_count = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="totalCount") + """Identifies the total count of items in the connection.""" + + total_unique_user_count = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="totalUniqueUserCount") + """Identifies the total count of unique users in the connection.""" + + +class EnterprisePendingMemberInvitationEdge(sgqlc.types.Type): + """An invitation to be a member in an enterprise organization.""" + + __schema__ = github_schema + __field_names__ = ("cursor", "node") + cursor = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="cursor") + """A cursor for use in pagination.""" + + node = sgqlc.types.Field("OrganizationInvitation", graphql_name="node") + """The item at the end of the edge.""" + + +class EnterpriseRepositoryInfoConnection(sgqlc.types.relay.Connection): + """The connection type for EnterpriseRepositoryInfo.""" + + __schema__ = github_schema + __field_names__ = ("edges", "nodes", "page_info", "total_count") + edges = sgqlc.types.Field(sgqlc.types.list_of("EnterpriseRepositoryInfoEdge"), graphql_name="edges") + """A list of edges.""" + + nodes = sgqlc.types.Field(sgqlc.types.list_of("EnterpriseRepositoryInfo"), graphql_name="nodes") + """A list of nodes.""" + + page_info = 
sgqlc.types.Field(sgqlc.types.non_null("PageInfo"), graphql_name="pageInfo") + """Information to aid in pagination.""" + + total_count = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="totalCount") + """Identifies the total count of items in the connection.""" + + +class EnterpriseRepositoryInfoEdge(sgqlc.types.Type): + """An edge in a connection.""" + + __schema__ = github_schema + __field_names__ = ("cursor", "node") + cursor = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="cursor") + """A cursor for use in pagination.""" + + node = sgqlc.types.Field("EnterpriseRepositoryInfo", graphql_name="node") + """The item at the end of the edge.""" + + +class EnterpriseServerInstallationConnection(sgqlc.types.relay.Connection): + """The connection type for EnterpriseServerInstallation.""" + + __schema__ = github_schema + __field_names__ = ("edges", "nodes", "page_info", "total_count") + edges = sgqlc.types.Field(sgqlc.types.list_of("EnterpriseServerInstallationEdge"), graphql_name="edges") + """A list of edges.""" + + nodes = sgqlc.types.Field(sgqlc.types.list_of("EnterpriseServerInstallation"), graphql_name="nodes") + """A list of nodes.""" + + page_info = sgqlc.types.Field(sgqlc.types.non_null("PageInfo"), graphql_name="pageInfo") + """Information to aid in pagination.""" + + total_count = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="totalCount") + """Identifies the total count of items in the connection.""" + + +class EnterpriseServerInstallationEdge(sgqlc.types.Type): + """An edge in a connection.""" + + __schema__ = github_schema + __field_names__ = ("cursor", "node") + cursor = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="cursor") + """A cursor for use in pagination.""" + + node = sgqlc.types.Field("EnterpriseServerInstallation", graphql_name="node") + """The item at the end of the edge.""" + + +class EnterpriseServerUserAccountConnection(sgqlc.types.relay.Connection): + """The connection type for 
EnterpriseServerUserAccount.""" + + __schema__ = github_schema + __field_names__ = ("edges", "nodes", "page_info", "total_count") + edges = sgqlc.types.Field(sgqlc.types.list_of("EnterpriseServerUserAccountEdge"), graphql_name="edges") + """A list of edges.""" + + nodes = sgqlc.types.Field(sgqlc.types.list_of("EnterpriseServerUserAccount"), graphql_name="nodes") + """A list of nodes.""" + + page_info = sgqlc.types.Field(sgqlc.types.non_null("PageInfo"), graphql_name="pageInfo") + """Information to aid in pagination.""" + + total_count = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="totalCount") + """Identifies the total count of items in the connection.""" + + +class EnterpriseServerUserAccountEdge(sgqlc.types.Type): + """An edge in a connection.""" + + __schema__ = github_schema + __field_names__ = ("cursor", "node") + cursor = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="cursor") + """A cursor for use in pagination.""" + + node = sgqlc.types.Field("EnterpriseServerUserAccount", graphql_name="node") + """The item at the end of the edge.""" + + +class EnterpriseServerUserAccountEmailConnection(sgqlc.types.relay.Connection): + """The connection type for EnterpriseServerUserAccountEmail.""" + + __schema__ = github_schema + __field_names__ = ("edges", "nodes", "page_info", "total_count") + edges = sgqlc.types.Field(sgqlc.types.list_of("EnterpriseServerUserAccountEmailEdge"), graphql_name="edges") + """A list of edges.""" + + nodes = sgqlc.types.Field(sgqlc.types.list_of("EnterpriseServerUserAccountEmail"), graphql_name="nodes") + """A list of nodes.""" + + page_info = sgqlc.types.Field(sgqlc.types.non_null("PageInfo"), graphql_name="pageInfo") + """Information to aid in pagination.""" + + total_count = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="totalCount") + """Identifies the total count of items in the connection.""" + + +class EnterpriseServerUserAccountEmailEdge(sgqlc.types.Type): + """An edge in a connection.""" + + 
__schema__ = github_schema + __field_names__ = ("cursor", "node") + cursor = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="cursor") + """A cursor for use in pagination.""" + + node = sgqlc.types.Field("EnterpriseServerUserAccountEmail", graphql_name="node") + """The item at the end of the edge.""" + + +class EnterpriseServerUserAccountsUploadConnection(sgqlc.types.relay.Connection): + """The connection type for EnterpriseServerUserAccountsUpload.""" + + __schema__ = github_schema + __field_names__ = ("edges", "nodes", "page_info", "total_count") + edges = sgqlc.types.Field(sgqlc.types.list_of("EnterpriseServerUserAccountsUploadEdge"), graphql_name="edges") + """A list of edges.""" + + nodes = sgqlc.types.Field(sgqlc.types.list_of("EnterpriseServerUserAccountsUpload"), graphql_name="nodes") + """A list of nodes.""" + + page_info = sgqlc.types.Field(sgqlc.types.non_null("PageInfo"), graphql_name="pageInfo") + """Information to aid in pagination.""" + + total_count = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="totalCount") + """Identifies the total count of items in the connection.""" + + +class EnterpriseServerUserAccountsUploadEdge(sgqlc.types.Type): + """An edge in a connection.""" + + __schema__ = github_schema + __field_names__ = ("cursor", "node") + cursor = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="cursor") + """A cursor for use in pagination.""" + + node = sgqlc.types.Field("EnterpriseServerUserAccountsUpload", graphql_name="node") + """The item at the end of the edge.""" + + +class EnterpriseUserAccountConnection(sgqlc.types.relay.Connection): + """The connection type for EnterpriseUserAccount.""" + + __schema__ = github_schema + __field_names__ = ("edges", "nodes", "page_info", "total_count") + edges = sgqlc.types.Field(sgqlc.types.list_of("EnterpriseUserAccountEdge"), graphql_name="edges") + """A list of edges.""" + + nodes = sgqlc.types.Field(sgqlc.types.list_of("EnterpriseUserAccount"), 
graphql_name="nodes") + """A list of nodes.""" + + page_info = sgqlc.types.Field(sgqlc.types.non_null("PageInfo"), graphql_name="pageInfo") + """Information to aid in pagination.""" + + total_count = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="totalCount") + """Identifies the total count of items in the connection.""" + + +class EnterpriseUserAccountEdge(sgqlc.types.Type): + """An edge in a connection.""" + + __schema__ = github_schema + __field_names__ = ("cursor", "node") + cursor = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="cursor") + """A cursor for use in pagination.""" + + node = sgqlc.types.Field("EnterpriseUserAccount", graphql_name="node") + """The item at the end of the edge.""" + + +class EnvironmentConnection(sgqlc.types.relay.Connection): + """The connection type for Environment.""" + + __schema__ = github_schema + __field_names__ = ("edges", "nodes", "page_info", "total_count") + edges = sgqlc.types.Field(sgqlc.types.list_of("EnvironmentEdge"), graphql_name="edges") + """A list of edges.""" + + nodes = sgqlc.types.Field(sgqlc.types.list_of("Environment"), graphql_name="nodes") + """A list of nodes.""" + + page_info = sgqlc.types.Field(sgqlc.types.non_null("PageInfo"), graphql_name="pageInfo") + """Information to aid in pagination.""" + + total_count = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="totalCount") + """Identifies the total count of items in the connection.""" + + +class EnvironmentEdge(sgqlc.types.Type): + """An edge in a connection.""" + + __schema__ = github_schema + __field_names__ = ("cursor", "node") + cursor = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="cursor") + """A cursor for use in pagination.""" + + node = sgqlc.types.Field("Environment", graphql_name="node") + """The item at the end of the edge.""" + + +class ExternalIdentityAttribute(sgqlc.types.Type): + """An attribute for the External Identity attributes collection""" + + __schema__ = github_schema + 
__field_names__ = ("metadata", "name", "value") + metadata = sgqlc.types.Field(String, graphql_name="metadata") + """The attribute metadata as JSON""" + + name = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="name") + """The attribute name""" + + value = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="value") + """The attribute value""" + + +class ExternalIdentityConnection(sgqlc.types.relay.Connection): + """The connection type for ExternalIdentity.""" + + __schema__ = github_schema + __field_names__ = ("edges", "nodes", "page_info", "total_count") + edges = sgqlc.types.Field(sgqlc.types.list_of("ExternalIdentityEdge"), graphql_name="edges") + """A list of edges.""" + + nodes = sgqlc.types.Field(sgqlc.types.list_of("ExternalIdentity"), graphql_name="nodes") + """A list of nodes.""" + + page_info = sgqlc.types.Field(sgqlc.types.non_null("PageInfo"), graphql_name="pageInfo") + """Information to aid in pagination.""" + + total_count = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="totalCount") + """Identifies the total count of items in the connection.""" + + +class ExternalIdentityEdge(sgqlc.types.Type): + """An edge in a connection.""" + + __schema__ = github_schema + __field_names__ = ("cursor", "node") + cursor = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="cursor") + """A cursor for use in pagination.""" + + node = sgqlc.types.Field("ExternalIdentity", graphql_name="node") + """The item at the end of the edge.""" + + +class ExternalIdentitySamlAttributes(sgqlc.types.Type): + """SAML attributes for the External Identity""" + + __schema__ = github_schema + __field_names__ = ("attributes", "emails", "family_name", "given_name", "groups", "name_id", "username") + attributes = sgqlc.types.Field( + sgqlc.types.non_null(sgqlc.types.list_of(sgqlc.types.non_null(ExternalIdentityAttribute))), graphql_name="attributes" + ) + """SAML Identity attributes""" + + emails = 
sgqlc.types.Field(sgqlc.types.list_of(sgqlc.types.non_null("UserEmailMetadata")), graphql_name="emails") + """The emails associated with the SAML identity""" + + family_name = sgqlc.types.Field(String, graphql_name="familyName") + """Family name of the SAML identity""" + + given_name = sgqlc.types.Field(String, graphql_name="givenName") + """Given name of the SAML identity""" + + groups = sgqlc.types.Field(sgqlc.types.list_of(sgqlc.types.non_null(String)), graphql_name="groups") + """The groups linked to this identity in IDP""" + + name_id = sgqlc.types.Field(String, graphql_name="nameId") + """The NameID of the SAML identity""" + + username = sgqlc.types.Field(String, graphql_name="username") + """The userName of the SAML identity""" + + +class ExternalIdentityScimAttributes(sgqlc.types.Type): + """SCIM attributes for the External Identity""" + + __schema__ = github_schema + __field_names__ = ("emails", "family_name", "given_name", "groups", "username") + emails = sgqlc.types.Field(sgqlc.types.list_of(sgqlc.types.non_null("UserEmailMetadata")), graphql_name="emails") + """The emails associated with the SCIM identity""" + + family_name = sgqlc.types.Field(String, graphql_name="familyName") + """Family name of the SCIM identity""" + + given_name = sgqlc.types.Field(String, graphql_name="givenName") + """Given name of the SCIM identity""" + + groups = sgqlc.types.Field(sgqlc.types.list_of(sgqlc.types.non_null(String)), graphql_name="groups") + """The groups linked to this identity in IDP""" + + username = sgqlc.types.Field(String, graphql_name="username") + """The userName of the SCIM identity""" + + +class FollowOrganizationPayload(sgqlc.types.Type): + """Autogenerated return type of FollowOrganization""" + + __schema__ = github_schema + __field_names__ = ("client_mutation_id", "organization") + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + organization = 
sgqlc.types.Field("Organization", graphql_name="organization") + """The organization that was followed.""" + + +class FollowUserPayload(sgqlc.types.Type): + """Autogenerated return type of FollowUser""" + + __schema__ = github_schema + __field_names__ = ("client_mutation_id", "user") + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + user = sgqlc.types.Field("User", graphql_name="user") + """The user that was followed.""" + + +class FollowerConnection(sgqlc.types.relay.Connection): + """The connection type for User.""" + + __schema__ = github_schema + __field_names__ = ("edges", "nodes", "page_info", "total_count") + edges = sgqlc.types.Field(sgqlc.types.list_of("UserEdge"), graphql_name="edges") + """A list of edges.""" + + nodes = sgqlc.types.Field(sgqlc.types.list_of("User"), graphql_name="nodes") + """A list of nodes.""" + + page_info = sgqlc.types.Field(sgqlc.types.non_null("PageInfo"), graphql_name="pageInfo") + """Information to aid in pagination.""" + + total_count = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="totalCount") + """Identifies the total count of items in the connection.""" + + +class FollowingConnection(sgqlc.types.relay.Connection): + """The connection type for User.""" + + __schema__ = github_schema + __field_names__ = ("edges", "nodes", "page_info", "total_count") + edges = sgqlc.types.Field(sgqlc.types.list_of("UserEdge"), graphql_name="edges") + """A list of edges.""" + + nodes = sgqlc.types.Field(sgqlc.types.list_of("User"), graphql_name="nodes") + """A list of nodes.""" + + page_info = sgqlc.types.Field(sgqlc.types.non_null("PageInfo"), graphql_name="pageInfo") + """Information to aid in pagination.""" + + total_count = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="totalCount") + """Identifies the total count of items in the connection.""" + + +class FundingLink(sgqlc.types.Type): + """A funding platform link 
for a repository.""" + + __schema__ = github_schema + __field_names__ = ("platform", "url") + platform = sgqlc.types.Field(sgqlc.types.non_null(FundingPlatform), graphql_name="platform") + """The funding platform this link is for.""" + + url = sgqlc.types.Field(sgqlc.types.non_null(URI), graphql_name="url") + """The configured URL for this funding link.""" + + +class GistCommentConnection(sgqlc.types.relay.Connection): + """The connection type for GistComment.""" + + __schema__ = github_schema + __field_names__ = ("edges", "nodes", "page_info", "total_count") + edges = sgqlc.types.Field(sgqlc.types.list_of("GistCommentEdge"), graphql_name="edges") + """A list of edges.""" + + nodes = sgqlc.types.Field(sgqlc.types.list_of("GistComment"), graphql_name="nodes") + """A list of nodes.""" + + page_info = sgqlc.types.Field(sgqlc.types.non_null("PageInfo"), graphql_name="pageInfo") + """Information to aid in pagination.""" + + total_count = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="totalCount") + """Identifies the total count of items in the connection.""" + + +class GistCommentEdge(sgqlc.types.Type): + """An edge in a connection.""" + + __schema__ = github_schema + __field_names__ = ("cursor", "node") + cursor = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="cursor") + """A cursor for use in pagination.""" + + node = sgqlc.types.Field("GistComment", graphql_name="node") + """The item at the end of the edge.""" + + +class GistConnection(sgqlc.types.relay.Connection): + """The connection type for Gist.""" + + __schema__ = github_schema + __field_names__ = ("edges", "nodes", "page_info", "total_count") + edges = sgqlc.types.Field(sgqlc.types.list_of("GistEdge"), graphql_name="edges") + """A list of edges.""" + + nodes = sgqlc.types.Field(sgqlc.types.list_of("Gist"), graphql_name="nodes") + """A list of nodes.""" + + page_info = sgqlc.types.Field(sgqlc.types.non_null("PageInfo"), graphql_name="pageInfo") + """Information to aid in 
pagination.""" + + total_count = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="totalCount") + """Identifies the total count of items in the connection.""" + + +class GistEdge(sgqlc.types.Type): + """An edge in a connection.""" + + __schema__ = github_schema + __field_names__ = ("cursor", "node") + cursor = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="cursor") + """A cursor for use in pagination.""" + + node = sgqlc.types.Field("Gist", graphql_name="node") + """The item at the end of the edge.""" + + +class GistFile(sgqlc.types.Type): + """A file in a gist.""" + + __schema__ = github_schema + __field_names__ = ("encoded_name", "encoding", "extension", "is_image", "is_truncated", "language", "name", "size", "text") + encoded_name = sgqlc.types.Field(String, graphql_name="encodedName") + """The file name encoded to remove characters that are invalid in URL + paths. + """ + + encoding = sgqlc.types.Field(String, graphql_name="encoding") + """The gist file encoding.""" + + extension = sgqlc.types.Field(String, graphql_name="extension") + """The file extension from the file name.""" + + is_image = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="isImage") + """Indicates if this file is an image.""" + + is_truncated = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="isTruncated") + """Whether the file's contents were truncated.""" + + language = sgqlc.types.Field("Language", graphql_name="language") + """The programming language this file is written in.""" + + name = sgqlc.types.Field(String, graphql_name="name") + """The gist file name.""" + + size = sgqlc.types.Field(Int, graphql_name="size") + """The gist file size in bytes.""" + + text = sgqlc.types.Field( + String, graphql_name="text", args=sgqlc.types.ArgDict((("truncate", sgqlc.types.Arg(Int, graphql_name="truncate", default=None)),)) + ) + """UTF8 text data or null if the file is binary + + Arguments: + + * `truncate` (`Int`): Optionally truncate the 
returned file to + this length. + """ + + +class GitActor(sgqlc.types.Type): + """Represents an actor in a Git commit (ie. an author or committer).""" + + __schema__ = github_schema + __field_names__ = ("avatar_url", "date", "email", "name", "user") + avatar_url = sgqlc.types.Field( + sgqlc.types.non_null(URI), + graphql_name="avatarUrl", + args=sgqlc.types.ArgDict((("size", sgqlc.types.Arg(Int, graphql_name="size", default=None)),)), + ) + """A URL pointing to the author's public avatar. + + Arguments: + + * `size` (`Int`): The size of the resulting square image. + """ + + date = sgqlc.types.Field(GitTimestamp, graphql_name="date") + """The timestamp of the Git action (authoring or committing).""" + + email = sgqlc.types.Field(String, graphql_name="email") + """The email in the Git commit.""" + + name = sgqlc.types.Field(String, graphql_name="name") + """The name in the Git commit.""" + + user = sgqlc.types.Field("User", graphql_name="user") + """The GitHub user corresponding to the email field. Null if no such + user exists. 
+ """ + + +class GitActorConnection(sgqlc.types.relay.Connection): + """The connection type for GitActor.""" + + __schema__ = github_schema + __field_names__ = ("edges", "nodes", "page_info", "total_count") + edges = sgqlc.types.Field(sgqlc.types.list_of("GitActorEdge"), graphql_name="edges") + """A list of edges.""" + + nodes = sgqlc.types.Field(sgqlc.types.list_of(GitActor), graphql_name="nodes") + """A list of nodes.""" + + page_info = sgqlc.types.Field(sgqlc.types.non_null("PageInfo"), graphql_name="pageInfo") + """Information to aid in pagination.""" + + total_count = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="totalCount") + """Identifies the total count of items in the connection.""" + + +class GitActorEdge(sgqlc.types.Type): + """An edge in a connection.""" + + __schema__ = github_schema + __field_names__ = ("cursor", "node") + cursor = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="cursor") + """A cursor for use in pagination.""" + + node = sgqlc.types.Field(GitActor, graphql_name="node") + """The item at the end of the edge.""" + + +class GitHubMetadata(sgqlc.types.Type): + """Represents information about the GitHub instance.""" + + __schema__ = github_schema + __field_names__ = ( + "git_hub_services_sha", + "git_ip_addresses", + "hook_ip_addresses", + "importer_ip_addresses", + "is_password_authentication_verifiable", + "pages_ip_addresses", + ) + git_hub_services_sha = sgqlc.types.Field(sgqlc.types.non_null(GitObjectID), graphql_name="gitHubServicesSha") + """Returns a String that's a SHA of `github-services`""" + + git_ip_addresses = sgqlc.types.Field(sgqlc.types.list_of(sgqlc.types.non_null(String)), graphql_name="gitIpAddresses") + """IP addresses that users connect to for git operations""" + + hook_ip_addresses = sgqlc.types.Field(sgqlc.types.list_of(sgqlc.types.non_null(String)), graphql_name="hookIpAddresses") + """IP addresses that service hooks are sent from""" + + importer_ip_addresses = 
sgqlc.types.Field(sgqlc.types.list_of(sgqlc.types.non_null(String)), graphql_name="importerIpAddresses") + """IP addresses that the importer connects from""" + + is_password_authentication_verifiable = sgqlc.types.Field( + sgqlc.types.non_null(Boolean), graphql_name="isPasswordAuthenticationVerifiable" + ) + """Whether or not users are verified""" + + pages_ip_addresses = sgqlc.types.Field(sgqlc.types.list_of(sgqlc.types.non_null(String)), graphql_name="pagesIpAddresses") + """IP addresses for GitHub Pages' A records""" + + +class GitObject(sgqlc.types.Interface): + """Represents a Git object.""" + + __schema__ = github_schema + __field_names__ = ("abbreviated_oid", "commit_resource_path", "commit_url", "id", "oid", "repository") + abbreviated_oid = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="abbreviatedOid") + """An abbreviated version of the Git object ID""" + + commit_resource_path = sgqlc.types.Field(sgqlc.types.non_null(URI), graphql_name="commitResourcePath") + """The HTTP path for this Git object""" + + commit_url = sgqlc.types.Field(sgqlc.types.non_null(URI), graphql_name="commitUrl") + """The HTTP URL for this Git object""" + + id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="id") + + oid = sgqlc.types.Field(sgqlc.types.non_null(GitObjectID), graphql_name="oid") + """The Git object ID""" + + repository = sgqlc.types.Field(sgqlc.types.non_null("Repository"), graphql_name="repository") + """The Repository the Git object belongs to""" + + +class GitSignature(sgqlc.types.Interface): + """Information about a signature (GPG or S/MIME) on a Commit or Tag.""" + + __schema__ = github_schema + __field_names__ = ("email", "is_valid", "payload", "signature", "signer", "state", "was_signed_by_git_hub") + email = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="email") + """Email used to sign this object.""" + + is_valid = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="isValid") + """True if the signature is 
valid and verified by GitHub.""" + + payload = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="payload") + """Payload for GPG signing object. Raw ODB object without the + signature header. + """ + + signature = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="signature") + """ASCII-armored signature header from object.""" + + signer = sgqlc.types.Field("User", graphql_name="signer") + """GitHub user corresponding to the email signing this commit.""" + + state = sgqlc.types.Field(sgqlc.types.non_null(GitSignatureState), graphql_name="state") + """The state of this signature. `VALID` if signature is valid and + verified by GitHub, otherwise represents reason why signature is + considered invalid. + """ + + was_signed_by_git_hub = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="wasSignedByGitHub") + """True if the signature was made with GitHub's signing key.""" + + +class GrantEnterpriseOrganizationsMigratorRolePayload(sgqlc.types.Type): + """Autogenerated return type of + GrantEnterpriseOrganizationsMigratorRole + """ + + __schema__ = github_schema + __field_names__ = ("client_mutation_id", "organizations") + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + organizations = sgqlc.types.Field( + "OrganizationConnection", + graphql_name="organizations", + args=sgqlc.types.ArgDict( + ( + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ) + ), + ) + """The organizations that had the migrator role applied to for the + given user. + + Arguments: + + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. 
+ * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. + """ + + +class GrantMigratorRolePayload(sgqlc.types.Type): + """Autogenerated return type of GrantMigratorRole""" + + __schema__ = github_schema + __field_names__ = ("client_mutation_id", "success") + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + success = sgqlc.types.Field(Boolean, graphql_name="success") + """Did the operation succeed?""" + + +class Hovercard(sgqlc.types.Type): + """Detail needed to display a hovercard for a user""" + + __schema__ = github_schema + __field_names__ = ("contexts",) + contexts = sgqlc.types.Field( + sgqlc.types.non_null(sgqlc.types.list_of(sgqlc.types.non_null("HovercardContext"))), graphql_name="contexts" + ) + """Each of the contexts for this hovercard""" + + +class HovercardContext(sgqlc.types.Interface): + """An individual line of a hovercard""" + + __schema__ = github_schema + __field_names__ = ("message", "octicon") + message = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="message") + """A string describing this context""" + + octicon = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="octicon") + """An octicon to accompany this context""" + + +class InviteEnterpriseAdminPayload(sgqlc.types.Type): + """Autogenerated return type of InviteEnterpriseAdmin""" + + __schema__ = github_schema + __field_names__ = ("client_mutation_id", "invitation") + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + invitation = sgqlc.types.Field("EnterpriseAdministratorInvitation", graphql_name="invitation") + """The created enterprise administrator invitation.""" + + +class 
IpAllowListEntryConnection(sgqlc.types.relay.Connection): + """The connection type for IpAllowListEntry.""" + + __schema__ = github_schema + __field_names__ = ("edges", "nodes", "page_info", "total_count") + edges = sgqlc.types.Field(sgqlc.types.list_of("IpAllowListEntryEdge"), graphql_name="edges") + """A list of edges.""" + + nodes = sgqlc.types.Field(sgqlc.types.list_of("IpAllowListEntry"), graphql_name="nodes") + """A list of nodes.""" + + page_info = sgqlc.types.Field(sgqlc.types.non_null("PageInfo"), graphql_name="pageInfo") + """Information to aid in pagination.""" + + total_count = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="totalCount") + """Identifies the total count of items in the connection.""" + + +class IpAllowListEntryEdge(sgqlc.types.Type): + """An edge in a connection.""" + + __schema__ = github_schema + __field_names__ = ("cursor", "node") + cursor = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="cursor") + """A cursor for use in pagination.""" + + node = sgqlc.types.Field("IpAllowListEntry", graphql_name="node") + """The item at the end of the edge.""" + + +class IssueCommentConnection(sgqlc.types.relay.Connection): + """The connection type for IssueComment.""" + + __schema__ = github_schema + __field_names__ = ("edges", "nodes", "page_info", "total_count") + edges = sgqlc.types.Field(sgqlc.types.list_of("IssueCommentEdge"), graphql_name="edges") + """A list of edges.""" + + nodes = sgqlc.types.Field(sgqlc.types.list_of("IssueComment"), graphql_name="nodes") + """A list of nodes.""" + + page_info = sgqlc.types.Field(sgqlc.types.non_null("PageInfo"), graphql_name="pageInfo") + """Information to aid in pagination.""" + + total_count = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="totalCount") + """Identifies the total count of items in the connection.""" + + +class IssueCommentEdge(sgqlc.types.Type): + """An edge in a connection.""" + + __schema__ = github_schema + __field_names__ = ("cursor", "node") + 
cursor = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="cursor") + """A cursor for use in pagination.""" + + node = sgqlc.types.Field("IssueComment", graphql_name="node") + """The item at the end of the edge.""" + + +class IssueConnection(sgqlc.types.relay.Connection): + """The connection type for Issue.""" + + __schema__ = github_schema + __field_names__ = ("edges", "nodes", "page_info", "total_count") + edges = sgqlc.types.Field(sgqlc.types.list_of("IssueEdge"), graphql_name="edges") + """A list of edges.""" + + nodes = sgqlc.types.Field(sgqlc.types.list_of("Issue"), graphql_name="nodes") + """A list of nodes.""" + + page_info = sgqlc.types.Field(sgqlc.types.non_null("PageInfo"), graphql_name="pageInfo") + """Information to aid in pagination.""" + + total_count = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="totalCount") + """Identifies the total count of items in the connection.""" + + +class IssueContributionsByRepository(sgqlc.types.Type): + """This aggregates issues opened by a user within one repository.""" + + __schema__ = github_schema + __field_names__ = ("contributions", "repository") + contributions = sgqlc.types.Field( + sgqlc.types.non_null(CreatedIssueContributionConnection), + graphql_name="contributions", + args=sgqlc.types.ArgDict( + ( + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ("order_by", sgqlc.types.Arg(ContributionOrder, graphql_name="orderBy", default={"direction": "DESC"})), + ) + ), + ) + """The issue contributions. + + Arguments: + + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. 
+ * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. + * `order_by` (`ContributionOrder`): Ordering options for + contributions returned from the connection. (default: + `{direction: DESC}`) + """ + + repository = sgqlc.types.Field(sgqlc.types.non_null("Repository"), graphql_name="repository") + """The repository in which the issues were opened.""" + + +class IssueEdge(sgqlc.types.Type): + """An edge in a connection.""" + + __schema__ = github_schema + __field_names__ = ("cursor", "node") + cursor = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="cursor") + """A cursor for use in pagination.""" + + node = sgqlc.types.Field("Issue", graphql_name="node") + """The item at the end of the edge.""" + + +class IssueTemplate(sgqlc.types.Type): + """A repository issue template.""" + + __schema__ = github_schema + __field_names__ = ("about", "body", "name", "title") + about = sgqlc.types.Field(String, graphql_name="about") + """The template purpose.""" + + body = sgqlc.types.Field(String, graphql_name="body") + """The suggested issue body.""" + + name = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="name") + """The template name.""" + + title = sgqlc.types.Field(String, graphql_name="title") + """The suggested issue title.""" + + +class IssueTimelineConnection(sgqlc.types.relay.Connection): + """The connection type for IssueTimelineItem.""" + + __schema__ = github_schema + __field_names__ = ("edges", "nodes", "page_info", "total_count") + edges = sgqlc.types.Field(sgqlc.types.list_of("IssueTimelineItemEdge"), graphql_name="edges") + """A list of edges.""" + + nodes = sgqlc.types.Field(sgqlc.types.list_of("IssueTimelineItem"), graphql_name="nodes") + """A list of nodes.""" + + page_info = sgqlc.types.Field(sgqlc.types.non_null("PageInfo"), graphql_name="pageInfo") + """Information to aid in pagination.""" + + total_count = sgqlc.types.Field(sgqlc.types.non_null(Int), 
graphql_name="totalCount") + """Identifies the total count of items in the connection.""" + + +class IssueTimelineItemEdge(sgqlc.types.Type): + """An edge in a connection.""" + + __schema__ = github_schema + __field_names__ = ("cursor", "node") + cursor = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="cursor") + """A cursor for use in pagination.""" + + node = sgqlc.types.Field("IssueTimelineItem", graphql_name="node") + """The item at the end of the edge.""" + + +class IssueTimelineItemsConnection(sgqlc.types.relay.Connection): + """The connection type for IssueTimelineItems.""" + + __schema__ = github_schema + __field_names__ = ("edges", "filtered_count", "nodes", "page_count", "page_info", "total_count", "updated_at") + edges = sgqlc.types.Field(sgqlc.types.list_of("IssueTimelineItemsEdge"), graphql_name="edges") + """A list of edges.""" + + filtered_count = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="filteredCount") + """Identifies the count of items after applying `before` and `after` + filters. + """ + + nodes = sgqlc.types.Field(sgqlc.types.list_of("IssueTimelineItems"), graphql_name="nodes") + """A list of nodes.""" + + page_count = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="pageCount") + """Identifies the count of items after applying `before`/`after` + filters and `first`/`last`/`skip` slicing. 
+ """ + + page_info = sgqlc.types.Field(sgqlc.types.non_null("PageInfo"), graphql_name="pageInfo") + """Information to aid in pagination.""" + + total_count = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="totalCount") + """Identifies the total count of items in the connection.""" + + updated_at = sgqlc.types.Field(sgqlc.types.non_null(DateTime), graphql_name="updatedAt") + """Identifies the date and time when the timeline was last updated.""" + + +class IssueTimelineItemsEdge(sgqlc.types.Type): + """An edge in a connection.""" + + __schema__ = github_schema + __field_names__ = ("cursor", "node") + cursor = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="cursor") + """A cursor for use in pagination.""" + + node = sgqlc.types.Field("IssueTimelineItems", graphql_name="node") + """The item at the end of the edge.""" + + +class LabelConnection(sgqlc.types.relay.Connection): + """The connection type for Label.""" + + __schema__ = github_schema + __field_names__ = ("edges", "nodes", "page_info", "total_count") + edges = sgqlc.types.Field(sgqlc.types.list_of("LabelEdge"), graphql_name="edges") + """A list of edges.""" + + nodes = sgqlc.types.Field(sgqlc.types.list_of("Label"), graphql_name="nodes") + """A list of nodes.""" + + page_info = sgqlc.types.Field(sgqlc.types.non_null("PageInfo"), graphql_name="pageInfo") + """Information to aid in pagination.""" + + total_count = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="totalCount") + """Identifies the total count of items in the connection.""" + + +class LabelEdge(sgqlc.types.Type): + """An edge in a connection.""" + + __schema__ = github_schema + __field_names__ = ("cursor", "node") + cursor = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="cursor") + """A cursor for use in pagination.""" + + node = sgqlc.types.Field("Label", graphql_name="node") + """The item at the end of the edge.""" + + +class Labelable(sgqlc.types.Interface): + """An object that can have labels 
assigned to it.""" + + __schema__ = github_schema + __field_names__ = ("labels",) + labels = sgqlc.types.Field( + LabelConnection, + graphql_name="labels", + args=sgqlc.types.ArgDict( + ( + ("order_by", sgqlc.types.Arg(LabelOrder, graphql_name="orderBy", default={"field": "CREATED_AT", "direction": "ASC"})), + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ) + ), + ) + """A list of labels associated with the object. + + Arguments: + + * `order_by` (`LabelOrder`): Ordering options for labels returned + from the connection. (default: `{field: CREATED_AT, direction: + ASC}`) + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. 
+ """ + + +class LanguageConnection(sgqlc.types.relay.Connection): + """A list of languages associated with the parent.""" + + __schema__ = github_schema + __field_names__ = ("edges", "nodes", "page_info", "total_count", "total_size") + edges = sgqlc.types.Field(sgqlc.types.list_of("LanguageEdge"), graphql_name="edges") + """A list of edges.""" + + nodes = sgqlc.types.Field(sgqlc.types.list_of("Language"), graphql_name="nodes") + """A list of nodes.""" + + page_info = sgqlc.types.Field(sgqlc.types.non_null("PageInfo"), graphql_name="pageInfo") + """Information to aid in pagination.""" + + total_count = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="totalCount") + """Identifies the total count of items in the connection.""" + + total_size = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="totalSize") + """The total size in bytes of files written in that language.""" + + +class LanguageEdge(sgqlc.types.Type): + """Represents the language of a repository.""" + + __schema__ = github_schema + __field_names__ = ("cursor", "node", "size") + cursor = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="cursor") + + node = sgqlc.types.Field(sgqlc.types.non_null("Language"), graphql_name="node") + + size = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="size") + """The number of bytes of code written in the language.""" + + +class LicenseRule(sgqlc.types.Type): + """Describes a License's conditions, permissions, and limitations""" + + __schema__ = github_schema + __field_names__ = ("description", "key", "label") + description = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="description") + """A description of the rule""" + + key = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="key") + """The machine-readable rule key""" + + label = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="label") + """The human-readable rule label""" + + +class LinkRepositoryToProjectPayload(sgqlc.types.Type): + 
"""Autogenerated return type of LinkRepositoryToProject""" + + __schema__ = github_schema + __field_names__ = ("client_mutation_id", "project", "repository") + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + project = sgqlc.types.Field("Project", graphql_name="project") + """The linked Project.""" + + repository = sgqlc.types.Field("Repository", graphql_name="repository") + """The linked Repository.""" + + +class LockLockablePayload(sgqlc.types.Type): + """Autogenerated return type of LockLockable""" + + __schema__ = github_schema + __field_names__ = ("actor", "client_mutation_id", "locked_record") + actor = sgqlc.types.Field(Actor, graphql_name="actor") + """Identifies the actor who performed the event.""" + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + locked_record = sgqlc.types.Field("Lockable", graphql_name="lockedRecord") + """The item that was locked.""" + + +class Lockable(sgqlc.types.Interface): + """An object that can be locked.""" + + __schema__ = github_schema + __field_names__ = ("active_lock_reason", "locked") + active_lock_reason = sgqlc.types.Field(LockReason, graphql_name="activeLockReason") + """Reason that the conversation was locked.""" + + locked = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="locked") + """`true` if the object is locked""" + + +class MarkDiscussionCommentAsAnswerPayload(sgqlc.types.Type): + """Autogenerated return type of MarkDiscussionCommentAsAnswer""" + + __schema__ = github_schema + __field_names__ = ("client_mutation_id", "discussion") + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + discussion = sgqlc.types.Field("Discussion", graphql_name="discussion") + """The discussion that includes the 
chosen comment.""" + + +class MarkFileAsViewedPayload(sgqlc.types.Type): + """Autogenerated return type of MarkFileAsViewed""" + + __schema__ = github_schema + __field_names__ = ("client_mutation_id", "pull_request") + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + pull_request = sgqlc.types.Field("PullRequest", graphql_name="pullRequest") + """The updated pull request.""" + + +class MarkPullRequestReadyForReviewPayload(sgqlc.types.Type): + """Autogenerated return type of MarkPullRequestReadyForReview""" + + __schema__ = github_schema + __field_names__ = ("client_mutation_id", "pull_request") + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + pull_request = sgqlc.types.Field("PullRequest", graphql_name="pullRequest") + """The pull request that is ready for review.""" + + +class MarketplaceListingConnection(sgqlc.types.relay.Connection): + """Look up Marketplace Listings""" + + __schema__ = github_schema + __field_names__ = ("edges", "nodes", "page_info", "total_count") + edges = sgqlc.types.Field(sgqlc.types.list_of("MarketplaceListingEdge"), graphql_name="edges") + """A list of edges.""" + + nodes = sgqlc.types.Field(sgqlc.types.list_of("MarketplaceListing"), graphql_name="nodes") + """A list of nodes.""" + + page_info = sgqlc.types.Field(sgqlc.types.non_null("PageInfo"), graphql_name="pageInfo") + """Information to aid in pagination.""" + + total_count = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="totalCount") + """Identifies the total count of items in the connection.""" + + +class MarketplaceListingEdge(sgqlc.types.Type): + """An edge in a connection.""" + + __schema__ = github_schema + __field_names__ = ("cursor", "node") + cursor = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="cursor") + """A cursor for use in pagination.""" 
+ + node = sgqlc.types.Field("MarketplaceListing", graphql_name="node") + """The item at the end of the edge.""" + + +class MemberStatusable(sgqlc.types.Interface): + """Entities that have members who can set status messages.""" + + __schema__ = github_schema + __field_names__ = ("member_statuses",) + member_statuses = sgqlc.types.Field( + sgqlc.types.non_null("UserStatusConnection"), + graphql_name="memberStatuses", + args=sgqlc.types.ArgDict( + ( + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ( + "order_by", + sgqlc.types.Arg(UserStatusOrder, graphql_name="orderBy", default={"field": "UPDATED_AT", "direction": "DESC"}), + ), + ) + ), + ) + """Get the status messages members of this entity have set that are + either public or visible only to the organization. + + Arguments: + + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. + * `order_by` (`UserStatusOrder`): Ordering options for user + statuses returned from the connection. 
(default: `{field: + UPDATED_AT, direction: DESC}`) + """ + + +class MergeBranchPayload(sgqlc.types.Type): + """Autogenerated return type of MergeBranch""" + + __schema__ = github_schema + __field_names__ = ("client_mutation_id", "merge_commit") + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + merge_commit = sgqlc.types.Field("Commit", graphql_name="mergeCommit") + """The resulting merge Commit.""" + + +class MergePullRequestPayload(sgqlc.types.Type): + """Autogenerated return type of MergePullRequest""" + + __schema__ = github_schema + __field_names__ = ("actor", "client_mutation_id", "pull_request") + actor = sgqlc.types.Field(Actor, graphql_name="actor") + """Identifies the actor who performed the event.""" + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + pull_request = sgqlc.types.Field("PullRequest", graphql_name="pullRequest") + """The pull request that was merged.""" + + +class Migration(sgqlc.types.Interface): + """Represents an Octoshift migration.""" + + __schema__ = github_schema + __field_names__ = ( + "continue_on_error", + "created_at", + "failure_reason", + "id", + "migration_log_url", + "migration_source", + "repository_name", + "source_url", + "state", + ) + continue_on_error = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="continueOnError") + """The Octoshift migration flag to continue on error.""" + + created_at = sgqlc.types.Field(sgqlc.types.non_null(DateTime), graphql_name="createdAt") + """Identifies the date and time when the object was created.""" + + failure_reason = sgqlc.types.Field(String, graphql_name="failureReason") + """The reason the migration failed.""" + + id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="id") + + migration_log_url = sgqlc.types.Field(URI, graphql_name="migrationLogUrl") + 
"""The URL for the migration log (expires 1 day after migration + completes). + """ + + migration_source = sgqlc.types.Field(sgqlc.types.non_null("MigrationSource"), graphql_name="migrationSource") + """The Octoshift migration source.""" + + repository_name = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="repositoryName") + """The target repository name.""" + + source_url = sgqlc.types.Field(sgqlc.types.non_null(URI), graphql_name="sourceUrl") + """The Octoshift migration source URL.""" + + state = sgqlc.types.Field(sgqlc.types.non_null(MigrationState), graphql_name="state") + """The Octoshift migration state.""" + + +class MilestoneConnection(sgqlc.types.relay.Connection): + """The connection type for Milestone.""" + + __schema__ = github_schema + __field_names__ = ("edges", "nodes", "page_info", "total_count") + edges = sgqlc.types.Field(sgqlc.types.list_of("MilestoneEdge"), graphql_name="edges") + """A list of edges.""" + + nodes = sgqlc.types.Field(sgqlc.types.list_of("Milestone"), graphql_name="nodes") + """A list of nodes.""" + + page_info = sgqlc.types.Field(sgqlc.types.non_null("PageInfo"), graphql_name="pageInfo") + """Information to aid in pagination.""" + + total_count = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="totalCount") + """Identifies the total count of items in the connection.""" + + +class MilestoneEdge(sgqlc.types.Type): + """An edge in a connection.""" + + __schema__ = github_schema + __field_names__ = ("cursor", "node") + cursor = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="cursor") + """A cursor for use in pagination.""" + + node = sgqlc.types.Field("Milestone", graphql_name="node") + """The item at the end of the edge.""" + + +class Minimizable(sgqlc.types.Interface): + """Entities that can be minimized.""" + + __schema__ = github_schema + __field_names__ = ("is_minimized", "minimized_reason", "viewer_can_minimize") + is_minimized = sgqlc.types.Field(sgqlc.types.non_null(Boolean), 
graphql_name="isMinimized") + """Returns whether or not a comment has been minimized.""" + + minimized_reason = sgqlc.types.Field(String, graphql_name="minimizedReason") + """Returns why the comment was minimized.""" + + viewer_can_minimize = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="viewerCanMinimize") + """Check if the current viewer can minimize this object.""" + + +class MinimizeCommentPayload(sgqlc.types.Type): + """Autogenerated return type of MinimizeComment""" + + __schema__ = github_schema + __field_names__ = ("client_mutation_id", "minimized_comment") + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + minimized_comment = sgqlc.types.Field(Minimizable, graphql_name="minimizedComment") + """The comment that was minimized.""" + + +class MoveProjectCardPayload(sgqlc.types.Type): + """Autogenerated return type of MoveProjectCard""" + + __schema__ = github_schema + __field_names__ = ("card_edge", "client_mutation_id") + card_edge = sgqlc.types.Field("ProjectCardEdge", graphql_name="cardEdge") + """The new edge of the moved card.""" + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class MoveProjectColumnPayload(sgqlc.types.Type): + """Autogenerated return type of MoveProjectColumn""" + + __schema__ = github_schema + __field_names__ = ("client_mutation_id", "column_edge") + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + column_edge = sgqlc.types.Field("ProjectColumnEdge", graphql_name="columnEdge") + """The new edge of the moved column.""" + + +class Mutation(sgqlc.types.Type): + """The root query for implementing GraphQL mutations.""" + + __schema__ = github_schema + __field_names__ = ( + "abort_queued_migrations", + 
"accept_enterprise_administrator_invitation", + "accept_topic_suggestion", + "add_assignees_to_assignable", + "add_comment", + "add_discussion_comment", + "add_discussion_poll_vote", + "add_enterprise_support_entitlement", + "add_labels_to_labelable", + "add_project_card", + "add_project_column", + "add_project_draft_issue", + "add_project_next_item", + "add_pull_request_review", + "add_pull_request_review_comment", + "add_pull_request_review_thread", + "add_reaction", + "add_star", + "add_upvote", + "add_verifiable_domain", + "approve_deployments", + "approve_verifiable_domain", + "archive_repository", + "cancel_enterprise_admin_invitation", + "cancel_sponsorship", + "change_user_status", + "clear_labels_from_labelable", + "clone_project", + "clone_template_repository", + "close_issue", + "close_pull_request", + "convert_project_card_note_to_issue", + "convert_pull_request_to_draft", + "create_branch_protection_rule", + "create_check_run", + "create_check_suite", + "create_commit_on_branch", + "create_discussion", + "create_enterprise_organization", + "create_environment", + "create_ip_allow_list_entry", + "create_issue", + "create_migration_source", + "create_project", + "create_pull_request", + "create_ref", + "create_repository", + "create_sponsors_tier", + "create_sponsorship", + "create_team_discussion", + "create_team_discussion_comment", + "decline_topic_suggestion", + "delete_branch_protection_rule", + "delete_deployment", + "delete_discussion", + "delete_discussion_comment", + "delete_environment", + "delete_ip_allow_list_entry", + "delete_issue", + "delete_issue_comment", + "delete_project", + "delete_project_card", + "delete_project_column", + "delete_project_next_item", + "delete_pull_request_review", + "delete_pull_request_review_comment", + "delete_ref", + "delete_team_discussion", + "delete_team_discussion_comment", + "delete_verifiable_domain", + "disable_pull_request_auto_merge", + "dismiss_pull_request_review", + 
"dismiss_repository_vulnerability_alert", + "enable_pull_request_auto_merge", + "follow_organization", + "follow_user", + "grant_enterprise_organizations_migrator_role", + "grant_migrator_role", + "invite_enterprise_admin", + "link_repository_to_project", + "lock_lockable", + "mark_discussion_comment_as_answer", + "mark_file_as_viewed", + "mark_pull_request_ready_for_review", + "merge_branch", + "merge_pull_request", + "minimize_comment", + "move_project_card", + "move_project_column", + "pin_issue", + "regenerate_enterprise_identity_provider_recovery_codes", + "regenerate_verifiable_domain_token", + "reject_deployments", + "remove_assignees_from_assignable", + "remove_enterprise_admin", + "remove_enterprise_identity_provider", + "remove_enterprise_organization", + "remove_enterprise_support_entitlement", + "remove_labels_from_labelable", + "remove_outside_collaborator", + "remove_reaction", + "remove_star", + "remove_upvote", + "reopen_issue", + "reopen_pull_request", + "request_reviews", + "rerequest_check_suite", + "resolve_review_thread", + "revoke_enterprise_organizations_migrator_role", + "revoke_migrator_role", + "set_enterprise_identity_provider", + "set_organization_interaction_limit", + "set_repository_interaction_limit", + "set_user_interaction_limit", + "start_repository_migration", + "submit_pull_request_review", + "transfer_issue", + "unarchive_repository", + "unfollow_organization", + "unfollow_user", + "unlink_repository_from_project", + "unlock_lockable", + "unmark_discussion_comment_as_answer", + "unmark_file_as_viewed", + "unmark_issue_as_duplicate", + "unminimize_comment", + "unpin_issue", + "unresolve_review_thread", + "update_branch_protection_rule", + "update_check_run", + "update_check_suite_preferences", + "update_discussion", + "update_discussion_comment", + "update_enterprise_administrator_role", + "update_enterprise_allow_private_repository_forking_setting", + "update_enterprise_default_repository_permission_setting", + 
"update_enterprise_members_can_change_repository_visibility_setting", + "update_enterprise_members_can_create_repositories_setting", + "update_enterprise_members_can_delete_issues_setting", + "update_enterprise_members_can_delete_repositories_setting", + "update_enterprise_members_can_invite_collaborators_setting", + "update_enterprise_members_can_make_purchases_setting", + "update_enterprise_members_can_update_protected_branches_setting", + "update_enterprise_members_can_view_dependency_insights_setting", + "update_enterprise_organization_projects_setting", + "update_enterprise_owner_organization_role", + "update_enterprise_profile", + "update_enterprise_repository_projects_setting", + "update_enterprise_team_discussions_setting", + "update_enterprise_two_factor_authentication_required_setting", + "update_environment", + "update_ip_allow_list_enabled_setting", + "update_ip_allow_list_entry", + "update_ip_allow_list_for_installed_apps_enabled_setting", + "update_issue", + "update_issue_comment", + "update_notification_restriction_setting", + "update_organization_allow_private_repository_forking_setting", + "update_project", + "update_project_card", + "update_project_column", + "update_project_draft_issue", + "update_project_next", + "update_project_next_item_field", + "update_pull_request", + "update_pull_request_branch", + "update_pull_request_review", + "update_pull_request_review_comment", + "update_ref", + "update_repository", + "update_sponsorship_preferences", + "update_subscription", + "update_team_discussion", + "update_team_discussion_comment", + "update_teams_repository", + "update_topics", + "verify_verifiable_domain", + ) + abort_queued_migrations = sgqlc.types.Field( + AbortQueuedMigrationsPayload, + graphql_name="abortQueuedMigrations", + args=sgqlc.types.ArgDict( + (("input", sgqlc.types.Arg(sgqlc.types.non_null(AbortQueuedMigrationsInput), graphql_name="input", default=None)),) + ), + ) + """Clear all of a customer's queued migrations + + Arguments: 
+ + * `input` (`AbortQueuedMigrationsInput!`): Parameters for + AbortQueuedMigrations + """ + + accept_enterprise_administrator_invitation = sgqlc.types.Field( + AcceptEnterpriseAdministratorInvitationPayload, + graphql_name="acceptEnterpriseAdministratorInvitation", + args=sgqlc.types.ArgDict( + ( + ( + "input", + sgqlc.types.Arg(sgqlc.types.non_null(AcceptEnterpriseAdministratorInvitationInput), graphql_name="input", default=None), + ), + ) + ), + ) + """Accepts a pending invitation for a user to become an administrator + of an enterprise. + + Arguments: + + * `input` (`AcceptEnterpriseAdministratorInvitationInput!`): + Parameters for AcceptEnterpriseAdministratorInvitation + """ + + accept_topic_suggestion = sgqlc.types.Field( + AcceptTopicSuggestionPayload, + graphql_name="acceptTopicSuggestion", + args=sgqlc.types.ArgDict( + (("input", sgqlc.types.Arg(sgqlc.types.non_null(AcceptTopicSuggestionInput), graphql_name="input", default=None)),) + ), + ) + """Applies a suggested topic to the repository. + + Arguments: + + * `input` (`AcceptTopicSuggestionInput!`): Parameters for + AcceptTopicSuggestion + """ + + add_assignees_to_assignable = sgqlc.types.Field( + AddAssigneesToAssignablePayload, + graphql_name="addAssigneesToAssignable", + args=sgqlc.types.ArgDict( + (("input", sgqlc.types.Arg(sgqlc.types.non_null(AddAssigneesToAssignableInput), graphql_name="input", default=None)),) + ), + ) + """Adds assignees to an assignable object. + + Arguments: + + * `input` (`AddAssigneesToAssignableInput!`): Parameters for + AddAssigneesToAssignable + """ + + add_comment = sgqlc.types.Field( + AddCommentPayload, + graphql_name="addComment", + args=sgqlc.types.ArgDict((("input", sgqlc.types.Arg(sgqlc.types.non_null(AddCommentInput), graphql_name="input", default=None)),)), + ) + """Adds a comment to an Issue or Pull Request. 
+ + Arguments: + + * `input` (`AddCommentInput!`): Parameters for AddComment + """ + + add_discussion_comment = sgqlc.types.Field( + AddDiscussionCommentPayload, + graphql_name="addDiscussionComment", + args=sgqlc.types.ArgDict( + (("input", sgqlc.types.Arg(sgqlc.types.non_null(AddDiscussionCommentInput), graphql_name="input", default=None)),) + ), + ) + """Adds a comment to a Discussion, possibly as a reply to another + comment. + + Arguments: + + * `input` (`AddDiscussionCommentInput!`): Parameters for + AddDiscussionComment + """ + + add_discussion_poll_vote = sgqlc.types.Field( + AddDiscussionPollVotePayload, + graphql_name="addDiscussionPollVote", + args=sgqlc.types.ArgDict( + (("input", sgqlc.types.Arg(sgqlc.types.non_null(AddDiscussionPollVoteInput), graphql_name="input", default=None)),) + ), + ) + """Vote for an option in a discussion poll. + + Arguments: + + * `input` (`AddDiscussionPollVoteInput!`): Parameters for + AddDiscussionPollVote + """ + + add_enterprise_support_entitlement = sgqlc.types.Field( + AddEnterpriseSupportEntitlementPayload, + graphql_name="addEnterpriseSupportEntitlement", + args=sgqlc.types.ArgDict( + (("input", sgqlc.types.Arg(sgqlc.types.non_null(AddEnterpriseSupportEntitlementInput), graphql_name="input", default=None)),) + ), + ) + """Adds a support entitlement to an enterprise member. + + Arguments: + + * `input` (`AddEnterpriseSupportEntitlementInput!`): Parameters + for AddEnterpriseSupportEntitlement + """ + + add_labels_to_labelable = sgqlc.types.Field( + AddLabelsToLabelablePayload, + graphql_name="addLabelsToLabelable", + args=sgqlc.types.ArgDict( + (("input", sgqlc.types.Arg(sgqlc.types.non_null(AddLabelsToLabelableInput), graphql_name="input", default=None)),) + ), + ) + """Adds labels to a labelable object. 
+ + Arguments: + + * `input` (`AddLabelsToLabelableInput!`): Parameters for + AddLabelsToLabelable + """ + + add_project_card = sgqlc.types.Field( + AddProjectCardPayload, + graphql_name="addProjectCard", + args=sgqlc.types.ArgDict( + (("input", sgqlc.types.Arg(sgqlc.types.non_null(AddProjectCardInput), graphql_name="input", default=None)),) + ), + ) + """Adds a card to a ProjectColumn. Either `contentId` or `note` must + be provided but **not** both. + + Arguments: + + * `input` (`AddProjectCardInput!`): Parameters for AddProjectCard + """ + + add_project_column = sgqlc.types.Field( + AddProjectColumnPayload, + graphql_name="addProjectColumn", + args=sgqlc.types.ArgDict( + (("input", sgqlc.types.Arg(sgqlc.types.non_null(AddProjectColumnInput), graphql_name="input", default=None)),) + ), + ) + """Adds a column to a Project. + + Arguments: + + * `input` (`AddProjectColumnInput!`): Parameters for + AddProjectColumn + """ + + add_project_draft_issue = sgqlc.types.Field( + AddProjectDraftIssuePayload, + graphql_name="addProjectDraftIssue", + args=sgqlc.types.ArgDict( + (("input", sgqlc.types.Arg(sgqlc.types.non_null(AddProjectDraftIssueInput), graphql_name="input", default=None)),) + ), + ) + """Creates a new draft issue and add it to a Project. + + Arguments: + + * `input` (`AddProjectDraftIssueInput!`): Parameters for + AddProjectDraftIssue + """ + + add_project_next_item = sgqlc.types.Field( + AddProjectNextItemPayload, + graphql_name="addProjectNextItem", + args=sgqlc.types.ArgDict( + (("input", sgqlc.types.Arg(sgqlc.types.non_null(AddProjectNextItemInput), graphql_name="input", default=None)),) + ), + ) + """Adds an existing item (Issue or PullRequest) to a Project. 
+ + Arguments: + + * `input` (`AddProjectNextItemInput!`): Parameters for + AddProjectNextItem + """ + + add_pull_request_review = sgqlc.types.Field( + AddPullRequestReviewPayload, + graphql_name="addPullRequestReview", + args=sgqlc.types.ArgDict( + (("input", sgqlc.types.Arg(sgqlc.types.non_null(AddPullRequestReviewInput), graphql_name="input", default=None)),) + ), + ) + """Adds a review to a Pull Request. + + Arguments: + + * `input` (`AddPullRequestReviewInput!`): Parameters for + AddPullRequestReview + """ + + add_pull_request_review_comment = sgqlc.types.Field( + AddPullRequestReviewCommentPayload, + graphql_name="addPullRequestReviewComment", + args=sgqlc.types.ArgDict( + (("input", sgqlc.types.Arg(sgqlc.types.non_null(AddPullRequestReviewCommentInput), graphql_name="input", default=None)),) + ), + ) + """Adds a comment to a review. + + Arguments: + + * `input` (`AddPullRequestReviewCommentInput!`): Parameters for + AddPullRequestReviewComment + """ + + add_pull_request_review_thread = sgqlc.types.Field( + AddPullRequestReviewThreadPayload, + graphql_name="addPullRequestReviewThread", + args=sgqlc.types.ArgDict( + (("input", sgqlc.types.Arg(sgqlc.types.non_null(AddPullRequestReviewThreadInput), graphql_name="input", default=None)),) + ), + ) + """Adds a new thread to a pending Pull Request Review. + + Arguments: + + * `input` (`AddPullRequestReviewThreadInput!`): Parameters for + AddPullRequestReviewThread + """ + + add_reaction = sgqlc.types.Field( + AddReactionPayload, + graphql_name="addReaction", + args=sgqlc.types.ArgDict((("input", sgqlc.types.Arg(sgqlc.types.non_null(AddReactionInput), graphql_name="input", default=None)),)), + ) + """Adds a reaction to a subject. 
+ + Arguments: + + * `input` (`AddReactionInput!`): Parameters for AddReaction + """ + + add_star = sgqlc.types.Field( + AddStarPayload, + graphql_name="addStar", + args=sgqlc.types.ArgDict((("input", sgqlc.types.Arg(sgqlc.types.non_null(AddStarInput), graphql_name="input", default=None)),)), + ) + """Adds a star to a Starrable. + + Arguments: + + * `input` (`AddStarInput!`): Parameters for AddStar + """ + + add_upvote = sgqlc.types.Field( + AddUpvotePayload, + graphql_name="addUpvote", + args=sgqlc.types.ArgDict((("input", sgqlc.types.Arg(sgqlc.types.non_null(AddUpvoteInput), graphql_name="input", default=None)),)), + ) + """Add an upvote to a discussion or discussion comment. + + Arguments: + + * `input` (`AddUpvoteInput!`): Parameters for AddUpvote + """ + + add_verifiable_domain = sgqlc.types.Field( + AddVerifiableDomainPayload, + graphql_name="addVerifiableDomain", + args=sgqlc.types.ArgDict( + (("input", sgqlc.types.Arg(sgqlc.types.non_null(AddVerifiableDomainInput), graphql_name="input", default=None)),) + ), + ) + """Adds a verifiable domain to an owning account. + + Arguments: + + * `input` (`AddVerifiableDomainInput!`): Parameters for + AddVerifiableDomain + """ + + approve_deployments = sgqlc.types.Field( + ApproveDeploymentsPayload, + graphql_name="approveDeployments", + args=sgqlc.types.ArgDict( + (("input", sgqlc.types.Arg(sgqlc.types.non_null(ApproveDeploymentsInput), graphql_name="input", default=None)),) + ), + ) + """Approve all pending deployments under one or more environments + + Arguments: + + * `input` (`ApproveDeploymentsInput!`): Parameters for + ApproveDeployments + """ + + approve_verifiable_domain = sgqlc.types.Field( + ApproveVerifiableDomainPayload, + graphql_name="approveVerifiableDomain", + args=sgqlc.types.ArgDict( + (("input", sgqlc.types.Arg(sgqlc.types.non_null(ApproveVerifiableDomainInput), graphql_name="input", default=None)),) + ), + ) + """Approve a verifiable domain for notification delivery. 
+ + Arguments: + + * `input` (`ApproveVerifiableDomainInput!`): Parameters for + ApproveVerifiableDomain + """ + + archive_repository = sgqlc.types.Field( + ArchiveRepositoryPayload, + graphql_name="archiveRepository", + args=sgqlc.types.ArgDict( + (("input", sgqlc.types.Arg(sgqlc.types.non_null(ArchiveRepositoryInput), graphql_name="input", default=None)),) + ), + ) + """Marks a repository as archived. + + Arguments: + + * `input` (`ArchiveRepositoryInput!`): Parameters for + ArchiveRepository + """ + + cancel_enterprise_admin_invitation = sgqlc.types.Field( + CancelEnterpriseAdminInvitationPayload, + graphql_name="cancelEnterpriseAdminInvitation", + args=sgqlc.types.ArgDict( + (("input", sgqlc.types.Arg(sgqlc.types.non_null(CancelEnterpriseAdminInvitationInput), graphql_name="input", default=None)),) + ), + ) + """Cancels a pending invitation for an administrator to join an + enterprise. + + Arguments: + + * `input` (`CancelEnterpriseAdminInvitationInput!`): Parameters + for CancelEnterpriseAdminInvitation + """ + + cancel_sponsorship = sgqlc.types.Field( + CancelSponsorshipPayload, + graphql_name="cancelSponsorship", + args=sgqlc.types.ArgDict( + (("input", sgqlc.types.Arg(sgqlc.types.non_null(CancelSponsorshipInput), graphql_name="input", default=None)),) + ), + ) + """Cancel an active sponsorship. + + Arguments: + + * `input` (`CancelSponsorshipInput!`): Parameters for + CancelSponsorship + """ + + change_user_status = sgqlc.types.Field( + ChangeUserStatusPayload, + graphql_name="changeUserStatus", + args=sgqlc.types.ArgDict( + (("input", sgqlc.types.Arg(sgqlc.types.non_null(ChangeUserStatusInput), graphql_name="input", default=None)),) + ), + ) + """Update your status on GitHub. 
+ + Arguments: + + * `input` (`ChangeUserStatusInput!`): Parameters for + ChangeUserStatus + """ + + clear_labels_from_labelable = sgqlc.types.Field( + ClearLabelsFromLabelablePayload, + graphql_name="clearLabelsFromLabelable", + args=sgqlc.types.ArgDict( + (("input", sgqlc.types.Arg(sgqlc.types.non_null(ClearLabelsFromLabelableInput), graphql_name="input", default=None)),) + ), + ) + """Clears all labels from a labelable object. + + Arguments: + + * `input` (`ClearLabelsFromLabelableInput!`): Parameters for + ClearLabelsFromLabelable + """ + + clone_project = sgqlc.types.Field( + CloneProjectPayload, + graphql_name="cloneProject", + args=sgqlc.types.ArgDict( + (("input", sgqlc.types.Arg(sgqlc.types.non_null(CloneProjectInput), graphql_name="input", default=None)),) + ), + ) + """Creates a new project by cloning configuration from an existing + project. + + Arguments: + + * `input` (`CloneProjectInput!`): Parameters for CloneProject + """ + + clone_template_repository = sgqlc.types.Field( + CloneTemplateRepositoryPayload, + graphql_name="cloneTemplateRepository", + args=sgqlc.types.ArgDict( + (("input", sgqlc.types.Arg(sgqlc.types.non_null(CloneTemplateRepositoryInput), graphql_name="input", default=None)),) + ), + ) + """Create a new repository with the same files and directory + structure as a template repository. + + Arguments: + + * `input` (`CloneTemplateRepositoryInput!`): Parameters for + CloneTemplateRepository + """ + + close_issue = sgqlc.types.Field( + CloseIssuePayload, + graphql_name="closeIssue", + args=sgqlc.types.ArgDict((("input", sgqlc.types.Arg(sgqlc.types.non_null(CloseIssueInput), graphql_name="input", default=None)),)), + ) + """Close an issue. 
+ + Arguments: + + * `input` (`CloseIssueInput!`): Parameters for CloseIssue + """ + + close_pull_request = sgqlc.types.Field( + ClosePullRequestPayload, + graphql_name="closePullRequest", + args=sgqlc.types.ArgDict( + (("input", sgqlc.types.Arg(sgqlc.types.non_null(ClosePullRequestInput), graphql_name="input", default=None)),) + ), + ) + """Close a pull request. + + Arguments: + + * `input` (`ClosePullRequestInput!`): Parameters for + ClosePullRequest + """ + + convert_project_card_note_to_issue = sgqlc.types.Field( + ConvertProjectCardNoteToIssuePayload, + graphql_name="convertProjectCardNoteToIssue", + args=sgqlc.types.ArgDict( + (("input", sgqlc.types.Arg(sgqlc.types.non_null(ConvertProjectCardNoteToIssueInput), graphql_name="input", default=None)),) + ), + ) + """Convert a project note card to one associated with a newly created + issue. + + Arguments: + + * `input` (`ConvertProjectCardNoteToIssueInput!`): Parameters for + ConvertProjectCardNoteToIssue + """ + + convert_pull_request_to_draft = sgqlc.types.Field( + ConvertPullRequestToDraftPayload, + graphql_name="convertPullRequestToDraft", + args=sgqlc.types.ArgDict( + (("input", sgqlc.types.Arg(sgqlc.types.non_null(ConvertPullRequestToDraftInput), graphql_name="input", default=None)),) + ), + ) + """Converts a pull request to draft + + Arguments: + + * `input` (`ConvertPullRequestToDraftInput!`): Parameters for + ConvertPullRequestToDraft + """ + + create_branch_protection_rule = sgqlc.types.Field( + CreateBranchProtectionRulePayload, + graphql_name="createBranchProtectionRule", + args=sgqlc.types.ArgDict( + (("input", sgqlc.types.Arg(sgqlc.types.non_null(CreateBranchProtectionRuleInput), graphql_name="input", default=None)),) + ), + ) + """Create a new branch protection rule + + Arguments: + + * `input` (`CreateBranchProtectionRuleInput!`): Parameters for + CreateBranchProtectionRule + """ + + create_check_run = sgqlc.types.Field( + CreateCheckRunPayload, + graphql_name="createCheckRun", + 
args=sgqlc.types.ArgDict( + (("input", sgqlc.types.Arg(sgqlc.types.non_null(CreateCheckRunInput), graphql_name="input", default=None)),) + ), + ) + """Create a check run. + + Arguments: + + * `input` (`CreateCheckRunInput!`): Parameters for CreateCheckRun + """ + + create_check_suite = sgqlc.types.Field( + CreateCheckSuitePayload, + graphql_name="createCheckSuite", + args=sgqlc.types.ArgDict( + (("input", sgqlc.types.Arg(sgqlc.types.non_null(CreateCheckSuiteInput), graphql_name="input", default=None)),) + ), + ) + """Create a check suite + + Arguments: + + * `input` (`CreateCheckSuiteInput!`): Parameters for + CreateCheckSuite + """ + + create_commit_on_branch = sgqlc.types.Field( + CreateCommitOnBranchPayload, + graphql_name="createCommitOnBranch", + args=sgqlc.types.ArgDict( + (("input", sgqlc.types.Arg(sgqlc.types.non_null(CreateCommitOnBranchInput), graphql_name="input", default=None)),) + ), + ) + """Appends a commit to the given branch as the authenticated user. + This mutation creates a commit whose parent is the HEAD of the + provided branch and also updates that branch to point to the new + commit. It can be thought of as similar to `git commit`. ### + Locating a Branch Commits are appended to a `branch` of type + `Ref`. This must refer to a git branch (i.e. the fully qualified + path must begin with `refs/heads/`, although including this prefix + is optional. Callers may specify the `branch` to commit to either + by its global node ID or by passing both of + `repositoryNameWithOwner` and `refName`. For more details see the + documentation for `CommittableBranch`. ### Describing Changes + `fileChanges` are specified as a `FilesChanges` object describing + `FileAdditions` and `FileDeletions`. Please see the documentation + for `FileChanges` for more information on how to use this argument + to describe any set of file changes. 
### Authorship Similar to + the web commit interface, this mutation does not support + specifying the author or committer of the commit and will not add + support for this in the future. A commit created by a successful + execution of this mutation will be authored by the owner of the + credential which authenticates the API request. The committer + will be identical to that of commits authored using the web + interface. If you need full control over author and committer + information, please use the Git Database REST API instead. ### + Commit Signing Commits made using this mutation are automatically + signed by GitHub if supported and will be marked as verified in + the user interface. + + Arguments: + + * `input` (`CreateCommitOnBranchInput!`): Parameters for + CreateCommitOnBranch + """ + + create_discussion = sgqlc.types.Field( + CreateDiscussionPayload, + graphql_name="createDiscussion", + args=sgqlc.types.ArgDict( + (("input", sgqlc.types.Arg(sgqlc.types.non_null(CreateDiscussionInput), graphql_name="input", default=None)),) + ), + ) + """Create a discussion. + + Arguments: + + * `input` (`CreateDiscussionInput!`): Parameters for + CreateDiscussion + """ + + create_enterprise_organization = sgqlc.types.Field( + CreateEnterpriseOrganizationPayload, + graphql_name="createEnterpriseOrganization", + args=sgqlc.types.ArgDict( + (("input", sgqlc.types.Arg(sgqlc.types.non_null(CreateEnterpriseOrganizationInput), graphql_name="input", default=None)),) + ), + ) + """Creates an organization as part of an enterprise account. + + Arguments: + + * `input` (`CreateEnterpriseOrganizationInput!`): Parameters for + CreateEnterpriseOrganization + """ + + create_environment = sgqlc.types.Field( + CreateEnvironmentPayload, + graphql_name="createEnvironment", + args=sgqlc.types.ArgDict( + (("input", sgqlc.types.Arg(sgqlc.types.non_null(CreateEnvironmentInput), graphql_name="input", default=None)),) + ), + ) + """Creates an environment or simply returns it if already exists. 
+ + Arguments: + + * `input` (`CreateEnvironmentInput!`): Parameters for + CreateEnvironment + """ + + create_ip_allow_list_entry = sgqlc.types.Field( + CreateIpAllowListEntryPayload, + graphql_name="createIpAllowListEntry", + args=sgqlc.types.ArgDict( + (("input", sgqlc.types.Arg(sgqlc.types.non_null(CreateIpAllowListEntryInput), graphql_name="input", default=None)),) + ), + ) + """Creates a new IP allow list entry. + + Arguments: + + * `input` (`CreateIpAllowListEntryInput!`): Parameters for + CreateIpAllowListEntry + """ + + create_issue = sgqlc.types.Field( + CreateIssuePayload, + graphql_name="createIssue", + args=sgqlc.types.ArgDict((("input", sgqlc.types.Arg(sgqlc.types.non_null(CreateIssueInput), graphql_name="input", default=None)),)), + ) + """Creates a new issue. + + Arguments: + + * `input` (`CreateIssueInput!`): Parameters for CreateIssue + """ + + create_migration_source = sgqlc.types.Field( + CreateMigrationSourcePayload, + graphql_name="createMigrationSource", + args=sgqlc.types.ArgDict( + (("input", sgqlc.types.Arg(sgqlc.types.non_null(CreateMigrationSourceInput), graphql_name="input", default=None)),) + ), + ) + """Creates an Octoshift migration source. + + Arguments: + + * `input` (`CreateMigrationSourceInput!`): Parameters for + CreateMigrationSource + """ + + create_project = sgqlc.types.Field( + CreateProjectPayload, + graphql_name="createProject", + args=sgqlc.types.ArgDict( + (("input", sgqlc.types.Arg(sgqlc.types.non_null(CreateProjectInput), graphql_name="input", default=None)),) + ), + ) + """Creates a new project. 
+ + Arguments: + + * `input` (`CreateProjectInput!`): Parameters for CreateProject + """ + + create_pull_request = sgqlc.types.Field( + CreatePullRequestPayload, + graphql_name="createPullRequest", + args=sgqlc.types.ArgDict( + (("input", sgqlc.types.Arg(sgqlc.types.non_null(CreatePullRequestInput), graphql_name="input", default=None)),) + ), + ) + """Create a new pull request + + Arguments: + + * `input` (`CreatePullRequestInput!`): Parameters for + CreatePullRequest + """ + + create_ref = sgqlc.types.Field( + CreateRefPayload, + graphql_name="createRef", + args=sgqlc.types.ArgDict((("input", sgqlc.types.Arg(sgqlc.types.non_null(CreateRefInput), graphql_name="input", default=None)),)), + ) + """Create a new Git Ref. + + Arguments: + + * `input` (`CreateRefInput!`): Parameters for CreateRef + """ + + create_repository = sgqlc.types.Field( + CreateRepositoryPayload, + graphql_name="createRepository", + args=sgqlc.types.ArgDict( + (("input", sgqlc.types.Arg(sgqlc.types.non_null(CreateRepositoryInput), graphql_name="input", default=None)),) + ), + ) + """Create a new repository. + + Arguments: + + * `input` (`CreateRepositoryInput!`): Parameters for + CreateRepository + """ + + create_sponsors_tier = sgqlc.types.Field( + CreateSponsorsTierPayload, + graphql_name="createSponsorsTier", + args=sgqlc.types.ArgDict( + (("input", sgqlc.types.Arg(sgqlc.types.non_null(CreateSponsorsTierInput), graphql_name="input", default=None)),) + ), + ) + """Create a new payment tier for your GitHub Sponsors profile. + + Arguments: + + * `input` (`CreateSponsorsTierInput!`): Parameters for + CreateSponsorsTier + """ + + create_sponsorship = sgqlc.types.Field( + CreateSponsorshipPayload, + graphql_name="createSponsorship", + args=sgqlc.types.ArgDict( + (("input", sgqlc.types.Arg(sgqlc.types.non_null(CreateSponsorshipInput), graphql_name="input", default=None)),) + ), + ) + """Start a new sponsorship of a maintainer in GitHub Sponsors, or + reactivate a past sponsorship. 
+ + Arguments: + + * `input` (`CreateSponsorshipInput!`): Parameters for + CreateSponsorship + """ + + create_team_discussion = sgqlc.types.Field( + CreateTeamDiscussionPayload, + graphql_name="createTeamDiscussion", + args=sgqlc.types.ArgDict( + (("input", sgqlc.types.Arg(sgqlc.types.non_null(CreateTeamDiscussionInput), graphql_name="input", default=None)),) + ), + ) + """Creates a new team discussion. + + Arguments: + + * `input` (`CreateTeamDiscussionInput!`): Parameters for + CreateTeamDiscussion + """ + + create_team_discussion_comment = sgqlc.types.Field( + CreateTeamDiscussionCommentPayload, + graphql_name="createTeamDiscussionComment", + args=sgqlc.types.ArgDict( + (("input", sgqlc.types.Arg(sgqlc.types.non_null(CreateTeamDiscussionCommentInput), graphql_name="input", default=None)),) + ), + ) + """Creates a new team discussion comment. + + Arguments: + + * `input` (`CreateTeamDiscussionCommentInput!`): Parameters for + CreateTeamDiscussionComment + """ + + decline_topic_suggestion = sgqlc.types.Field( + DeclineTopicSuggestionPayload, + graphql_name="declineTopicSuggestion", + args=sgqlc.types.ArgDict( + (("input", sgqlc.types.Arg(sgqlc.types.non_null(DeclineTopicSuggestionInput), graphql_name="input", default=None)),) + ), + ) + """Rejects a suggested topic for the repository. 
+ + Arguments: + + * `input` (`DeclineTopicSuggestionInput!`): Parameters for + DeclineTopicSuggestion + """ + + delete_branch_protection_rule = sgqlc.types.Field( + DeleteBranchProtectionRulePayload, + graphql_name="deleteBranchProtectionRule", + args=sgqlc.types.ArgDict( + (("input", sgqlc.types.Arg(sgqlc.types.non_null(DeleteBranchProtectionRuleInput), graphql_name="input", default=None)),) + ), + ) + """Delete a branch protection rule + + Arguments: + + * `input` (`DeleteBranchProtectionRuleInput!`): Parameters for + DeleteBranchProtectionRule + """ + + delete_deployment = sgqlc.types.Field( + DeleteDeploymentPayload, + graphql_name="deleteDeployment", + args=sgqlc.types.ArgDict( + (("input", sgqlc.types.Arg(sgqlc.types.non_null(DeleteDeploymentInput), graphql_name="input", default=None)),) + ), + ) + """Deletes a deployment. + + Arguments: + + * `input` (`DeleteDeploymentInput!`): Parameters for + DeleteDeployment + """ + + delete_discussion = sgqlc.types.Field( + DeleteDiscussionPayload, + graphql_name="deleteDiscussion", + args=sgqlc.types.ArgDict( + (("input", sgqlc.types.Arg(sgqlc.types.non_null(DeleteDiscussionInput), graphql_name="input", default=None)),) + ), + ) + """Delete a discussion and all of its replies. + + Arguments: + + * `input` (`DeleteDiscussionInput!`): Parameters for + DeleteDiscussion + """ + + delete_discussion_comment = sgqlc.types.Field( + DeleteDiscussionCommentPayload, + graphql_name="deleteDiscussionComment", + args=sgqlc.types.ArgDict( + (("input", sgqlc.types.Arg(sgqlc.types.non_null(DeleteDiscussionCommentInput), graphql_name="input", default=None)),) + ), + ) + """Delete a discussion comment. If it has replies, wipe it instead. 
+ + Arguments: + + * `input` (`DeleteDiscussionCommentInput!`): Parameters for + DeleteDiscussionComment + """ + + delete_environment = sgqlc.types.Field( + DeleteEnvironmentPayload, + graphql_name="deleteEnvironment", + args=sgqlc.types.ArgDict( + (("input", sgqlc.types.Arg(sgqlc.types.non_null(DeleteEnvironmentInput), graphql_name="input", default=None)),) + ), + ) + """Deletes an environment + + Arguments: + + * `input` (`DeleteEnvironmentInput!`): Parameters for + DeleteEnvironment + """ + + delete_ip_allow_list_entry = sgqlc.types.Field( + DeleteIpAllowListEntryPayload, + graphql_name="deleteIpAllowListEntry", + args=sgqlc.types.ArgDict( + (("input", sgqlc.types.Arg(sgqlc.types.non_null(DeleteIpAllowListEntryInput), graphql_name="input", default=None)),) + ), + ) + """Deletes an IP allow list entry. + + Arguments: + + * `input` (`DeleteIpAllowListEntryInput!`): Parameters for + DeleteIpAllowListEntry + """ + + delete_issue = sgqlc.types.Field( + DeleteIssuePayload, + graphql_name="deleteIssue", + args=sgqlc.types.ArgDict((("input", sgqlc.types.Arg(sgqlc.types.non_null(DeleteIssueInput), graphql_name="input", default=None)),)), + ) + """Deletes an Issue object. + + Arguments: + + * `input` (`DeleteIssueInput!`): Parameters for DeleteIssue + """ + + delete_issue_comment = sgqlc.types.Field( + DeleteIssueCommentPayload, + graphql_name="deleteIssueComment", + args=sgqlc.types.ArgDict( + (("input", sgqlc.types.Arg(sgqlc.types.non_null(DeleteIssueCommentInput), graphql_name="input", default=None)),) + ), + ) + """Deletes an IssueComment object. + + Arguments: + + * `input` (`DeleteIssueCommentInput!`): Parameters for + DeleteIssueComment + """ + + delete_project = sgqlc.types.Field( + DeleteProjectPayload, + graphql_name="deleteProject", + args=sgqlc.types.ArgDict( + (("input", sgqlc.types.Arg(sgqlc.types.non_null(DeleteProjectInput), graphql_name="input", default=None)),) + ), + ) + """Deletes a project. 
+ + Arguments: + + * `input` (`DeleteProjectInput!`): Parameters for DeleteProject + """ + + delete_project_card = sgqlc.types.Field( + DeleteProjectCardPayload, + graphql_name="deleteProjectCard", + args=sgqlc.types.ArgDict( + (("input", sgqlc.types.Arg(sgqlc.types.non_null(DeleteProjectCardInput), graphql_name="input", default=None)),) + ), + ) + """Deletes a project card. + + Arguments: + + * `input` (`DeleteProjectCardInput!`): Parameters for + DeleteProjectCard + """ + + delete_project_column = sgqlc.types.Field( + DeleteProjectColumnPayload, + graphql_name="deleteProjectColumn", + args=sgqlc.types.ArgDict( + (("input", sgqlc.types.Arg(sgqlc.types.non_null(DeleteProjectColumnInput), graphql_name="input", default=None)),) + ), + ) + """Deletes a project column. + + Arguments: + + * `input` (`DeleteProjectColumnInput!`): Parameters for + DeleteProjectColumn + """ + + delete_project_next_item = sgqlc.types.Field( + DeleteProjectNextItemPayload, + graphql_name="deleteProjectNextItem", + args=sgqlc.types.ArgDict( + (("input", sgqlc.types.Arg(sgqlc.types.non_null(DeleteProjectNextItemInput), graphql_name="input", default=None)),) + ), + ) + """Deletes an item from a Project. + + Arguments: + + * `input` (`DeleteProjectNextItemInput!`): Parameters for + DeleteProjectNextItem + """ + + delete_pull_request_review = sgqlc.types.Field( + DeletePullRequestReviewPayload, + graphql_name="deletePullRequestReview", + args=sgqlc.types.ArgDict( + (("input", sgqlc.types.Arg(sgqlc.types.non_null(DeletePullRequestReviewInput), graphql_name="input", default=None)),) + ), + ) + """Deletes a pull request review. 
+ + Arguments: + + * `input` (`DeletePullRequestReviewInput!`): Parameters for + DeletePullRequestReview + """ + + delete_pull_request_review_comment = sgqlc.types.Field( + DeletePullRequestReviewCommentPayload, + graphql_name="deletePullRequestReviewComment", + args=sgqlc.types.ArgDict( + (("input", sgqlc.types.Arg(sgqlc.types.non_null(DeletePullRequestReviewCommentInput), graphql_name="input", default=None)),) + ), + ) + """Deletes a pull request review comment. + + Arguments: + + * `input` (`DeletePullRequestReviewCommentInput!`): Parameters for + DeletePullRequestReviewComment + """ + + delete_ref = sgqlc.types.Field( + DeleteRefPayload, + graphql_name="deleteRef", + args=sgqlc.types.ArgDict((("input", sgqlc.types.Arg(sgqlc.types.non_null(DeleteRefInput), graphql_name="input", default=None)),)), + ) + """Delete a Git Ref. + + Arguments: + + * `input` (`DeleteRefInput!`): Parameters for DeleteRef + """ + + delete_team_discussion = sgqlc.types.Field( + DeleteTeamDiscussionPayload, + graphql_name="deleteTeamDiscussion", + args=sgqlc.types.ArgDict( + (("input", sgqlc.types.Arg(sgqlc.types.non_null(DeleteTeamDiscussionInput), graphql_name="input", default=None)),) + ), + ) + """Deletes a team discussion. + + Arguments: + + * `input` (`DeleteTeamDiscussionInput!`): Parameters for + DeleteTeamDiscussion + """ + + delete_team_discussion_comment = sgqlc.types.Field( + DeleteTeamDiscussionCommentPayload, + graphql_name="deleteTeamDiscussionComment", + args=sgqlc.types.ArgDict( + (("input", sgqlc.types.Arg(sgqlc.types.non_null(DeleteTeamDiscussionCommentInput), graphql_name="input", default=None)),) + ), + ) + """Deletes a team discussion comment. 
+ + Arguments: + + * `input` (`DeleteTeamDiscussionCommentInput!`): Parameters for + DeleteTeamDiscussionComment + """ + + delete_verifiable_domain = sgqlc.types.Field( + DeleteVerifiableDomainPayload, + graphql_name="deleteVerifiableDomain", + args=sgqlc.types.ArgDict( + (("input", sgqlc.types.Arg(sgqlc.types.non_null(DeleteVerifiableDomainInput), graphql_name="input", default=None)),) + ), + ) + """Deletes a verifiable domain. + + Arguments: + + * `input` (`DeleteVerifiableDomainInput!`): Parameters for + DeleteVerifiableDomain + """ + + disable_pull_request_auto_merge = sgqlc.types.Field( + DisablePullRequestAutoMergePayload, + graphql_name="disablePullRequestAutoMerge", + args=sgqlc.types.ArgDict( + (("input", sgqlc.types.Arg(sgqlc.types.non_null(DisablePullRequestAutoMergeInput), graphql_name="input", default=None)),) + ), + ) + """Disable auto merge on the given pull request + + Arguments: + + * `input` (`DisablePullRequestAutoMergeInput!`): Parameters for + DisablePullRequestAutoMerge + """ + + dismiss_pull_request_review = sgqlc.types.Field( + DismissPullRequestReviewPayload, + graphql_name="dismissPullRequestReview", + args=sgqlc.types.ArgDict( + (("input", sgqlc.types.Arg(sgqlc.types.non_null(DismissPullRequestReviewInput), graphql_name="input", default=None)),) + ), + ) + """Dismisses an approved or rejected pull request review. + + Arguments: + + * `input` (`DismissPullRequestReviewInput!`): Parameters for + DismissPullRequestReview + """ + + dismiss_repository_vulnerability_alert = sgqlc.types.Field( + DismissRepositoryVulnerabilityAlertPayload, + graphql_name="dismissRepositoryVulnerabilityAlert", + args=sgqlc.types.ArgDict( + ( + ( + "input", + sgqlc.types.Arg(sgqlc.types.non_null(DismissRepositoryVulnerabilityAlertInput), graphql_name="input", default=None), + ), + ) + ), + ) + """Dismisses the Dependabot alert. 
+ + Arguments: + + * `input` (`DismissRepositoryVulnerabilityAlertInput!`): + Parameters for DismissRepositoryVulnerabilityAlert + """ + + enable_pull_request_auto_merge = sgqlc.types.Field( + EnablePullRequestAutoMergePayload, + graphql_name="enablePullRequestAutoMerge", + args=sgqlc.types.ArgDict( + (("input", sgqlc.types.Arg(sgqlc.types.non_null(EnablePullRequestAutoMergeInput), graphql_name="input", default=None)),) + ), + ) + """Enable the default auto-merge on a pull request. + + Arguments: + + * `input` (`EnablePullRequestAutoMergeInput!`): Parameters for + EnablePullRequestAutoMerge + """ + + follow_organization = sgqlc.types.Field( + FollowOrganizationPayload, + graphql_name="followOrganization", + args=sgqlc.types.ArgDict( + (("input", sgqlc.types.Arg(sgqlc.types.non_null(FollowOrganizationInput), graphql_name="input", default=None)),) + ), + ) + """Follow an organization. + + Arguments: + + * `input` (`FollowOrganizationInput!`): Parameters for + FollowOrganization + """ + + follow_user = sgqlc.types.Field( + FollowUserPayload, + graphql_name="followUser", + args=sgqlc.types.ArgDict((("input", sgqlc.types.Arg(sgqlc.types.non_null(FollowUserInput), graphql_name="input", default=None)),)), + ) + """Follow a user. + + Arguments: + + * `input` (`FollowUserInput!`): Parameters for FollowUser + """ + + grant_enterprise_organizations_migrator_role = sgqlc.types.Field( + GrantEnterpriseOrganizationsMigratorRolePayload, + graphql_name="grantEnterpriseOrganizationsMigratorRole", + args=sgqlc.types.ArgDict( + ( + ( + "input", + sgqlc.types.Arg( + sgqlc.types.non_null(GrantEnterpriseOrganizationsMigratorRoleInput), graphql_name="input", default=None + ), + ), + ) + ), + ) + """Grant the migrator role to a user for all organizations under an + enterprise account. 
+ + Arguments: + + * `input` (`GrantEnterpriseOrganizationsMigratorRoleInput!`): + Parameters for GrantEnterpriseOrganizationsMigratorRole + """ + + grant_migrator_role = sgqlc.types.Field( + GrantMigratorRolePayload, + graphql_name="grantMigratorRole", + args=sgqlc.types.ArgDict( + (("input", sgqlc.types.Arg(sgqlc.types.non_null(GrantMigratorRoleInput), graphql_name="input", default=None)),) + ), + ) + """Grant the migrator role to a user or a team. + + Arguments: + + * `input` (`GrantMigratorRoleInput!`): Parameters for + GrantMigratorRole + """ + + invite_enterprise_admin = sgqlc.types.Field( + InviteEnterpriseAdminPayload, + graphql_name="inviteEnterpriseAdmin", + args=sgqlc.types.ArgDict( + (("input", sgqlc.types.Arg(sgqlc.types.non_null(InviteEnterpriseAdminInput), graphql_name="input", default=None)),) + ), + ) + """Invite someone to become an administrator of the enterprise. + + Arguments: + + * `input` (`InviteEnterpriseAdminInput!`): Parameters for + InviteEnterpriseAdmin + """ + + link_repository_to_project = sgqlc.types.Field( + LinkRepositoryToProjectPayload, + graphql_name="linkRepositoryToProject", + args=sgqlc.types.ArgDict( + (("input", sgqlc.types.Arg(sgqlc.types.non_null(LinkRepositoryToProjectInput), graphql_name="input", default=None)),) + ), + ) + """Creates a repository link for a project. 
+ + Arguments: + + * `input` (`LinkRepositoryToProjectInput!`): Parameters for + LinkRepositoryToProject + """ + + lock_lockable = sgqlc.types.Field( + LockLockablePayload, + graphql_name="lockLockable", + args=sgqlc.types.ArgDict( + (("input", sgqlc.types.Arg(sgqlc.types.non_null(LockLockableInput), graphql_name="input", default=None)),) + ), + ) + """Lock a lockable object + + Arguments: + + * `input` (`LockLockableInput!`): Parameters for LockLockable + """ + + mark_discussion_comment_as_answer = sgqlc.types.Field( + MarkDiscussionCommentAsAnswerPayload, + graphql_name="markDiscussionCommentAsAnswer", + args=sgqlc.types.ArgDict( + (("input", sgqlc.types.Arg(sgqlc.types.non_null(MarkDiscussionCommentAsAnswerInput), graphql_name="input", default=None)),) + ), + ) + """Mark a discussion comment as the chosen answer for discussions in + an answerable category. + + Arguments: + + * `input` (`MarkDiscussionCommentAsAnswerInput!`): Parameters for + MarkDiscussionCommentAsAnswer + """ + + mark_file_as_viewed = sgqlc.types.Field( + MarkFileAsViewedPayload, + graphql_name="markFileAsViewed", + args=sgqlc.types.ArgDict( + (("input", sgqlc.types.Arg(sgqlc.types.non_null(MarkFileAsViewedInput), graphql_name="input", default=None)),) + ), + ) + """Mark a pull request file as viewed + + Arguments: + + * `input` (`MarkFileAsViewedInput!`): Parameters for + MarkFileAsViewed + """ + + mark_pull_request_ready_for_review = sgqlc.types.Field( + MarkPullRequestReadyForReviewPayload, + graphql_name="markPullRequestReadyForReview", + args=sgqlc.types.ArgDict( + (("input", sgqlc.types.Arg(sgqlc.types.non_null(MarkPullRequestReadyForReviewInput), graphql_name="input", default=None)),) + ), + ) + """Marks a pull request ready for review. 
+ + Arguments: + + * `input` (`MarkPullRequestReadyForReviewInput!`): Parameters for + MarkPullRequestReadyForReview + """ + + merge_branch = sgqlc.types.Field( + MergeBranchPayload, + graphql_name="mergeBranch", + args=sgqlc.types.ArgDict((("input", sgqlc.types.Arg(sgqlc.types.non_null(MergeBranchInput), graphql_name="input", default=None)),)), + ) + """Merge a head into a branch. + + Arguments: + + * `input` (`MergeBranchInput!`): Parameters for MergeBranch + """ + + merge_pull_request = sgqlc.types.Field( + MergePullRequestPayload, + graphql_name="mergePullRequest", + args=sgqlc.types.ArgDict( + (("input", sgqlc.types.Arg(sgqlc.types.non_null(MergePullRequestInput), graphql_name="input", default=None)),) + ), + ) + """Merge a pull request. + + Arguments: + + * `input` (`MergePullRequestInput!`): Parameters for + MergePullRequest + """ + + minimize_comment = sgqlc.types.Field( + MinimizeCommentPayload, + graphql_name="minimizeComment", + args=sgqlc.types.ArgDict( + (("input", sgqlc.types.Arg(sgqlc.types.non_null(MinimizeCommentInput), graphql_name="input", default=None)),) + ), + ) + """Minimizes a comment on an Issue, Commit, Pull Request, or Gist + + Arguments: + + * `input` (`MinimizeCommentInput!`): Parameters for + MinimizeComment + """ + + move_project_card = sgqlc.types.Field( + MoveProjectCardPayload, + graphql_name="moveProjectCard", + args=sgqlc.types.ArgDict( + (("input", sgqlc.types.Arg(sgqlc.types.non_null(MoveProjectCardInput), graphql_name="input", default=None)),) + ), + ) + """Moves a project card to another place. + + Arguments: + + * `input` (`MoveProjectCardInput!`): Parameters for + MoveProjectCard + """ + + move_project_column = sgqlc.types.Field( + MoveProjectColumnPayload, + graphql_name="moveProjectColumn", + args=sgqlc.types.ArgDict( + (("input", sgqlc.types.Arg(sgqlc.types.non_null(MoveProjectColumnInput), graphql_name="input", default=None)),) + ), + ) + """Moves a project column to another place. 
+ + Arguments: + + * `input` (`MoveProjectColumnInput!`): Parameters for + MoveProjectColumn + """ + + pin_issue = sgqlc.types.Field( + "PinIssuePayload", + graphql_name="pinIssue", + args=sgqlc.types.ArgDict((("input", sgqlc.types.Arg(sgqlc.types.non_null(PinIssueInput), graphql_name="input", default=None)),)), + ) + """Pin an issue to a repository + + Arguments: + + * `input` (`PinIssueInput!`): Parameters for PinIssue + """ + + regenerate_enterprise_identity_provider_recovery_codes = sgqlc.types.Field( + "RegenerateEnterpriseIdentityProviderRecoveryCodesPayload", + graphql_name="regenerateEnterpriseIdentityProviderRecoveryCodes", + args=sgqlc.types.ArgDict( + ( + ( + "input", + sgqlc.types.Arg( + sgqlc.types.non_null(RegenerateEnterpriseIdentityProviderRecoveryCodesInput), graphql_name="input", default=None + ), + ), + ) + ), + ) + """Regenerates the identity provider recovery codes for an enterprise + + Arguments: + + * `input` + (`RegenerateEnterpriseIdentityProviderRecoveryCodesInput!`): + Parameters for RegenerateEnterpriseIdentityProviderRecoveryCodes + """ + + regenerate_verifiable_domain_token = sgqlc.types.Field( + "RegenerateVerifiableDomainTokenPayload", + graphql_name="regenerateVerifiableDomainToken", + args=sgqlc.types.ArgDict( + (("input", sgqlc.types.Arg(sgqlc.types.non_null(RegenerateVerifiableDomainTokenInput), graphql_name="input", default=None)),) + ), + ) + """Regenerates a verifiable domain's verification token. 
+ + Arguments: + + * `input` (`RegenerateVerifiableDomainTokenInput!`): Parameters + for RegenerateVerifiableDomainToken + """ + + reject_deployments = sgqlc.types.Field( + "RejectDeploymentsPayload", + graphql_name="rejectDeployments", + args=sgqlc.types.ArgDict( + (("input", sgqlc.types.Arg(sgqlc.types.non_null(RejectDeploymentsInput), graphql_name="input", default=None)),) + ), + ) + """Reject all pending deployments under one or more environments + + Arguments: + + * `input` (`RejectDeploymentsInput!`): Parameters for + RejectDeployments + """ + + remove_assignees_from_assignable = sgqlc.types.Field( + "RemoveAssigneesFromAssignablePayload", + graphql_name="removeAssigneesFromAssignable", + args=sgqlc.types.ArgDict( + (("input", sgqlc.types.Arg(sgqlc.types.non_null(RemoveAssigneesFromAssignableInput), graphql_name="input", default=None)),) + ), + ) + """Removes assignees from an assignable object. + + Arguments: + + * `input` (`RemoveAssigneesFromAssignableInput!`): Parameters for + RemoveAssigneesFromAssignable + """ + + remove_enterprise_admin = sgqlc.types.Field( + "RemoveEnterpriseAdminPayload", + graphql_name="removeEnterpriseAdmin", + args=sgqlc.types.ArgDict( + (("input", sgqlc.types.Arg(sgqlc.types.non_null(RemoveEnterpriseAdminInput), graphql_name="input", default=None)),) + ), + ) + """Removes an administrator from the enterprise. 
+ + Arguments: + + * `input` (`RemoveEnterpriseAdminInput!`): Parameters for + RemoveEnterpriseAdmin + """ + + remove_enterprise_identity_provider = sgqlc.types.Field( + "RemoveEnterpriseIdentityProviderPayload", + graphql_name="removeEnterpriseIdentityProvider", + args=sgqlc.types.ArgDict( + (("input", sgqlc.types.Arg(sgqlc.types.non_null(RemoveEnterpriseIdentityProviderInput), graphql_name="input", default=None)),) + ), + ) + """Removes the identity provider from an enterprise + + Arguments: + + * `input` (`RemoveEnterpriseIdentityProviderInput!`): Parameters + for RemoveEnterpriseIdentityProvider + """ + + remove_enterprise_organization = sgqlc.types.Field( + "RemoveEnterpriseOrganizationPayload", + graphql_name="removeEnterpriseOrganization", + args=sgqlc.types.ArgDict( + (("input", sgqlc.types.Arg(sgqlc.types.non_null(RemoveEnterpriseOrganizationInput), graphql_name="input", default=None)),) + ), + ) + """Removes an organization from the enterprise + + Arguments: + + * `input` (`RemoveEnterpriseOrganizationInput!`): Parameters for + RemoveEnterpriseOrganization + """ + + remove_enterprise_support_entitlement = sgqlc.types.Field( + "RemoveEnterpriseSupportEntitlementPayload", + graphql_name="removeEnterpriseSupportEntitlement", + args=sgqlc.types.ArgDict( + (("input", sgqlc.types.Arg(sgqlc.types.non_null(RemoveEnterpriseSupportEntitlementInput), graphql_name="input", default=None)),) + ), + ) + """Removes a support entitlement from an enterprise member. + + Arguments: + + * `input` (`RemoveEnterpriseSupportEntitlementInput!`): Parameters + for RemoveEnterpriseSupportEntitlement + """ + + remove_labels_from_labelable = sgqlc.types.Field( + "RemoveLabelsFromLabelablePayload", + graphql_name="removeLabelsFromLabelable", + args=sgqlc.types.ArgDict( + (("input", sgqlc.types.Arg(sgqlc.types.non_null(RemoveLabelsFromLabelableInput), graphql_name="input", default=None)),) + ), + ) + """Removes labels from a Labelable object. 
+ + Arguments: + + * `input` (`RemoveLabelsFromLabelableInput!`): Parameters for + RemoveLabelsFromLabelable + """ + + remove_outside_collaborator = sgqlc.types.Field( + "RemoveOutsideCollaboratorPayload", + graphql_name="removeOutsideCollaborator", + args=sgqlc.types.ArgDict( + (("input", sgqlc.types.Arg(sgqlc.types.non_null(RemoveOutsideCollaboratorInput), graphql_name="input", default=None)),) + ), + ) + """Removes outside collaborator from all repositories in an + organization. + + Arguments: + + * `input` (`RemoveOutsideCollaboratorInput!`): Parameters for + RemoveOutsideCollaborator + """ + + remove_reaction = sgqlc.types.Field( + "RemoveReactionPayload", + graphql_name="removeReaction", + args=sgqlc.types.ArgDict( + (("input", sgqlc.types.Arg(sgqlc.types.non_null(RemoveReactionInput), graphql_name="input", default=None)),) + ), + ) + """Removes a reaction from a subject. + + Arguments: + + * `input` (`RemoveReactionInput!`): Parameters for RemoveReaction + """ + + remove_star = sgqlc.types.Field( + "RemoveStarPayload", + graphql_name="removeStar", + args=sgqlc.types.ArgDict((("input", sgqlc.types.Arg(sgqlc.types.non_null(RemoveStarInput), graphql_name="input", default=None)),)), + ) + """Removes a star from a Starrable. + + Arguments: + + * `input` (`RemoveStarInput!`): Parameters for RemoveStar + """ + + remove_upvote = sgqlc.types.Field( + "RemoveUpvotePayload", + graphql_name="removeUpvote", + args=sgqlc.types.ArgDict( + (("input", sgqlc.types.Arg(sgqlc.types.non_null(RemoveUpvoteInput), graphql_name="input", default=None)),) + ), + ) + """Remove an upvote to a discussion or discussion comment. + + Arguments: + + * `input` (`RemoveUpvoteInput!`): Parameters for RemoveUpvote + """ + + reopen_issue = sgqlc.types.Field( + "ReopenIssuePayload", + graphql_name="reopenIssue", + args=sgqlc.types.ArgDict((("input", sgqlc.types.Arg(sgqlc.types.non_null(ReopenIssueInput), graphql_name="input", default=None)),)), + ) + """Reopen a issue. 
+ + Arguments: + + * `input` (`ReopenIssueInput!`): Parameters for ReopenIssue + """ + + reopen_pull_request = sgqlc.types.Field( + "ReopenPullRequestPayload", + graphql_name="reopenPullRequest", + args=sgqlc.types.ArgDict( + (("input", sgqlc.types.Arg(sgqlc.types.non_null(ReopenPullRequestInput), graphql_name="input", default=None)),) + ), + ) + """Reopen a pull request. + + Arguments: + + * `input` (`ReopenPullRequestInput!`): Parameters for + ReopenPullRequest + """ + + request_reviews = sgqlc.types.Field( + "RequestReviewsPayload", + graphql_name="requestReviews", + args=sgqlc.types.ArgDict( + (("input", sgqlc.types.Arg(sgqlc.types.non_null(RequestReviewsInput), graphql_name="input", default=None)),) + ), + ) + """Set review requests on a pull request. + + Arguments: + + * `input` (`RequestReviewsInput!`): Parameters for RequestReviews + """ + + rerequest_check_suite = sgqlc.types.Field( + "RerequestCheckSuitePayload", + graphql_name="rerequestCheckSuite", + args=sgqlc.types.ArgDict( + (("input", sgqlc.types.Arg(sgqlc.types.non_null(RerequestCheckSuiteInput), graphql_name="input", default=None)),) + ), + ) + """Rerequests an existing check suite. + + Arguments: + + * `input` (`RerequestCheckSuiteInput!`): Parameters for + RerequestCheckSuite + """ + + resolve_review_thread = sgqlc.types.Field( + "ResolveReviewThreadPayload", + graphql_name="resolveReviewThread", + args=sgqlc.types.ArgDict( + (("input", sgqlc.types.Arg(sgqlc.types.non_null(ResolveReviewThreadInput), graphql_name="input", default=None)),) + ), + ) + """Marks a review thread as resolved. 
+ + Arguments: + + * `input` (`ResolveReviewThreadInput!`): Parameters for + ResolveReviewThread + """ + + revoke_enterprise_organizations_migrator_role = sgqlc.types.Field( + "RevokeEnterpriseOrganizationsMigratorRolePayload", + graphql_name="revokeEnterpriseOrganizationsMigratorRole", + args=sgqlc.types.ArgDict( + ( + ( + "input", + sgqlc.types.Arg( + sgqlc.types.non_null(RevokeEnterpriseOrganizationsMigratorRoleInput), graphql_name="input", default=None + ), + ), + ) + ), + ) + """Revoke the migrator role to a user for all organizations under an + enterprise account. + + Arguments: + + * `input` (`RevokeEnterpriseOrganizationsMigratorRoleInput!`): + Parameters for RevokeEnterpriseOrganizationsMigratorRole + """ + + revoke_migrator_role = sgqlc.types.Field( + "RevokeMigratorRolePayload", + graphql_name="revokeMigratorRole", + args=sgqlc.types.ArgDict( + (("input", sgqlc.types.Arg(sgqlc.types.non_null(RevokeMigratorRoleInput), graphql_name="input", default=None)),) + ), + ) + """Revoke the migrator role from a user or a team. + + Arguments: + + * `input` (`RevokeMigratorRoleInput!`): Parameters for + RevokeMigratorRole + """ + + set_enterprise_identity_provider = sgqlc.types.Field( + "SetEnterpriseIdentityProviderPayload", + graphql_name="setEnterpriseIdentityProvider", + args=sgqlc.types.ArgDict( + (("input", sgqlc.types.Arg(sgqlc.types.non_null(SetEnterpriseIdentityProviderInput), graphql_name="input", default=None)),) + ), + ) + """Creates or updates the identity provider for an enterprise. 
+ + Arguments: + + * `input` (`SetEnterpriseIdentityProviderInput!`): Parameters for + SetEnterpriseIdentityProvider + """ + + set_organization_interaction_limit = sgqlc.types.Field( + "SetOrganizationInteractionLimitPayload", + graphql_name="setOrganizationInteractionLimit", + args=sgqlc.types.ArgDict( + (("input", sgqlc.types.Arg(sgqlc.types.non_null(SetOrganizationInteractionLimitInput), graphql_name="input", default=None)),) + ), + ) + """Set an organization level interaction limit for an organization's + public repositories. + + Arguments: + + * `input` (`SetOrganizationInteractionLimitInput!`): Parameters + for SetOrganizationInteractionLimit + """ + + set_repository_interaction_limit = sgqlc.types.Field( + "SetRepositoryInteractionLimitPayload", + graphql_name="setRepositoryInteractionLimit", + args=sgqlc.types.ArgDict( + (("input", sgqlc.types.Arg(sgqlc.types.non_null(SetRepositoryInteractionLimitInput), graphql_name="input", default=None)),) + ), + ) + """Sets an interaction limit setting for a repository. + + Arguments: + + * `input` (`SetRepositoryInteractionLimitInput!`): Parameters for + SetRepositoryInteractionLimit + """ + + set_user_interaction_limit = sgqlc.types.Field( + "SetUserInteractionLimitPayload", + graphql_name="setUserInteractionLimit", + args=sgqlc.types.ArgDict( + (("input", sgqlc.types.Arg(sgqlc.types.non_null(SetUserInteractionLimitInput), graphql_name="input", default=None)),) + ), + ) + """Set a user level interaction limit for an user's public + repositories. + + Arguments: + + * `input` (`SetUserInteractionLimitInput!`): Parameters for + SetUserInteractionLimit + """ + + start_repository_migration = sgqlc.types.Field( + "StartRepositoryMigrationPayload", + graphql_name="startRepositoryMigration", + args=sgqlc.types.ArgDict( + (("input", sgqlc.types.Arg(sgqlc.types.non_null(StartRepositoryMigrationInput), graphql_name="input", default=None)),) + ), + ) + """Start a repository migration. 
+ + Arguments: + + * `input` (`StartRepositoryMigrationInput!`): Parameters for + StartRepositoryMigration + """ + + submit_pull_request_review = sgqlc.types.Field( + "SubmitPullRequestReviewPayload", + graphql_name="submitPullRequestReview", + args=sgqlc.types.ArgDict( + (("input", sgqlc.types.Arg(sgqlc.types.non_null(SubmitPullRequestReviewInput), graphql_name="input", default=None)),) + ), + ) + """Submits a pending pull request review. + + Arguments: + + * `input` (`SubmitPullRequestReviewInput!`): Parameters for + SubmitPullRequestReview + """ + + transfer_issue = sgqlc.types.Field( + "TransferIssuePayload", + graphql_name="transferIssue", + args=sgqlc.types.ArgDict( + (("input", sgqlc.types.Arg(sgqlc.types.non_null(TransferIssueInput), graphql_name="input", default=None)),) + ), + ) + """Transfer an issue to a different repository + + Arguments: + + * `input` (`TransferIssueInput!`): Parameters for TransferIssue + """ + + unarchive_repository = sgqlc.types.Field( + "UnarchiveRepositoryPayload", + graphql_name="unarchiveRepository", + args=sgqlc.types.ArgDict( + (("input", sgqlc.types.Arg(sgqlc.types.non_null(UnarchiveRepositoryInput), graphql_name="input", default=None)),) + ), + ) + """Unarchives a repository. + + Arguments: + + * `input` (`UnarchiveRepositoryInput!`): Parameters for + UnarchiveRepository + """ + + unfollow_organization = sgqlc.types.Field( + "UnfollowOrganizationPayload", + graphql_name="unfollowOrganization", + args=sgqlc.types.ArgDict( + (("input", sgqlc.types.Arg(sgqlc.types.non_null(UnfollowOrganizationInput), graphql_name="input", default=None)),) + ), + ) + """Unfollow an organization. 
+ + Arguments: + + * `input` (`UnfollowOrganizationInput!`): Parameters for + UnfollowOrganization + """ + + unfollow_user = sgqlc.types.Field( + "UnfollowUserPayload", + graphql_name="unfollowUser", + args=sgqlc.types.ArgDict( + (("input", sgqlc.types.Arg(sgqlc.types.non_null(UnfollowUserInput), graphql_name="input", default=None)),) + ), + ) + """Unfollow a user. + + Arguments: + + * `input` (`UnfollowUserInput!`): Parameters for UnfollowUser + """ + + unlink_repository_from_project = sgqlc.types.Field( + "UnlinkRepositoryFromProjectPayload", + graphql_name="unlinkRepositoryFromProject", + args=sgqlc.types.ArgDict( + (("input", sgqlc.types.Arg(sgqlc.types.non_null(UnlinkRepositoryFromProjectInput), graphql_name="input", default=None)),) + ), + ) + """Deletes a repository link from a project. + + Arguments: + + * `input` (`UnlinkRepositoryFromProjectInput!`): Parameters for + UnlinkRepositoryFromProject + """ + + unlock_lockable = sgqlc.types.Field( + "UnlockLockablePayload", + graphql_name="unlockLockable", + args=sgqlc.types.ArgDict( + (("input", sgqlc.types.Arg(sgqlc.types.non_null(UnlockLockableInput), graphql_name="input", default=None)),) + ), + ) + """Unlock a lockable object + + Arguments: + + * `input` (`UnlockLockableInput!`): Parameters for UnlockLockable + """ + + unmark_discussion_comment_as_answer = sgqlc.types.Field( + "UnmarkDiscussionCommentAsAnswerPayload", + graphql_name="unmarkDiscussionCommentAsAnswer", + args=sgqlc.types.ArgDict( + (("input", sgqlc.types.Arg(sgqlc.types.non_null(UnmarkDiscussionCommentAsAnswerInput), graphql_name="input", default=None)),) + ), + ) + """Unmark a discussion comment as the chosen answer for discussions + in an answerable category. 
+ + Arguments: + + * `input` (`UnmarkDiscussionCommentAsAnswerInput!`): Parameters + for UnmarkDiscussionCommentAsAnswer + """ + + unmark_file_as_viewed = sgqlc.types.Field( + "UnmarkFileAsViewedPayload", + graphql_name="unmarkFileAsViewed", + args=sgqlc.types.ArgDict( + (("input", sgqlc.types.Arg(sgqlc.types.non_null(UnmarkFileAsViewedInput), graphql_name="input", default=None)),) + ), + ) + """Unmark a pull request file as viewed + + Arguments: + + * `input` (`UnmarkFileAsViewedInput!`): Parameters for + UnmarkFileAsViewed + """ + + unmark_issue_as_duplicate = sgqlc.types.Field( + "UnmarkIssueAsDuplicatePayload", + graphql_name="unmarkIssueAsDuplicate", + args=sgqlc.types.ArgDict( + (("input", sgqlc.types.Arg(sgqlc.types.non_null(UnmarkIssueAsDuplicateInput), graphql_name="input", default=None)),) + ), + ) + """Unmark an issue as a duplicate of another issue. + + Arguments: + + * `input` (`UnmarkIssueAsDuplicateInput!`): Parameters for + UnmarkIssueAsDuplicate + """ + + unminimize_comment = sgqlc.types.Field( + "UnminimizeCommentPayload", + graphql_name="unminimizeComment", + args=sgqlc.types.ArgDict( + (("input", sgqlc.types.Arg(sgqlc.types.non_null(UnminimizeCommentInput), graphql_name="input", default=None)),) + ), + ) + """Unminimizes a comment on an Issue, Commit, Pull Request, or Gist + + Arguments: + + * `input` (`UnminimizeCommentInput!`): Parameters for + UnminimizeComment + """ + + unpin_issue = sgqlc.types.Field( + "UnpinIssuePayload", + graphql_name="unpinIssue", + args=sgqlc.types.ArgDict((("input", sgqlc.types.Arg(sgqlc.types.non_null(UnpinIssueInput), graphql_name="input", default=None)),)), + ) + """Unpin a pinned issue from a repository + + Arguments: + + * `input` (`UnpinIssueInput!`): Parameters for UnpinIssue + """ + + unresolve_review_thread = sgqlc.types.Field( + "UnresolveReviewThreadPayload", + graphql_name="unresolveReviewThread", + args=sgqlc.types.ArgDict( + (("input", sgqlc.types.Arg(sgqlc.types.non_null(UnresolveReviewThreadInput), 
graphql_name="input", default=None)),) + ), + ) + """Marks a review thread as unresolved. + + Arguments: + + * `input` (`UnresolveReviewThreadInput!`): Parameters for + UnresolveReviewThread + """ + + update_branch_protection_rule = sgqlc.types.Field( + "UpdateBranchProtectionRulePayload", + graphql_name="updateBranchProtectionRule", + args=sgqlc.types.ArgDict( + (("input", sgqlc.types.Arg(sgqlc.types.non_null(UpdateBranchProtectionRuleInput), graphql_name="input", default=None)),) + ), + ) + """Create a new branch protection rule + + Arguments: + + * `input` (`UpdateBranchProtectionRuleInput!`): Parameters for + UpdateBranchProtectionRule + """ + + update_check_run = sgqlc.types.Field( + "UpdateCheckRunPayload", + graphql_name="updateCheckRun", + args=sgqlc.types.ArgDict( + (("input", sgqlc.types.Arg(sgqlc.types.non_null(UpdateCheckRunInput), graphql_name="input", default=None)),) + ), + ) + """Update a check run + + Arguments: + + * `input` (`UpdateCheckRunInput!`): Parameters for UpdateCheckRun + """ + + update_check_suite_preferences = sgqlc.types.Field( + "UpdateCheckSuitePreferencesPayload", + graphql_name="updateCheckSuitePreferences", + args=sgqlc.types.ArgDict( + (("input", sgqlc.types.Arg(sgqlc.types.non_null(UpdateCheckSuitePreferencesInput), graphql_name="input", default=None)),) + ), + ) + """Modifies the settings of an existing check suite + + Arguments: + + * `input` (`UpdateCheckSuitePreferencesInput!`): Parameters for + UpdateCheckSuitePreferences + """ + + update_discussion = sgqlc.types.Field( + "UpdateDiscussionPayload", + graphql_name="updateDiscussion", + args=sgqlc.types.ArgDict( + (("input", sgqlc.types.Arg(sgqlc.types.non_null(UpdateDiscussionInput), graphql_name="input", default=None)),) + ), + ) + """Update a discussion + + Arguments: + + * `input` (`UpdateDiscussionInput!`): Parameters for + UpdateDiscussion + """ + + update_discussion_comment = sgqlc.types.Field( + "UpdateDiscussionCommentPayload", + 
graphql_name="updateDiscussionComment", + args=sgqlc.types.ArgDict( + (("input", sgqlc.types.Arg(sgqlc.types.non_null(UpdateDiscussionCommentInput), graphql_name="input", default=None)),) + ), + ) + """Update the contents of a comment on a Discussion + + Arguments: + + * `input` (`UpdateDiscussionCommentInput!`): Parameters for + UpdateDiscussionComment + """ + + update_enterprise_administrator_role = sgqlc.types.Field( + "UpdateEnterpriseAdministratorRolePayload", + graphql_name="updateEnterpriseAdministratorRole", + args=sgqlc.types.ArgDict( + (("input", sgqlc.types.Arg(sgqlc.types.non_null(UpdateEnterpriseAdministratorRoleInput), graphql_name="input", default=None)),) + ), + ) + """Updates the role of an enterprise administrator. + + Arguments: + + * `input` (`UpdateEnterpriseAdministratorRoleInput!`): Parameters + for UpdateEnterpriseAdministratorRole + """ + + update_enterprise_allow_private_repository_forking_setting = sgqlc.types.Field( + "UpdateEnterpriseAllowPrivateRepositoryForkingSettingPayload", + graphql_name="updateEnterpriseAllowPrivateRepositoryForkingSetting", + args=sgqlc.types.ArgDict( + ( + ( + "input", + sgqlc.types.Arg( + sgqlc.types.non_null(UpdateEnterpriseAllowPrivateRepositoryForkingSettingInput), graphql_name="input", default=None + ), + ), + ) + ), + ) + """Sets whether private repository forks are enabled for an + enterprise. 
+ + Arguments: + + * `input` + (`UpdateEnterpriseAllowPrivateRepositoryForkingSettingInput!`): + Parameters for + UpdateEnterpriseAllowPrivateRepositoryForkingSetting + """ + + update_enterprise_default_repository_permission_setting = sgqlc.types.Field( + "UpdateEnterpriseDefaultRepositoryPermissionSettingPayload", + graphql_name="updateEnterpriseDefaultRepositoryPermissionSetting", + args=sgqlc.types.ArgDict( + ( + ( + "input", + sgqlc.types.Arg( + sgqlc.types.non_null(UpdateEnterpriseDefaultRepositoryPermissionSettingInput), graphql_name="input", default=None + ), + ), + ) + ), + ) + """Sets the base repository permission for organizations in an + enterprise. + + Arguments: + + * `input` + (`UpdateEnterpriseDefaultRepositoryPermissionSettingInput!`): + Parameters for + UpdateEnterpriseDefaultRepositoryPermissionSetting + """ + + update_enterprise_members_can_change_repository_visibility_setting = sgqlc.types.Field( + "UpdateEnterpriseMembersCanChangeRepositoryVisibilitySettingPayload", + graphql_name="updateEnterpriseMembersCanChangeRepositoryVisibilitySetting", + args=sgqlc.types.ArgDict( + ( + ( + "input", + sgqlc.types.Arg( + sgqlc.types.non_null(UpdateEnterpriseMembersCanChangeRepositoryVisibilitySettingInput), + graphql_name="input", + default=None, + ), + ), + ) + ), + ) + """Sets whether organization members with admin permissions on a + repository can change repository visibility. 
+ + Arguments: + + * `input` (`UpdateEnterpriseMembersCanChangeRepositoryVisibilitySe + ttingInput!`): Parameters for + UpdateEnterpriseMembersCanChangeRepositoryVisibilitySetting + """ + + update_enterprise_members_can_create_repositories_setting = sgqlc.types.Field( + "UpdateEnterpriseMembersCanCreateRepositoriesSettingPayload", + graphql_name="updateEnterpriseMembersCanCreateRepositoriesSetting", + args=sgqlc.types.ArgDict( + ( + ( + "input", + sgqlc.types.Arg( + sgqlc.types.non_null(UpdateEnterpriseMembersCanCreateRepositoriesSettingInput), graphql_name="input", default=None + ), + ), + ) + ), + ) + """Sets the members can create repositories setting for an + enterprise. + + Arguments: + + * `input` + (`UpdateEnterpriseMembersCanCreateRepositoriesSettingInput!`): + Parameters for + UpdateEnterpriseMembersCanCreateRepositoriesSetting + """ + + update_enterprise_members_can_delete_issues_setting = sgqlc.types.Field( + "UpdateEnterpriseMembersCanDeleteIssuesSettingPayload", + graphql_name="updateEnterpriseMembersCanDeleteIssuesSetting", + args=sgqlc.types.ArgDict( + ( + ( + "input", + sgqlc.types.Arg( + sgqlc.types.non_null(UpdateEnterpriseMembersCanDeleteIssuesSettingInput), graphql_name="input", default=None + ), + ), + ) + ), + ) + """Sets the members can delete issues setting for an enterprise. + + Arguments: + + * `input` (`UpdateEnterpriseMembersCanDeleteIssuesSettingInput!`): + Parameters for UpdateEnterpriseMembersCanDeleteIssuesSetting + """ + + update_enterprise_members_can_delete_repositories_setting = sgqlc.types.Field( + "UpdateEnterpriseMembersCanDeleteRepositoriesSettingPayload", + graphql_name="updateEnterpriseMembersCanDeleteRepositoriesSetting", + args=sgqlc.types.ArgDict( + ( + ( + "input", + sgqlc.types.Arg( + sgqlc.types.non_null(UpdateEnterpriseMembersCanDeleteRepositoriesSettingInput), graphql_name="input", default=None + ), + ), + ) + ), + ) + """Sets the members can delete repositories setting for an + enterprise. 
+ + Arguments: + + * `input` + (`UpdateEnterpriseMembersCanDeleteRepositoriesSettingInput!`): + Parameters for + UpdateEnterpriseMembersCanDeleteRepositoriesSetting + """ + + update_enterprise_members_can_invite_collaborators_setting = sgqlc.types.Field( + "UpdateEnterpriseMembersCanInviteCollaboratorsSettingPayload", + graphql_name="updateEnterpriseMembersCanInviteCollaboratorsSetting", + args=sgqlc.types.ArgDict( + ( + ( + "input", + sgqlc.types.Arg( + sgqlc.types.non_null(UpdateEnterpriseMembersCanInviteCollaboratorsSettingInput), graphql_name="input", default=None + ), + ), + ) + ), + ) + """Sets whether members can invite collaborators are enabled for an + enterprise. + + Arguments: + + * `input` + (`UpdateEnterpriseMembersCanInviteCollaboratorsSettingInput!`): + Parameters for + UpdateEnterpriseMembersCanInviteCollaboratorsSetting + """ + + update_enterprise_members_can_make_purchases_setting = sgqlc.types.Field( + "UpdateEnterpriseMembersCanMakePurchasesSettingPayload", + graphql_name="updateEnterpriseMembersCanMakePurchasesSetting", + args=sgqlc.types.ArgDict( + ( + ( + "input", + sgqlc.types.Arg( + sgqlc.types.non_null(UpdateEnterpriseMembersCanMakePurchasesSettingInput), graphql_name="input", default=None + ), + ), + ) + ), + ) + """Sets whether or not an organization admin can make purchases. 
+ + Arguments: + + * `input` + (`UpdateEnterpriseMembersCanMakePurchasesSettingInput!`): + Parameters for UpdateEnterpriseMembersCanMakePurchasesSetting + """ + + update_enterprise_members_can_update_protected_branches_setting = sgqlc.types.Field( + "UpdateEnterpriseMembersCanUpdateProtectedBranchesSettingPayload", + graphql_name="updateEnterpriseMembersCanUpdateProtectedBranchesSetting", + args=sgqlc.types.ArgDict( + ( + ( + "input", + sgqlc.types.Arg( + sgqlc.types.non_null(UpdateEnterpriseMembersCanUpdateProtectedBranchesSettingInput), + graphql_name="input", + default=None, + ), + ), + ) + ), + ) + """Sets the members can update protected branches setting for an + enterprise. + + Arguments: + + * `input` (`UpdateEnterpriseMembersCanUpdateProtectedBranchesSetti + ngInput!`): Parameters for + UpdateEnterpriseMembersCanUpdateProtectedBranchesSetting + """ + + update_enterprise_members_can_view_dependency_insights_setting = sgqlc.types.Field( + "UpdateEnterpriseMembersCanViewDependencyInsightsSettingPayload", + graphql_name="updateEnterpriseMembersCanViewDependencyInsightsSetting", + args=sgqlc.types.ArgDict( + ( + ( + "input", + sgqlc.types.Arg( + sgqlc.types.non_null(UpdateEnterpriseMembersCanViewDependencyInsightsSettingInput), + graphql_name="input", + default=None, + ), + ), + ) + ), + ) + """Sets the members can view dependency insights for an enterprise. 
+ + Arguments: + + * `input` (`UpdateEnterpriseMembersCanViewDependencyInsightsSettin + gInput!`): Parameters for + UpdateEnterpriseMembersCanViewDependencyInsightsSetting + """ + + update_enterprise_organization_projects_setting = sgqlc.types.Field( + "UpdateEnterpriseOrganizationProjectsSettingPayload", + graphql_name="updateEnterpriseOrganizationProjectsSetting", + args=sgqlc.types.ArgDict( + ( + ( + "input", + sgqlc.types.Arg( + sgqlc.types.non_null(UpdateEnterpriseOrganizationProjectsSettingInput), graphql_name="input", default=None + ), + ), + ) + ), + ) + """Sets whether organization projects are enabled for an enterprise. + + Arguments: + + * `input` (`UpdateEnterpriseOrganizationProjectsSettingInput!`): + Parameters for UpdateEnterpriseOrganizationProjectsSetting + """ + + update_enterprise_owner_organization_role = sgqlc.types.Field( + "UpdateEnterpriseOwnerOrganizationRolePayload", + graphql_name="updateEnterpriseOwnerOrganizationRole", + args=sgqlc.types.ArgDict( + ( + ( + "input", + sgqlc.types.Arg(sgqlc.types.non_null(UpdateEnterpriseOwnerOrganizationRoleInput), graphql_name="input", default=None), + ), + ) + ), + ) + """Updates the role of an enterprise owner with an organization. + + Arguments: + + * `input` (`UpdateEnterpriseOwnerOrganizationRoleInput!`): + Parameters for UpdateEnterpriseOwnerOrganizationRole + """ + + update_enterprise_profile = sgqlc.types.Field( + "UpdateEnterpriseProfilePayload", + graphql_name="updateEnterpriseProfile", + args=sgqlc.types.ArgDict( + (("input", sgqlc.types.Arg(sgqlc.types.non_null(UpdateEnterpriseProfileInput), graphql_name="input", default=None)),) + ), + ) + """Updates an enterprise's profile. 
+ + Arguments: + + * `input` (`UpdateEnterpriseProfileInput!`): Parameters for + UpdateEnterpriseProfile + """ + + update_enterprise_repository_projects_setting = sgqlc.types.Field( + "UpdateEnterpriseRepositoryProjectsSettingPayload", + graphql_name="updateEnterpriseRepositoryProjectsSetting", + args=sgqlc.types.ArgDict( + ( + ( + "input", + sgqlc.types.Arg( + sgqlc.types.non_null(UpdateEnterpriseRepositoryProjectsSettingInput), graphql_name="input", default=None + ), + ), + ) + ), + ) + """Sets whether repository projects are enabled for a enterprise. + + Arguments: + + * `input` (`UpdateEnterpriseRepositoryProjectsSettingInput!`): + Parameters for UpdateEnterpriseRepositoryProjectsSetting + """ + + update_enterprise_team_discussions_setting = sgqlc.types.Field( + "UpdateEnterpriseTeamDiscussionsSettingPayload", + graphql_name="updateEnterpriseTeamDiscussionsSetting", + args=sgqlc.types.ArgDict( + ( + ( + "input", + sgqlc.types.Arg(sgqlc.types.non_null(UpdateEnterpriseTeamDiscussionsSettingInput), graphql_name="input", default=None), + ), + ) + ), + ) + """Sets whether team discussions are enabled for an enterprise. + + Arguments: + + * `input` (`UpdateEnterpriseTeamDiscussionsSettingInput!`): + Parameters for UpdateEnterpriseTeamDiscussionsSetting + """ + + update_enterprise_two_factor_authentication_required_setting = sgqlc.types.Field( + "UpdateEnterpriseTwoFactorAuthenticationRequiredSettingPayload", + graphql_name="updateEnterpriseTwoFactorAuthenticationRequiredSetting", + args=sgqlc.types.ArgDict( + ( + ( + "input", + sgqlc.types.Arg( + sgqlc.types.non_null(UpdateEnterpriseTwoFactorAuthenticationRequiredSettingInput), + graphql_name="input", + default=None, + ), + ), + ) + ), + ) + """Sets whether two factor authentication is required for all users + in an enterprise. 
+ + Arguments: + + * `input` (`UpdateEnterpriseTwoFactorAuthenticationRequiredSetting + Input!`): Parameters for + UpdateEnterpriseTwoFactorAuthenticationRequiredSetting + """ + + update_environment = sgqlc.types.Field( + "UpdateEnvironmentPayload", + graphql_name="updateEnvironment", + args=sgqlc.types.ArgDict( + (("input", sgqlc.types.Arg(sgqlc.types.non_null(UpdateEnvironmentInput), graphql_name="input", default=None)),) + ), + ) + """Updates an environment. + + Arguments: + + * `input` (`UpdateEnvironmentInput!`): Parameters for + UpdateEnvironment + """ + + update_ip_allow_list_enabled_setting = sgqlc.types.Field( + "UpdateIpAllowListEnabledSettingPayload", + graphql_name="updateIpAllowListEnabledSetting", + args=sgqlc.types.ArgDict( + (("input", sgqlc.types.Arg(sgqlc.types.non_null(UpdateIpAllowListEnabledSettingInput), graphql_name="input", default=None)),) + ), + ) + """Sets whether an IP allow list is enabled on an owner. + + Arguments: + + * `input` (`UpdateIpAllowListEnabledSettingInput!`): Parameters + for UpdateIpAllowListEnabledSetting + """ + + update_ip_allow_list_entry = sgqlc.types.Field( + "UpdateIpAllowListEntryPayload", + graphql_name="updateIpAllowListEntry", + args=sgqlc.types.ArgDict( + (("input", sgqlc.types.Arg(sgqlc.types.non_null(UpdateIpAllowListEntryInput), graphql_name="input", default=None)),) + ), + ) + """Updates an IP allow list entry. 
+ + Arguments: + + * `input` (`UpdateIpAllowListEntryInput!`): Parameters for + UpdateIpAllowListEntry + """ + + update_ip_allow_list_for_installed_apps_enabled_setting = sgqlc.types.Field( + "UpdateIpAllowListForInstalledAppsEnabledSettingPayload", + graphql_name="updateIpAllowListForInstalledAppsEnabledSetting", + args=sgqlc.types.ArgDict( + ( + ( + "input", + sgqlc.types.Arg( + sgqlc.types.non_null(UpdateIpAllowListForInstalledAppsEnabledSettingInput), graphql_name="input", default=None + ), + ), + ) + ), + ) + """Sets whether IP allow list configuration for installed GitHub Apps + is enabled on an owner. + + Arguments: + + * `input` + (`UpdateIpAllowListForInstalledAppsEnabledSettingInput!`): + Parameters for UpdateIpAllowListForInstalledAppsEnabledSetting + """ + + update_issue = sgqlc.types.Field( + "UpdateIssuePayload", + graphql_name="updateIssue", + args=sgqlc.types.ArgDict((("input", sgqlc.types.Arg(sgqlc.types.non_null(UpdateIssueInput), graphql_name="input", default=None)),)), + ) + """Updates an Issue. + + Arguments: + + * `input` (`UpdateIssueInput!`): Parameters for UpdateIssue + """ + + update_issue_comment = sgqlc.types.Field( + "UpdateIssueCommentPayload", + graphql_name="updateIssueComment", + args=sgqlc.types.ArgDict( + (("input", sgqlc.types.Arg(sgqlc.types.non_null(UpdateIssueCommentInput), graphql_name="input", default=None)),) + ), + ) + """Updates an IssueComment object. + + Arguments: + + * `input` (`UpdateIssueCommentInput!`): Parameters for + UpdateIssueComment + """ + + update_notification_restriction_setting = sgqlc.types.Field( + "UpdateNotificationRestrictionSettingPayload", + graphql_name="updateNotificationRestrictionSetting", + args=sgqlc.types.ArgDict( + ( + ( + "input", + sgqlc.types.Arg(sgqlc.types.non_null(UpdateNotificationRestrictionSettingInput), graphql_name="input", default=None), + ), + ) + ), + ) + """Update the setting to restrict notifications to only verified or + approved domains available to an owner. 
+ + Arguments: + + * `input` (`UpdateNotificationRestrictionSettingInput!`): + Parameters for UpdateNotificationRestrictionSetting + """ + + update_organization_allow_private_repository_forking_setting = sgqlc.types.Field( + "UpdateOrganizationAllowPrivateRepositoryForkingSettingPayload", + graphql_name="updateOrganizationAllowPrivateRepositoryForkingSetting", + args=sgqlc.types.ArgDict( + ( + ( + "input", + sgqlc.types.Arg( + sgqlc.types.non_null(UpdateOrganizationAllowPrivateRepositoryForkingSettingInput), + graphql_name="input", + default=None, + ), + ), + ) + ), + ) + """Sets whether private repository forks are enabled for an + organization. + + Arguments: + + * `input` (`UpdateOrganizationAllowPrivateRepositoryForkingSetting + Input!`): Parameters for + UpdateOrganizationAllowPrivateRepositoryForkingSetting + """ + + update_project = sgqlc.types.Field( + "UpdateProjectPayload", + graphql_name="updateProject", + args=sgqlc.types.ArgDict( + (("input", sgqlc.types.Arg(sgqlc.types.non_null(UpdateProjectInput), graphql_name="input", default=None)),) + ), + ) + """Updates an existing project. + + Arguments: + + * `input` (`UpdateProjectInput!`): Parameters for UpdateProject + """ + + update_project_card = sgqlc.types.Field( + "UpdateProjectCardPayload", + graphql_name="updateProjectCard", + args=sgqlc.types.ArgDict( + (("input", sgqlc.types.Arg(sgqlc.types.non_null(UpdateProjectCardInput), graphql_name="input", default=None)),) + ), + ) + """Updates an existing project card. + + Arguments: + + * `input` (`UpdateProjectCardInput!`): Parameters for + UpdateProjectCard + """ + + update_project_column = sgqlc.types.Field( + "UpdateProjectColumnPayload", + graphql_name="updateProjectColumn", + args=sgqlc.types.ArgDict( + (("input", sgqlc.types.Arg(sgqlc.types.non_null(UpdateProjectColumnInput), graphql_name="input", default=None)),) + ), + ) + """Updates an existing project column. 
+ + Arguments: + + * `input` (`UpdateProjectColumnInput!`): Parameters for + UpdateProjectColumn + """ + + update_project_draft_issue = sgqlc.types.Field( + "UpdateProjectDraftIssuePayload", + graphql_name="updateProjectDraftIssue", + args=sgqlc.types.ArgDict( + (("input", sgqlc.types.Arg(sgqlc.types.non_null(UpdateProjectDraftIssueInput), graphql_name="input", default=None)),) + ), + ) + """Updates a draft issue within a Project. + + Arguments: + + * `input` (`UpdateProjectDraftIssueInput!`): Parameters for + UpdateProjectDraftIssue + """ + + update_project_next = sgqlc.types.Field( + "UpdateProjectNextPayload", + graphql_name="updateProjectNext", + args=sgqlc.types.ArgDict( + (("input", sgqlc.types.Arg(sgqlc.types.non_null(UpdateProjectNextInput), graphql_name="input", default=None)),) + ), + ) + """Updates an existing project (beta). + + Arguments: + + * `input` (`UpdateProjectNextInput!`): Parameters for + UpdateProjectNext + """ + + update_project_next_item_field = sgqlc.types.Field( + "UpdateProjectNextItemFieldPayload", + graphql_name="updateProjectNextItemField", + args=sgqlc.types.ArgDict( + (("input", sgqlc.types.Arg(sgqlc.types.non_null(UpdateProjectNextItemFieldInput), graphql_name="input", default=None)),) + ), + ) + """Updates a field of an item from a Project. 
+ + Arguments: + + * `input` (`UpdateProjectNextItemFieldInput!`): Parameters for + UpdateProjectNextItemField + """ + + update_pull_request = sgqlc.types.Field( + "UpdatePullRequestPayload", + graphql_name="updatePullRequest", + args=sgqlc.types.ArgDict( + (("input", sgqlc.types.Arg(sgqlc.types.non_null(UpdatePullRequestInput), graphql_name="input", default=None)),) + ), + ) + """Update a pull request + + Arguments: + + * `input` (`UpdatePullRequestInput!`): Parameters for + UpdatePullRequest + """ + + update_pull_request_branch = sgqlc.types.Field( + "UpdatePullRequestBranchPayload", + graphql_name="updatePullRequestBranch", + args=sgqlc.types.ArgDict( + (("input", sgqlc.types.Arg(sgqlc.types.non_null(UpdatePullRequestBranchInput), graphql_name="input", default=None)),) + ), + ) + """Merge or Rebase HEAD from upstream branch into pull request branch + + Arguments: + + * `input` (`UpdatePullRequestBranchInput!`): Parameters for + UpdatePullRequestBranch + """ + + update_pull_request_review = sgqlc.types.Field( + "UpdatePullRequestReviewPayload", + graphql_name="updatePullRequestReview", + args=sgqlc.types.ArgDict( + (("input", sgqlc.types.Arg(sgqlc.types.non_null(UpdatePullRequestReviewInput), graphql_name="input", default=None)),) + ), + ) + """Updates the body of a pull request review. + + Arguments: + + * `input` (`UpdatePullRequestReviewInput!`): Parameters for + UpdatePullRequestReview + """ + + update_pull_request_review_comment = sgqlc.types.Field( + "UpdatePullRequestReviewCommentPayload", + graphql_name="updatePullRequestReviewComment", + args=sgqlc.types.ArgDict( + (("input", sgqlc.types.Arg(sgqlc.types.non_null(UpdatePullRequestReviewCommentInput), graphql_name="input", default=None)),) + ), + ) + """Updates a pull request review comment. 
+ + Arguments: + + * `input` (`UpdatePullRequestReviewCommentInput!`): Parameters for + UpdatePullRequestReviewComment + """ + + update_ref = sgqlc.types.Field( + "UpdateRefPayload", + graphql_name="updateRef", + args=sgqlc.types.ArgDict((("input", sgqlc.types.Arg(sgqlc.types.non_null(UpdateRefInput), graphql_name="input", default=None)),)), + ) + """Update a Git Ref. + + Arguments: + + * `input` (`UpdateRefInput!`): Parameters for UpdateRef + """ + + update_repository = sgqlc.types.Field( + "UpdateRepositoryPayload", + graphql_name="updateRepository", + args=sgqlc.types.ArgDict( + (("input", sgqlc.types.Arg(sgqlc.types.non_null(UpdateRepositoryInput), graphql_name="input", default=None)),) + ), + ) + """Update information about a repository. + + Arguments: + + * `input` (`UpdateRepositoryInput!`): Parameters for + UpdateRepository + """ + + update_sponsorship_preferences = sgqlc.types.Field( + "UpdateSponsorshipPreferencesPayload", + graphql_name="updateSponsorshipPreferences", + args=sgqlc.types.ArgDict( + (("input", sgqlc.types.Arg(sgqlc.types.non_null(UpdateSponsorshipPreferencesInput), graphql_name="input", default=None)),) + ), + ) + """Change visibility of your sponsorship and opt in or out of email + updates from the maintainer. + + Arguments: + + * `input` (`UpdateSponsorshipPreferencesInput!`): Parameters for + UpdateSponsorshipPreferences + """ + + update_subscription = sgqlc.types.Field( + "UpdateSubscriptionPayload", + graphql_name="updateSubscription", + args=sgqlc.types.ArgDict( + (("input", sgqlc.types.Arg(sgqlc.types.non_null(UpdateSubscriptionInput), graphql_name="input", default=None)),) + ), + ) + """Updates the state for subscribable subjects. 
+ + Arguments: + + * `input` (`UpdateSubscriptionInput!`): Parameters for + UpdateSubscription + """ + + update_team_discussion = sgqlc.types.Field( + "UpdateTeamDiscussionPayload", + graphql_name="updateTeamDiscussion", + args=sgqlc.types.ArgDict( + (("input", sgqlc.types.Arg(sgqlc.types.non_null(UpdateTeamDiscussionInput), graphql_name="input", default=None)),) + ), + ) + """Updates a team discussion. + + Arguments: + + * `input` (`UpdateTeamDiscussionInput!`): Parameters for + UpdateTeamDiscussion + """ + + update_team_discussion_comment = sgqlc.types.Field( + "UpdateTeamDiscussionCommentPayload", + graphql_name="updateTeamDiscussionComment", + args=sgqlc.types.ArgDict( + (("input", sgqlc.types.Arg(sgqlc.types.non_null(UpdateTeamDiscussionCommentInput), graphql_name="input", default=None)),) + ), + ) + """Updates a discussion comment. + + Arguments: + + * `input` (`UpdateTeamDiscussionCommentInput!`): Parameters for + UpdateTeamDiscussionComment + """ + + update_teams_repository = sgqlc.types.Field( + "UpdateTeamsRepositoryPayload", + graphql_name="updateTeamsRepository", + args=sgqlc.types.ArgDict( + (("input", sgqlc.types.Arg(sgqlc.types.non_null(UpdateTeamsRepositoryInput), graphql_name="input", default=None)),) + ), + ) + """Update team repository. + + Arguments: + + * `input` (`UpdateTeamsRepositoryInput!`): Parameters for + UpdateTeamsRepository + """ + + update_topics = sgqlc.types.Field( + "UpdateTopicsPayload", + graphql_name="updateTopics", + args=sgqlc.types.ArgDict( + (("input", sgqlc.types.Arg(sgqlc.types.non_null(UpdateTopicsInput), graphql_name="input", default=None)),) + ), + ) + """Replaces the repository's topics with the given topics. 
+ + Arguments: + + * `input` (`UpdateTopicsInput!`): Parameters for UpdateTopics + """ + + verify_verifiable_domain = sgqlc.types.Field( + "VerifyVerifiableDomainPayload", + graphql_name="verifyVerifiableDomain", + args=sgqlc.types.ArgDict( + (("input", sgqlc.types.Arg(sgqlc.types.non_null(VerifyVerifiableDomainInput), graphql_name="input", default=None)),) + ), + ) + """Verify that a verifiable domain has the expected DNS record. + + Arguments: + + * `input` (`VerifyVerifiableDomainInput!`): Parameters for + VerifyVerifiableDomain + """ + + +class Node(sgqlc.types.Interface): + """An object with an ID.""" + + __schema__ = github_schema + __field_names__ = ("id",) + id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="id") + """ID of the object.""" + + +class OauthApplicationAuditEntryData(sgqlc.types.Interface): + """Metadata for an audit entry with action oauth_application.*""" + + __schema__ = github_schema + __field_names__ = ("oauth_application_name", "oauth_application_resource_path", "oauth_application_url") + oauth_application_name = sgqlc.types.Field(String, graphql_name="oauthApplicationName") + """The name of the OAuth Application.""" + + oauth_application_resource_path = sgqlc.types.Field(URI, graphql_name="oauthApplicationResourcePath") + """The HTTP path for the OAuth Application""" + + oauth_application_url = sgqlc.types.Field(URI, graphql_name="oauthApplicationUrl") + """The HTTP URL for the OAuth Application""" + + +class OrganizationAuditEntryConnection(sgqlc.types.relay.Connection): + """The connection type for OrganizationAuditEntry.""" + + __schema__ = github_schema + __field_names__ = ("edges", "nodes", "page_info", "total_count") + edges = sgqlc.types.Field(sgqlc.types.list_of("OrganizationAuditEntryEdge"), graphql_name="edges") + """A list of edges.""" + + nodes = sgqlc.types.Field(sgqlc.types.list_of("OrganizationAuditEntry"), graphql_name="nodes") + """A list of nodes.""" + + page_info = 
sgqlc.types.Field(sgqlc.types.non_null("PageInfo"), graphql_name="pageInfo") + """Information to aid in pagination.""" + + total_count = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="totalCount") + """Identifies the total count of items in the connection.""" + + +class OrganizationAuditEntryData(sgqlc.types.Interface): + """Metadata for an audit entry with action org.*""" + + __schema__ = github_schema + __field_names__ = ("organization", "organization_name", "organization_resource_path", "organization_url") + organization = sgqlc.types.Field("Organization", graphql_name="organization") + """The Organization associated with the Audit Entry.""" + + organization_name = sgqlc.types.Field(String, graphql_name="organizationName") + """The name of the Organization.""" + + organization_resource_path = sgqlc.types.Field(URI, graphql_name="organizationResourcePath") + """The HTTP path for the organization""" + + organization_url = sgqlc.types.Field(URI, graphql_name="organizationUrl") + """The HTTP URL for the organization""" + + +class OrganizationAuditEntryEdge(sgqlc.types.Type): + """An edge in a connection.""" + + __schema__ = github_schema + __field_names__ = ("cursor", "node") + cursor = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="cursor") + """A cursor for use in pagination.""" + + node = sgqlc.types.Field("OrganizationAuditEntry", graphql_name="node") + """The item at the end of the edge.""" + + +class OrganizationConnection(sgqlc.types.relay.Connection): + """A list of organizations managed by an enterprise.""" + + __schema__ = github_schema + __field_names__ = ("edges", "nodes", "page_info", "total_count") + edges = sgqlc.types.Field(sgqlc.types.list_of("OrganizationEdge"), graphql_name="edges") + """A list of edges.""" + + nodes = sgqlc.types.Field(sgqlc.types.list_of("Organization"), graphql_name="nodes") + """A list of nodes.""" + + page_info = sgqlc.types.Field(sgqlc.types.non_null("PageInfo"), graphql_name="pageInfo") + 
"""Information to aid in pagination.""" + + total_count = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="totalCount") + """Identifies the total count of items in the connection.""" + + +class OrganizationEdge(sgqlc.types.Type): + """An edge in a connection.""" + + __schema__ = github_schema + __field_names__ = ("cursor", "node") + cursor = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="cursor") + """A cursor for use in pagination.""" + + node = sgqlc.types.Field("Organization", graphql_name="node") + """The item at the end of the edge.""" + + +class OrganizationEnterpriseOwnerConnection(sgqlc.types.relay.Connection): + """The connection type for User.""" + + __schema__ = github_schema + __field_names__ = ("edges", "nodes", "page_info", "total_count") + edges = sgqlc.types.Field(sgqlc.types.list_of("OrganizationEnterpriseOwnerEdge"), graphql_name="edges") + """A list of edges.""" + + nodes = sgqlc.types.Field(sgqlc.types.list_of("User"), graphql_name="nodes") + """A list of nodes.""" + + page_info = sgqlc.types.Field(sgqlc.types.non_null("PageInfo"), graphql_name="pageInfo") + """Information to aid in pagination.""" + + total_count = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="totalCount") + """Identifies the total count of items in the connection.""" + + +class OrganizationEnterpriseOwnerEdge(sgqlc.types.Type): + """An enterprise owner in the context of an organization that is part + of the enterprise. 
+ """ + + __schema__ = github_schema + __field_names__ = ("cursor", "node", "organization_role") + cursor = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="cursor") + """A cursor for use in pagination.""" + + node = sgqlc.types.Field("User", graphql_name="node") + """The item at the end of the edge.""" + + organization_role = sgqlc.types.Field(sgqlc.types.non_null(RoleInOrganization), graphql_name="organizationRole") + """The role of the owner with respect to the organization.""" + + +class OrganizationInvitationConnection(sgqlc.types.relay.Connection): + """The connection type for OrganizationInvitation.""" + + __schema__ = github_schema + __field_names__ = ("edges", "nodes", "page_info", "total_count") + edges = sgqlc.types.Field(sgqlc.types.list_of("OrganizationInvitationEdge"), graphql_name="edges") + """A list of edges.""" + + nodes = sgqlc.types.Field(sgqlc.types.list_of("OrganizationInvitation"), graphql_name="nodes") + """A list of nodes.""" + + page_info = sgqlc.types.Field(sgqlc.types.non_null("PageInfo"), graphql_name="pageInfo") + """Information to aid in pagination.""" + + total_count = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="totalCount") + """Identifies the total count of items in the connection.""" + + +class OrganizationInvitationEdge(sgqlc.types.Type): + """An edge in a connection.""" + + __schema__ = github_schema + __field_names__ = ("cursor", "node") + cursor = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="cursor") + """A cursor for use in pagination.""" + + node = sgqlc.types.Field("OrganizationInvitation", graphql_name="node") + """The item at the end of the edge.""" + + +class OrganizationMemberConnection(sgqlc.types.relay.Connection): + """The connection type for User.""" + + __schema__ = github_schema + __field_names__ = ("edges", "nodes", "page_info", "total_count") + edges = sgqlc.types.Field(sgqlc.types.list_of("OrganizationMemberEdge"), graphql_name="edges") + """A list of edges.""" + + 
nodes = sgqlc.types.Field(sgqlc.types.list_of("User"), graphql_name="nodes") + """A list of nodes.""" + + page_info = sgqlc.types.Field(sgqlc.types.non_null("PageInfo"), graphql_name="pageInfo") + """Information to aid in pagination.""" + + total_count = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="totalCount") + """Identifies the total count of items in the connection.""" + + +class OrganizationMemberEdge(sgqlc.types.Type): + """Represents a user within an organization.""" + + __schema__ = github_schema + __field_names__ = ("cursor", "has_two_factor_enabled", "node", "role") + cursor = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="cursor") + """A cursor for use in pagination.""" + + has_two_factor_enabled = sgqlc.types.Field(Boolean, graphql_name="hasTwoFactorEnabled") + """Whether the organization member has two factor enabled or not. + Returns null if information is not available to viewer. + """ + + node = sgqlc.types.Field("User", graphql_name="node") + """The item at the end of the edge.""" + + role = sgqlc.types.Field(OrganizationMemberRole, graphql_name="role") + """The role this user has in the organization.""" + + +class PackageConnection(sgqlc.types.relay.Connection): + """The connection type for Package.""" + + __schema__ = github_schema + __field_names__ = ("edges", "nodes", "page_info", "total_count") + edges = sgqlc.types.Field(sgqlc.types.list_of("PackageEdge"), graphql_name="edges") + """A list of edges.""" + + nodes = sgqlc.types.Field(sgqlc.types.list_of("Package"), graphql_name="nodes") + """A list of nodes.""" + + page_info = sgqlc.types.Field(sgqlc.types.non_null("PageInfo"), graphql_name="pageInfo") + """Information to aid in pagination.""" + + total_count = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="totalCount") + """Identifies the total count of items in the connection.""" + + +class PackageEdge(sgqlc.types.Type): + """An edge in a connection.""" + + __schema__ = github_schema + __field_names__ 
= ("cursor", "node") + cursor = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="cursor") + """A cursor for use in pagination.""" + + node = sgqlc.types.Field("Package", graphql_name="node") + """The item at the end of the edge.""" + + +class PackageFileConnection(sgqlc.types.relay.Connection): + """The connection type for PackageFile.""" + + __schema__ = github_schema + __field_names__ = ("edges", "nodes", "page_info", "total_count") + edges = sgqlc.types.Field(sgqlc.types.list_of("PackageFileEdge"), graphql_name="edges") + """A list of edges.""" + + nodes = sgqlc.types.Field(sgqlc.types.list_of("PackageFile"), graphql_name="nodes") + """A list of nodes.""" + + page_info = sgqlc.types.Field(sgqlc.types.non_null("PageInfo"), graphql_name="pageInfo") + """Information to aid in pagination.""" + + total_count = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="totalCount") + """Identifies the total count of items in the connection.""" + + +class PackageFileEdge(sgqlc.types.Type): + """An edge in a connection.""" + + __schema__ = github_schema + __field_names__ = ("cursor", "node") + cursor = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="cursor") + """A cursor for use in pagination.""" + + node = sgqlc.types.Field("PackageFile", graphql_name="node") + """The item at the end of the edge.""" + + +class PackageOwner(sgqlc.types.Interface): + """Represents an owner of a package.""" + + __schema__ = github_schema + __field_names__ = ("id", "packages") + id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="id") + + packages = sgqlc.types.Field( + sgqlc.types.non_null(PackageConnection), + graphql_name="packages", + args=sgqlc.types.ArgDict( + ( + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", 
default=None)), + ("names", sgqlc.types.Arg(sgqlc.types.list_of(String), graphql_name="names", default=None)), + ("repository_id", sgqlc.types.Arg(ID, graphql_name="repositoryId", default=None)), + ("package_type", sgqlc.types.Arg(PackageType, graphql_name="packageType", default=None)), + ("order_by", sgqlc.types.Arg(PackageOrder, graphql_name="orderBy", default={"field": "CREATED_AT", "direction": "DESC"})), + ) + ), + ) + """A list of packages under the owner. + + Arguments: + + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. + * `names` (`[String]`): Find packages by their names. + * `repository_id` (`ID`): Find packages in a repository by ID. + * `package_type` (`PackageType`): Filter registry package by type. + * `order_by` (`PackageOrder`): Ordering of the returned packages. + (default: `{field: CREATED_AT, direction: DESC}`) + """ + + +class PackageStatistics(sgqlc.types.Type): + """Represents a object that contains package activity statistics such + as downloads. 
+ """ + + __schema__ = github_schema + __field_names__ = ("downloads_total_count",) + downloads_total_count = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="downloadsTotalCount") + """Number of times the package was downloaded since it was created.""" + + +class PackageVersionConnection(sgqlc.types.relay.Connection): + """The connection type for PackageVersion.""" + + __schema__ = github_schema + __field_names__ = ("edges", "nodes", "page_info", "total_count") + edges = sgqlc.types.Field(sgqlc.types.list_of("PackageVersionEdge"), graphql_name="edges") + """A list of edges.""" + + nodes = sgqlc.types.Field(sgqlc.types.list_of("PackageVersion"), graphql_name="nodes") + """A list of nodes.""" + + page_info = sgqlc.types.Field(sgqlc.types.non_null("PageInfo"), graphql_name="pageInfo") + """Information to aid in pagination.""" + + total_count = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="totalCount") + """Identifies the total count of items in the connection.""" + + +class PackageVersionEdge(sgqlc.types.Type): + """An edge in a connection.""" + + __schema__ = github_schema + __field_names__ = ("cursor", "node") + cursor = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="cursor") + """A cursor for use in pagination.""" + + node = sgqlc.types.Field("PackageVersion", graphql_name="node") + """The item at the end of the edge.""" + + +class PackageVersionStatistics(sgqlc.types.Type): + """Represents a object that contains package version activity + statistics such as downloads. 
+ """ + + __schema__ = github_schema + __field_names__ = ("downloads_total_count",) + downloads_total_count = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="downloadsTotalCount") + """Number of times the package was downloaded since it was created.""" + + +class PageInfo(sgqlc.types.Type): + """Information about pagination in a connection.""" + + __schema__ = github_schema + __field_names__ = ("end_cursor", "has_next_page", "has_previous_page", "start_cursor") + end_cursor = sgqlc.types.Field(String, graphql_name="endCursor") + """When paginating forwards, the cursor to continue.""" + + has_next_page = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="hasNextPage") + """When paginating forwards, are there more items?""" + + has_previous_page = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="hasPreviousPage") + """When paginating backwards, are there more items?""" + + start_cursor = sgqlc.types.Field(String, graphql_name="startCursor") + """When paginating backwards, the cursor to continue.""" + + +class PermissionSource(sgqlc.types.Type): + """A level of permission and source for a user's access to a + repository. 
+ """ + + __schema__ = github_schema + __field_names__ = ("organization", "permission", "source") + organization = sgqlc.types.Field(sgqlc.types.non_null("Organization"), graphql_name="organization") + """The organization the repository belongs to.""" + + permission = sgqlc.types.Field(sgqlc.types.non_null(DefaultRepositoryPermissionField), graphql_name="permission") + """The level of access this source has granted to the user.""" + + source = sgqlc.types.Field(sgqlc.types.non_null("PermissionGranter"), graphql_name="source") + """The source of this permission.""" + + +class PinIssuePayload(sgqlc.types.Type): + """Autogenerated return type of PinIssue""" + + __schema__ = github_schema + __field_names__ = ("client_mutation_id", "issue") + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + issue = sgqlc.types.Field("Issue", graphql_name="issue") + """The issue that was pinned""" + + +class PinnableItemConnection(sgqlc.types.relay.Connection): + """The connection type for PinnableItem.""" + + __schema__ = github_schema + __field_names__ = ("edges", "nodes", "page_info", "total_count") + edges = sgqlc.types.Field(sgqlc.types.list_of("PinnableItemEdge"), graphql_name="edges") + """A list of edges.""" + + nodes = sgqlc.types.Field(sgqlc.types.list_of("PinnableItem"), graphql_name="nodes") + """A list of nodes.""" + + page_info = sgqlc.types.Field(sgqlc.types.non_null(PageInfo), graphql_name="pageInfo") + """Information to aid in pagination.""" + + total_count = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="totalCount") + """Identifies the total count of items in the connection.""" + + +class PinnableItemEdge(sgqlc.types.Type): + """An edge in a connection.""" + + __schema__ = github_schema + __field_names__ = ("cursor", "node") + cursor = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="cursor") + """A cursor for use in pagination.""" + + node = 
sgqlc.types.Field("PinnableItem", graphql_name="node") + """The item at the end of the edge.""" + + +class PinnedDiscussionConnection(sgqlc.types.relay.Connection): + """The connection type for PinnedDiscussion.""" + + __schema__ = github_schema + __field_names__ = ("edges", "nodes", "page_info", "total_count") + edges = sgqlc.types.Field(sgqlc.types.list_of("PinnedDiscussionEdge"), graphql_name="edges") + """A list of edges.""" + + nodes = sgqlc.types.Field(sgqlc.types.list_of("PinnedDiscussion"), graphql_name="nodes") + """A list of nodes.""" + + page_info = sgqlc.types.Field(sgqlc.types.non_null(PageInfo), graphql_name="pageInfo") + """Information to aid in pagination.""" + + total_count = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="totalCount") + """Identifies the total count of items in the connection.""" + + +class PinnedDiscussionEdge(sgqlc.types.Type): + """An edge in a connection.""" + + __schema__ = github_schema + __field_names__ = ("cursor", "node") + cursor = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="cursor") + """A cursor for use in pagination.""" + + node = sgqlc.types.Field("PinnedDiscussion", graphql_name="node") + """The item at the end of the edge.""" + + +class PinnedIssueConnection(sgqlc.types.relay.Connection): + """The connection type for PinnedIssue.""" + + __schema__ = github_schema + __field_names__ = ("edges", "nodes", "page_info", "total_count") + edges = sgqlc.types.Field(sgqlc.types.list_of("PinnedIssueEdge"), graphql_name="edges") + """A list of edges.""" + + nodes = sgqlc.types.Field(sgqlc.types.list_of("PinnedIssue"), graphql_name="nodes") + """A list of nodes.""" + + page_info = sgqlc.types.Field(sgqlc.types.non_null(PageInfo), graphql_name="pageInfo") + """Information to aid in pagination.""" + + total_count = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="totalCount") + """Identifies the total count of items in the connection.""" + + +class PinnedIssueEdge(sgqlc.types.Type): + """An 
edge in a connection.""" + + __schema__ = github_schema + __field_names__ = ("cursor", "node") + cursor = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="cursor") + """A cursor for use in pagination.""" + + node = sgqlc.types.Field("PinnedIssue", graphql_name="node") + """The item at the end of the edge.""" + + +class ProfileItemShowcase(sgqlc.types.Type): + """A curatable list of repositories relating to a repository owner, + which defaults to showing the most popular repositories they own. + """ + + __schema__ = github_schema + __field_names__ = ("has_pinned_items", "items") + has_pinned_items = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="hasPinnedItems") + """Whether or not the owner has pinned any repositories or gists.""" + + items = sgqlc.types.Field( + sgqlc.types.non_null(PinnableItemConnection), + graphql_name="items", + args=sgqlc.types.ArgDict( + ( + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ) + ), + ) + """The repositories and gists in the showcase. If the profile owner + has any pinned items, those will be returned. Otherwise, the + profile owner's popular repositories will be returned. + + Arguments: + + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. 
+ """ + + +class ProfileOwner(sgqlc.types.Interface): + """Represents any entity on GitHub that has a profile page.""" + + __schema__ = github_schema + __field_names__ = ( + "any_pinnable_items", + "email", + "id", + "item_showcase", + "location", + "login", + "name", + "pinnable_items", + "pinned_items", + "pinned_items_remaining", + "viewer_can_change_pinned_items", + "website_url", + ) + any_pinnable_items = sgqlc.types.Field( + sgqlc.types.non_null(Boolean), + graphql_name="anyPinnableItems", + args=sgqlc.types.ArgDict((("type", sgqlc.types.Arg(PinnableItemType, graphql_name="type", default=None)),)), + ) + """Determine if this repository owner has any items that can be + pinned to their profile. + + Arguments: + + * `type` (`PinnableItemType`): Filter to only a particular kind of + pinnable item. + """ + + email = sgqlc.types.Field(String, graphql_name="email") + """The public profile email.""" + + id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="id") + + item_showcase = sgqlc.types.Field(sgqlc.types.non_null(ProfileItemShowcase), graphql_name="itemShowcase") + """Showcases a selection of repositories and gists that the profile + owner has either curated or that have been selected automatically + based on popularity. 
+ """ + + location = sgqlc.types.Field(String, graphql_name="location") + """The public profile location.""" + + login = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="login") + """The username used to login.""" + + name = sgqlc.types.Field(String, graphql_name="name") + """The public profile name.""" + + pinnable_items = sgqlc.types.Field( + sgqlc.types.non_null(PinnableItemConnection), + graphql_name="pinnableItems", + args=sgqlc.types.ArgDict( + ( + ("types", sgqlc.types.Arg(sgqlc.types.list_of(sgqlc.types.non_null(PinnableItemType)), graphql_name="types", default=None)), + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ) + ), + ) + """A list of repositories and gists this profile owner can pin to + their profile. + + Arguments: + + * `types` (`[PinnableItemType!]`): Filter the types of pinnable + items that are returned. + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. 
+ """ + + pinned_items = sgqlc.types.Field( + sgqlc.types.non_null(PinnableItemConnection), + graphql_name="pinnedItems", + args=sgqlc.types.ArgDict( + ( + ("types", sgqlc.types.Arg(sgqlc.types.list_of(sgqlc.types.non_null(PinnableItemType)), graphql_name="types", default=None)), + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ) + ), + ) + """A list of repositories and gists this profile owner has pinned to + their profile + + Arguments: + + * `types` (`[PinnableItemType!]`): Filter the types of pinned + items that are returned. + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. + """ + + pinned_items_remaining = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="pinnedItemsRemaining") + """Returns how many more items this profile owner can pin to their + profile. 
+ """ + + viewer_can_change_pinned_items = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="viewerCanChangePinnedItems") + """Can the viewer pin repositories and gists to the profile?""" + + website_url = sgqlc.types.Field(URI, graphql_name="websiteUrl") + """The public profile website URL.""" + + +class ProjectCardConnection(sgqlc.types.relay.Connection): + """The connection type for ProjectCard.""" + + __schema__ = github_schema + __field_names__ = ("edges", "nodes", "page_info", "total_count") + edges = sgqlc.types.Field(sgqlc.types.list_of("ProjectCardEdge"), graphql_name="edges") + """A list of edges.""" + + nodes = sgqlc.types.Field(sgqlc.types.list_of("ProjectCard"), graphql_name="nodes") + """A list of nodes.""" + + page_info = sgqlc.types.Field(sgqlc.types.non_null(PageInfo), graphql_name="pageInfo") + """Information to aid in pagination.""" + + total_count = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="totalCount") + """Identifies the total count of items in the connection.""" + + +class ProjectCardEdge(sgqlc.types.Type): + """An edge in a connection.""" + + __schema__ = github_schema + __field_names__ = ("cursor", "node") + cursor = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="cursor") + """A cursor for use in pagination.""" + + node = sgqlc.types.Field("ProjectCard", graphql_name="node") + """The item at the end of the edge.""" + + +class ProjectColumnConnection(sgqlc.types.relay.Connection): + """The connection type for ProjectColumn.""" + + __schema__ = github_schema + __field_names__ = ("edges", "nodes", "page_info", "total_count") + edges = sgqlc.types.Field(sgqlc.types.list_of("ProjectColumnEdge"), graphql_name="edges") + """A list of edges.""" + + nodes = sgqlc.types.Field(sgqlc.types.list_of("ProjectColumn"), graphql_name="nodes") + """A list of nodes.""" + + page_info = sgqlc.types.Field(sgqlc.types.non_null(PageInfo), graphql_name="pageInfo") + """Information to aid in pagination.""" + + total_count 
= sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="totalCount") + """Identifies the total count of items in the connection.""" + + +class ProjectColumnEdge(sgqlc.types.Type): + """An edge in a connection.""" + + __schema__ = github_schema + __field_names__ = ("cursor", "node") + cursor = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="cursor") + """A cursor for use in pagination.""" + + node = sgqlc.types.Field("ProjectColumn", graphql_name="node") + """The item at the end of the edge.""" + + +class ProjectConnection(sgqlc.types.relay.Connection): + """A list of projects associated with the owner.""" + + __schema__ = github_schema + __field_names__ = ("edges", "nodes", "page_info", "total_count") + edges = sgqlc.types.Field(sgqlc.types.list_of("ProjectEdge"), graphql_name="edges") + """A list of edges.""" + + nodes = sgqlc.types.Field(sgqlc.types.list_of("Project"), graphql_name="nodes") + """A list of nodes.""" + + page_info = sgqlc.types.Field(sgqlc.types.non_null(PageInfo), graphql_name="pageInfo") + """Information to aid in pagination.""" + + total_count = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="totalCount") + """Identifies the total count of items in the connection.""" + + +class ProjectEdge(sgqlc.types.Type): + """An edge in a connection.""" + + __schema__ = github_schema + __field_names__ = ("cursor", "node") + cursor = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="cursor") + """A cursor for use in pagination.""" + + node = sgqlc.types.Field("Project", graphql_name="node") + """The item at the end of the edge.""" + + +class ProjectNextConnection(sgqlc.types.relay.Connection): + """The connection type for ProjectNext.""" + + __schema__ = github_schema + __field_names__ = ("edges", "nodes", "page_info", "total_count") + edges = sgqlc.types.Field(sgqlc.types.list_of("ProjectNextEdge"), graphql_name="edges") + """A list of edges.""" + + nodes = sgqlc.types.Field(sgqlc.types.list_of("ProjectNext"), 
graphql_name="nodes") + """A list of nodes.""" + + page_info = sgqlc.types.Field(sgqlc.types.non_null(PageInfo), graphql_name="pageInfo") + """Information to aid in pagination.""" + + total_count = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="totalCount") + """Identifies the total count of items in the connection.""" + + +class ProjectNextEdge(sgqlc.types.Type): + """An edge in a connection.""" + + __schema__ = github_schema + __field_names__ = ("cursor", "node") + cursor = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="cursor") + """A cursor for use in pagination.""" + + node = sgqlc.types.Field("ProjectNext", graphql_name="node") + """The item at the end of the edge.""" + + +class ProjectNextFieldCommon(sgqlc.types.Interface): + """Common fields across different field types""" + + __schema__ = github_schema + __field_names__ = ("created_at", "data_type", "database_id", "id", "name", "project", "settings", "updated_at") + created_at = sgqlc.types.Field(sgqlc.types.non_null(DateTime), graphql_name="createdAt") + """Identifies the date and time when the object was created.""" + + data_type = sgqlc.types.Field(sgqlc.types.non_null(ProjectNextFieldType), graphql_name="dataType") + """The field's type.""" + + database_id = sgqlc.types.Field(Int, graphql_name="databaseId") + """Identifies the primary key from the database.""" + + id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="id") + + name = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="name") + """The project field's name.""" + + project = sgqlc.types.Field(sgqlc.types.non_null("ProjectNext"), graphql_name="project") + """The project that contains this field.""" + + settings = sgqlc.types.Field(String, graphql_name="settings") + """The field's settings.""" + + updated_at = sgqlc.types.Field(sgqlc.types.non_null(DateTime), graphql_name="updatedAt") + """Identifies the date and time when the object was last updated.""" + + +class 
ProjectNextFieldConnection(sgqlc.types.relay.Connection): + """The connection type for ProjectNextField.""" + + __schema__ = github_schema + __field_names__ = ("edges", "nodes", "page_info", "total_count") + edges = sgqlc.types.Field(sgqlc.types.list_of("ProjectNextFieldEdge"), graphql_name="edges") + """A list of edges.""" + + nodes = sgqlc.types.Field(sgqlc.types.list_of("ProjectNextField"), graphql_name="nodes") + """A list of nodes.""" + + page_info = sgqlc.types.Field(sgqlc.types.non_null(PageInfo), graphql_name="pageInfo") + """Information to aid in pagination.""" + + total_count = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="totalCount") + """Identifies the total count of items in the connection.""" + + +class ProjectNextFieldEdge(sgqlc.types.Type): + """An edge in a connection.""" + + __schema__ = github_schema + __field_names__ = ("cursor", "node") + cursor = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="cursor") + """A cursor for use in pagination.""" + + node = sgqlc.types.Field("ProjectNextField", graphql_name="node") + """The item at the end of the edge.""" + + +class ProjectNextItemConnection(sgqlc.types.relay.Connection): + """The connection type for ProjectNextItem.""" + + __schema__ = github_schema + __field_names__ = ("edges", "nodes", "page_info", "total_count") + edges = sgqlc.types.Field(sgqlc.types.list_of("ProjectNextItemEdge"), graphql_name="edges") + """A list of edges.""" + + nodes = sgqlc.types.Field(sgqlc.types.list_of("ProjectNextItem"), graphql_name="nodes") + """A list of nodes.""" + + page_info = sgqlc.types.Field(sgqlc.types.non_null(PageInfo), graphql_name="pageInfo") + """Information to aid in pagination.""" + + total_count = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="totalCount") + """Identifies the total count of items in the connection.""" + + +class ProjectNextItemEdge(sgqlc.types.Type): + """An edge in a connection.""" + + __schema__ = github_schema + __field_names__ = ("cursor", 
"node") + cursor = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="cursor") + """A cursor for use in pagination.""" + + node = sgqlc.types.Field("ProjectNextItem", graphql_name="node") + """The item at the end of the edge.""" + + +class ProjectNextItemFieldValueConnection(sgqlc.types.relay.Connection): + """The connection type for ProjectNextItemFieldValue.""" + + __schema__ = github_schema + __field_names__ = ("edges", "nodes", "page_info", "total_count") + edges = sgqlc.types.Field(sgqlc.types.list_of("ProjectNextItemFieldValueEdge"), graphql_name="edges") + """A list of edges.""" + + nodes = sgqlc.types.Field(sgqlc.types.list_of("ProjectNextItemFieldValue"), graphql_name="nodes") + """A list of nodes.""" + + page_info = sgqlc.types.Field(sgqlc.types.non_null(PageInfo), graphql_name="pageInfo") + """Information to aid in pagination.""" + + total_count = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="totalCount") + """Identifies the total count of items in the connection.""" + + +class ProjectNextItemFieldValueEdge(sgqlc.types.Type): + """An edge in a connection.""" + + __schema__ = github_schema + __field_names__ = ("cursor", "node") + cursor = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="cursor") + """A cursor for use in pagination.""" + + node = sgqlc.types.Field("ProjectNextItemFieldValue", graphql_name="node") + """The item at the end of the edge.""" + + +class ProjectNextOwner(sgqlc.types.Interface): + """Represents an owner of a project (beta).""" + + __schema__ = github_schema + __field_names__ = ("id", "project_next", "projects_next") + id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="id") + + project_next = sgqlc.types.Field( + "ProjectNext", + graphql_name="projectNext", + args=sgqlc.types.ArgDict((("number", sgqlc.types.Arg(sgqlc.types.non_null(Int), graphql_name="number", default=None)),)), + ) + """Find a project by project (beta) number. 
+ + Arguments: + + * `number` (`Int!`): The project (beta) number. + """ + + projects_next = sgqlc.types.Field( + sgqlc.types.non_null(ProjectNextConnection), + graphql_name="projectsNext", + args=sgqlc.types.ArgDict( + ( + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ("query", sgqlc.types.Arg(String, graphql_name="query", default=None)), + ("sort_by", sgqlc.types.Arg(ProjectNextOrderField, graphql_name="sortBy", default="TITLE")), + ) + ), + ) + """A list of projects (beta) under the owner. + + Arguments: + + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. + * `query` (`String`): A project (beta) to search for under the the + owner. + * `sort_by` (`ProjectNextOrderField`): How to order the returned + projects (beta). (default: `TITLE`) + """ + + +class ProjectOwner(sgqlc.types.Interface): + """Represents an owner of a Project.""" + + __schema__ = github_schema + __field_names__ = ("id", "project", "projects", "projects_resource_path", "projects_url", "viewer_can_create_projects") + id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="id") + + project = sgqlc.types.Field( + "Project", + graphql_name="project", + args=sgqlc.types.ArgDict((("number", sgqlc.types.Arg(sgqlc.types.non_null(Int), graphql_name="number", default=None)),)), + ) + """Find project by number. + + Arguments: + + * `number` (`Int!`): The project number to find. 
+ """ + + projects = sgqlc.types.Field( + sgqlc.types.non_null(ProjectConnection), + graphql_name="projects", + args=sgqlc.types.ArgDict( + ( + ("order_by", sgqlc.types.Arg(ProjectOrder, graphql_name="orderBy", default=None)), + ("search", sgqlc.types.Arg(String, graphql_name="search", default=None)), + ("states", sgqlc.types.Arg(sgqlc.types.list_of(sgqlc.types.non_null(ProjectState)), graphql_name="states", default=None)), + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ) + ), + ) + """A list of projects under the owner. + + Arguments: + + * `order_by` (`ProjectOrder`): Ordering options for projects + returned from the connection + * `search` (`String`): Query to search projects by, currently only + searching by name. + * `states` (`[ProjectState!]`): A list of states to filter the + projects by. + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. 
+ """ + + projects_resource_path = sgqlc.types.Field(sgqlc.types.non_null(URI), graphql_name="projectsResourcePath") + """The HTTP path listing owners projects""" + + projects_url = sgqlc.types.Field(sgqlc.types.non_null(URI), graphql_name="projectsUrl") + """The HTTP URL listing owners projects""" + + viewer_can_create_projects = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="viewerCanCreateProjects") + """Can the current viewer create new projects on this owner.""" + + +class ProjectProgress(sgqlc.types.Type): + """Project progress stats.""" + + __schema__ = github_schema + __field_names__ = ( + "done_count", + "done_percentage", + "enabled", + "in_progress_count", + "in_progress_percentage", + "todo_count", + "todo_percentage", + ) + done_count = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="doneCount") + """The number of done cards.""" + + done_percentage = sgqlc.types.Field(sgqlc.types.non_null(Float), graphql_name="donePercentage") + """The percentage of done cards.""" + + enabled = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="enabled") + """Whether progress tracking is enabled and cards with purpose exist + for this project + """ + + in_progress_count = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="inProgressCount") + """The number of in-progress cards.""" + + in_progress_percentage = sgqlc.types.Field(sgqlc.types.non_null(Float), graphql_name="inProgressPercentage") + """The percentage of in-progress cards.""" + + todo_count = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="todoCount") + """The number of to do cards.""" + + todo_percentage = sgqlc.types.Field(sgqlc.types.non_null(Float), graphql_name="todoPercentage") + """The percentage of to do cards.""" + + +class ProjectViewConnection(sgqlc.types.relay.Connection): + """The connection type for ProjectView.""" + + __schema__ = github_schema + __field_names__ = ("edges", "nodes", "page_info", "total_count") + edges = 
sgqlc.types.Field(sgqlc.types.list_of("ProjectViewEdge"), graphql_name="edges") + """A list of edges.""" + + nodes = sgqlc.types.Field(sgqlc.types.list_of("ProjectView"), graphql_name="nodes") + """A list of nodes.""" + + page_info = sgqlc.types.Field(sgqlc.types.non_null(PageInfo), graphql_name="pageInfo") + """Information to aid in pagination.""" + + total_count = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="totalCount") + """Identifies the total count of items in the connection.""" + + +class ProjectViewEdge(sgqlc.types.Type): + """An edge in a connection.""" + + __schema__ = github_schema + __field_names__ = ("cursor", "node") + cursor = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="cursor") + """A cursor for use in pagination.""" + + node = sgqlc.types.Field("ProjectView", graphql_name="node") + """The item at the end of the edge.""" + + +class PublicKeyConnection(sgqlc.types.relay.Connection): + """The connection type for PublicKey.""" + + __schema__ = github_schema + __field_names__ = ("edges", "nodes", "page_info", "total_count") + edges = sgqlc.types.Field(sgqlc.types.list_of("PublicKeyEdge"), graphql_name="edges") + """A list of edges.""" + + nodes = sgqlc.types.Field(sgqlc.types.list_of("PublicKey"), graphql_name="nodes") + """A list of nodes.""" + + page_info = sgqlc.types.Field(sgqlc.types.non_null(PageInfo), graphql_name="pageInfo") + """Information to aid in pagination.""" + + total_count = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="totalCount") + """Identifies the total count of items in the connection.""" + + +class PublicKeyEdge(sgqlc.types.Type): + """An edge in a connection.""" + + __schema__ = github_schema + __field_names__ = ("cursor", "node") + cursor = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="cursor") + """A cursor for use in pagination.""" + + node = sgqlc.types.Field("PublicKey", graphql_name="node") + """The item at the end of the edge.""" + + +class 
PullRequestChangedFile(sgqlc.types.Type): + """A file changed in a pull request.""" + + __schema__ = github_schema + __field_names__ = ("additions", "change_type", "deletions", "path", "viewer_viewed_state") + additions = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="additions") + """The number of additions to the file.""" + + change_type = sgqlc.types.Field(sgqlc.types.non_null(PatchStatus), graphql_name="changeType") + """How the file was changed in this PullRequest""" + + deletions = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="deletions") + """The number of deletions to the file.""" + + path = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="path") + """The path of the file.""" + + viewer_viewed_state = sgqlc.types.Field(sgqlc.types.non_null(FileViewedState), graphql_name="viewerViewedState") + """The state of the file for the viewer.""" + + +class PullRequestChangedFileConnection(sgqlc.types.relay.Connection): + """The connection type for PullRequestChangedFile.""" + + __schema__ = github_schema + __field_names__ = ("edges", "nodes", "page_info", "total_count") + edges = sgqlc.types.Field(sgqlc.types.list_of("PullRequestChangedFileEdge"), graphql_name="edges") + """A list of edges.""" + + nodes = sgqlc.types.Field(sgqlc.types.list_of(PullRequestChangedFile), graphql_name="nodes") + """A list of nodes.""" + + page_info = sgqlc.types.Field(sgqlc.types.non_null(PageInfo), graphql_name="pageInfo") + """Information to aid in pagination.""" + + total_count = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="totalCount") + """Identifies the total count of items in the connection.""" + + +class PullRequestChangedFileEdge(sgqlc.types.Type): + """An edge in a connection.""" + + __schema__ = github_schema + __field_names__ = ("cursor", "node") + cursor = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="cursor") + """A cursor for use in pagination.""" + + node = sgqlc.types.Field(PullRequestChangedFile, 
graphql_name="node") + """The item at the end of the edge.""" + + +class PullRequestCommitConnection(sgqlc.types.relay.Connection): + """The connection type for PullRequestCommit.""" + + __schema__ = github_schema + __field_names__ = ("edges", "nodes", "page_info", "total_count") + edges = sgqlc.types.Field(sgqlc.types.list_of("PullRequestCommitEdge"), graphql_name="edges") + """A list of edges.""" + + nodes = sgqlc.types.Field(sgqlc.types.list_of("PullRequestCommit"), graphql_name="nodes") + """A list of nodes.""" + + page_info = sgqlc.types.Field(sgqlc.types.non_null(PageInfo), graphql_name="pageInfo") + """Information to aid in pagination.""" + + total_count = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="totalCount") + """Identifies the total count of items in the connection.""" + + +class PullRequestCommitEdge(sgqlc.types.Type): + """An edge in a connection.""" + + __schema__ = github_schema + __field_names__ = ("cursor", "node") + cursor = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="cursor") + """A cursor for use in pagination.""" + + node = sgqlc.types.Field("PullRequestCommit", graphql_name="node") + """The item at the end of the edge.""" + + +class PullRequestConnection(sgqlc.types.relay.Connection): + """The connection type for PullRequest.""" + + __schema__ = github_schema + __field_names__ = ("edges", "nodes", "page_info", "total_count") + edges = sgqlc.types.Field(sgqlc.types.list_of("PullRequestEdge"), graphql_name="edges") + """A list of edges.""" + + nodes = sgqlc.types.Field(sgqlc.types.list_of("PullRequest"), graphql_name="nodes") + """A list of nodes.""" + + page_info = sgqlc.types.Field(sgqlc.types.non_null(PageInfo), graphql_name="pageInfo") + """Information to aid in pagination.""" + + total_count = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="totalCount") + """Identifies the total count of items in the connection.""" + + +class PullRequestContributionsByRepository(sgqlc.types.Type): + """This 
aggregates pull requests opened by a user within one + repository. + """ + + __schema__ = github_schema + __field_names__ = ("contributions", "repository") + contributions = sgqlc.types.Field( + sgqlc.types.non_null(CreatedPullRequestContributionConnection), + graphql_name="contributions", + args=sgqlc.types.ArgDict( + ( + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ("order_by", sgqlc.types.Arg(ContributionOrder, graphql_name="orderBy", default={"direction": "DESC"})), + ) + ), + ) + """The pull request contributions. + + Arguments: + + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. + * `order_by` (`ContributionOrder`): Ordering options for + contributions returned from the connection. 
(default: + `{direction: DESC}`) + """ + + repository = sgqlc.types.Field(sgqlc.types.non_null("Repository"), graphql_name="repository") + """The repository in which the pull requests were opened.""" + + +class PullRequestEdge(sgqlc.types.Type): + """An edge in a connection.""" + + __schema__ = github_schema + __field_names__ = ("cursor", "node") + cursor = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="cursor") + """A cursor for use in pagination.""" + + node = sgqlc.types.Field("PullRequest", graphql_name="node") + """The item at the end of the edge.""" + + +class PullRequestReviewCommentConnection(sgqlc.types.relay.Connection): + """The connection type for PullRequestReviewComment.""" + + __schema__ = github_schema + __field_names__ = ("edges", "nodes", "page_info", "total_count") + edges = sgqlc.types.Field(sgqlc.types.list_of("PullRequestReviewCommentEdge"), graphql_name="edges") + """A list of edges.""" + + nodes = sgqlc.types.Field(sgqlc.types.list_of("PullRequestReviewComment"), graphql_name="nodes") + """A list of nodes.""" + + page_info = sgqlc.types.Field(sgqlc.types.non_null(PageInfo), graphql_name="pageInfo") + """Information to aid in pagination.""" + + total_count = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="totalCount") + """Identifies the total count of items in the connection.""" + + +class PullRequestReviewCommentEdge(sgqlc.types.Type): + """An edge in a connection.""" + + __schema__ = github_schema + __field_names__ = ("cursor", "node") + cursor = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="cursor") + """A cursor for use in pagination.""" + + node = sgqlc.types.Field("PullRequestReviewComment", graphql_name="node") + """The item at the end of the edge.""" + + +class PullRequestReviewConnection(sgqlc.types.relay.Connection): + """The connection type for PullRequestReview.""" + + __schema__ = github_schema + __field_names__ = ("edges", "nodes", "page_info", "total_count") + edges = 
sgqlc.types.Field(sgqlc.types.list_of("PullRequestReviewEdge"), graphql_name="edges") + """A list of edges.""" + + nodes = sgqlc.types.Field(sgqlc.types.list_of("PullRequestReview"), graphql_name="nodes") + """A list of nodes.""" + + page_info = sgqlc.types.Field(sgqlc.types.non_null(PageInfo), graphql_name="pageInfo") + """Information to aid in pagination.""" + + total_count = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="totalCount") + """Identifies the total count of items in the connection.""" + + +class PullRequestReviewContributionsByRepository(sgqlc.types.Type): + """This aggregates pull request reviews made by a user within one + repository. + """ + + __schema__ = github_schema + __field_names__ = ("contributions", "repository") + contributions = sgqlc.types.Field( + sgqlc.types.non_null(CreatedPullRequestReviewContributionConnection), + graphql_name="contributions", + args=sgqlc.types.ArgDict( + ( + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ("order_by", sgqlc.types.Arg(ContributionOrder, graphql_name="orderBy", default={"direction": "DESC"})), + ) + ), + ) + """The pull request review contributions. + + Arguments: + + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. + * `order_by` (`ContributionOrder`): Ordering options for + contributions returned from the connection. 
(default: + `{direction: DESC}`) + """ + + repository = sgqlc.types.Field(sgqlc.types.non_null("Repository"), graphql_name="repository") + """The repository in which the pull request reviews were made.""" + + +class PullRequestReviewEdge(sgqlc.types.Type): + """An edge in a connection.""" + + __schema__ = github_schema + __field_names__ = ("cursor", "node") + cursor = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="cursor") + """A cursor for use in pagination.""" + + node = sgqlc.types.Field("PullRequestReview", graphql_name="node") + """The item at the end of the edge.""" + + +class PullRequestReviewThreadConnection(sgqlc.types.relay.Connection): + """Review comment threads for a pull request review.""" + + __schema__ = github_schema + __field_names__ = ("edges", "nodes", "page_info", "total_count") + edges = sgqlc.types.Field(sgqlc.types.list_of("PullRequestReviewThreadEdge"), graphql_name="edges") + """A list of edges.""" + + nodes = sgqlc.types.Field(sgqlc.types.list_of("PullRequestReviewThread"), graphql_name="nodes") + """A list of nodes.""" + + page_info = sgqlc.types.Field(sgqlc.types.non_null(PageInfo), graphql_name="pageInfo") + """Information to aid in pagination.""" + + total_count = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="totalCount") + """Identifies the total count of items in the connection.""" + + +class PullRequestReviewThreadEdge(sgqlc.types.Type): + """An edge in a connection.""" + + __schema__ = github_schema + __field_names__ = ("cursor", "node") + cursor = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="cursor") + """A cursor for use in pagination.""" + + node = sgqlc.types.Field("PullRequestReviewThread", graphql_name="node") + """The item at the end of the edge.""" + + +class PullRequestRevisionMarker(sgqlc.types.Type): + """Represents the latest point in the pull request timeline for which + the viewer has seen the pull request's commits. 
+ """ + + __schema__ = github_schema + __field_names__ = ("created_at", "last_seen_commit", "pull_request") + created_at = sgqlc.types.Field(sgqlc.types.non_null(DateTime), graphql_name="createdAt") + """Identifies the date and time when the object was created.""" + + last_seen_commit = sgqlc.types.Field(sgqlc.types.non_null("Commit"), graphql_name="lastSeenCommit") + """The last commit the viewer has seen.""" + + pull_request = sgqlc.types.Field(sgqlc.types.non_null("PullRequest"), graphql_name="pullRequest") + """The pull request to which the marker belongs.""" + + +class PullRequestTemplate(sgqlc.types.Type): + """A repository pull request template.""" + + __schema__ = github_schema + __field_names__ = ("body", "filename", "repository") + body = sgqlc.types.Field(String, graphql_name="body") + """The body of the template""" + + filename = sgqlc.types.Field(String, graphql_name="filename") + """The filename of the template""" + + repository = sgqlc.types.Field(sgqlc.types.non_null("Repository"), graphql_name="repository") + """The repository the template belongs to""" + + +class PullRequestTimelineConnection(sgqlc.types.relay.Connection): + """The connection type for PullRequestTimelineItem.""" + + __schema__ = github_schema + __field_names__ = ("edges", "nodes", "page_info", "total_count") + edges = sgqlc.types.Field(sgqlc.types.list_of("PullRequestTimelineItemEdge"), graphql_name="edges") + """A list of edges.""" + + nodes = sgqlc.types.Field(sgqlc.types.list_of("PullRequestTimelineItem"), graphql_name="nodes") + """A list of nodes.""" + + page_info = sgqlc.types.Field(sgqlc.types.non_null(PageInfo), graphql_name="pageInfo") + """Information to aid in pagination.""" + + total_count = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="totalCount") + """Identifies the total count of items in the connection.""" + + +class PullRequestTimelineItemEdge(sgqlc.types.Type): + """An edge in a connection.""" + + __schema__ = github_schema + __field_names__ = 
("cursor", "node") + cursor = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="cursor") + """A cursor for use in pagination.""" + + node = sgqlc.types.Field("PullRequestTimelineItem", graphql_name="node") + """The item at the end of the edge.""" + + +class PullRequestTimelineItemsConnection(sgqlc.types.relay.Connection): + """The connection type for PullRequestTimelineItems.""" + + __schema__ = github_schema + __field_names__ = ("edges", "filtered_count", "nodes", "page_count", "page_info", "total_count", "updated_at") + edges = sgqlc.types.Field(sgqlc.types.list_of("PullRequestTimelineItemsEdge"), graphql_name="edges") + """A list of edges.""" + + filtered_count = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="filteredCount") + """Identifies the count of items after applying `before` and `after` + filters. + """ + + nodes = sgqlc.types.Field(sgqlc.types.list_of("PullRequestTimelineItems"), graphql_name="nodes") + """A list of nodes.""" + + page_count = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="pageCount") + """Identifies the count of items after applying `before`/`after` + filters and `first`/`last`/`skip` slicing. 
+ """ + + page_info = sgqlc.types.Field(sgqlc.types.non_null(PageInfo), graphql_name="pageInfo") + """Information to aid in pagination.""" + + total_count = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="totalCount") + """Identifies the total count of items in the connection.""" + + updated_at = sgqlc.types.Field(sgqlc.types.non_null(DateTime), graphql_name="updatedAt") + """Identifies the date and time when the timeline was last updated.""" + + +class PullRequestTimelineItemsEdge(sgqlc.types.Type): + """An edge in a connection.""" + + __schema__ = github_schema + __field_names__ = ("cursor", "node") + cursor = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="cursor") + """A cursor for use in pagination.""" + + node = sgqlc.types.Field("PullRequestTimelineItems", graphql_name="node") + """The item at the end of the edge.""" + + +class PushAllowanceConnection(sgqlc.types.relay.Connection): + """The connection type for PushAllowance.""" + + __schema__ = github_schema + __field_names__ = ("edges", "nodes", "page_info", "total_count") + edges = sgqlc.types.Field(sgqlc.types.list_of("PushAllowanceEdge"), graphql_name="edges") + """A list of edges.""" + + nodes = sgqlc.types.Field(sgqlc.types.list_of("PushAllowance"), graphql_name="nodes") + """A list of nodes.""" + + page_info = sgqlc.types.Field(sgqlc.types.non_null(PageInfo), graphql_name="pageInfo") + """Information to aid in pagination.""" + + total_count = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="totalCount") + """Identifies the total count of items in the connection.""" + + +class PushAllowanceEdge(sgqlc.types.Type): + """An edge in a connection.""" + + __schema__ = github_schema + __field_names__ = ("cursor", "node") + cursor = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="cursor") + """A cursor for use in pagination.""" + + node = sgqlc.types.Field("PushAllowance", graphql_name="node") + """The item at the end of the edge.""" + + +class 
Query(sgqlc.types.Type): + """The query root of GitHub's GraphQL interface.""" + + __schema__ = github_schema + __field_names__ = ( + "code_of_conduct", + "codes_of_conduct", + "enterprise", + "enterprise_administrator_invitation", + "enterprise_administrator_invitation_by_token", + "license", + "licenses", + "marketplace_categories", + "marketplace_category", + "marketplace_listing", + "marketplace_listings", + "meta", + "node", + "nodes", + "organization", + "rate_limit", + "relay", + "repository", + "repository_owner", + "resource", + "search", + "security_advisories", + "security_advisory", + "security_vulnerabilities", + "sponsorables", + "topic", + "user", + "viewer", + ) + code_of_conduct = sgqlc.types.Field( + "CodeOfConduct", + graphql_name="codeOfConduct", + args=sgqlc.types.ArgDict((("key", sgqlc.types.Arg(sgqlc.types.non_null(String), graphql_name="key", default=None)),)), + ) + """Look up a code of conduct by its key + + Arguments: + + * `key` (`String!`): The code of conduct's key + """ + + codes_of_conduct = sgqlc.types.Field(sgqlc.types.list_of("CodeOfConduct"), graphql_name="codesOfConduct") + """Look up a code of conduct by its key""" + + enterprise = sgqlc.types.Field( + "Enterprise", + graphql_name="enterprise", + args=sgqlc.types.ArgDict( + ( + ("slug", sgqlc.types.Arg(sgqlc.types.non_null(String), graphql_name="slug", default=None)), + ("invitation_token", sgqlc.types.Arg(String, graphql_name="invitationToken", default=None)), + ) + ), + ) + """Look up an enterprise by URL slug. + + Arguments: + + * `slug` (`String!`): The enterprise URL slug. + * `invitation_token` (`String`): The enterprise invitation token. 
+ """ + + enterprise_administrator_invitation = sgqlc.types.Field( + "EnterpriseAdministratorInvitation", + graphql_name="enterpriseAdministratorInvitation", + args=sgqlc.types.ArgDict( + ( + ("user_login", sgqlc.types.Arg(sgqlc.types.non_null(String), graphql_name="userLogin", default=None)), + ("enterprise_slug", sgqlc.types.Arg(sgqlc.types.non_null(String), graphql_name="enterpriseSlug", default=None)), + ("role", sgqlc.types.Arg(sgqlc.types.non_null(EnterpriseAdministratorRole), graphql_name="role", default=None)), + ) + ), + ) + """Look up a pending enterprise administrator invitation by invitee, + enterprise and role. + + Arguments: + + * `user_login` (`String!`): The login of the user invited to join + the business. + * `enterprise_slug` (`String!`): The slug of the enterprise the + user was invited to join. + * `role` (`EnterpriseAdministratorRole!`): The role for the + business member invitation. + """ + + enterprise_administrator_invitation_by_token = sgqlc.types.Field( + "EnterpriseAdministratorInvitation", + graphql_name="enterpriseAdministratorInvitationByToken", + args=sgqlc.types.ArgDict( + (("invitation_token", sgqlc.types.Arg(sgqlc.types.non_null(String), graphql_name="invitationToken", default=None)),) + ), + ) + """Look up a pending enterprise administrator invitation by + invitation token. + + Arguments: + + * `invitation_token` (`String!`): The invitation token sent with + the invitation email. 
+ """ + + license = sgqlc.types.Field( + "License", + graphql_name="license", + args=sgqlc.types.ArgDict((("key", sgqlc.types.Arg(sgqlc.types.non_null(String), graphql_name="key", default=None)),)), + ) + """Look up an open source license by its key + + Arguments: + + * `key` (`String!`): The license's downcased SPDX ID + """ + + licenses = sgqlc.types.Field(sgqlc.types.non_null(sgqlc.types.list_of("License")), graphql_name="licenses") + """Return a list of known open source licenses""" + + marketplace_categories = sgqlc.types.Field( + sgqlc.types.non_null(sgqlc.types.list_of(sgqlc.types.non_null("MarketplaceCategory"))), + graphql_name="marketplaceCategories", + args=sgqlc.types.ArgDict( + ( + ( + "include_categories", + sgqlc.types.Arg(sgqlc.types.list_of(sgqlc.types.non_null(String)), graphql_name="includeCategories", default=None), + ), + ("exclude_empty", sgqlc.types.Arg(Boolean, graphql_name="excludeEmpty", default=None)), + ("exclude_subcategories", sgqlc.types.Arg(Boolean, graphql_name="excludeSubcategories", default=None)), + ) + ), + ) + """Get alphabetically sorted list of Marketplace categories + + Arguments: + + * `include_categories` (`[String!]`): Return only the specified + categories. + * `exclude_empty` (`Boolean`): Exclude categories with no + listings. + * `exclude_subcategories` (`Boolean`): Returns top level + categories only, excluding any subcategories. + """ + + marketplace_category = sgqlc.types.Field( + "MarketplaceCategory", + graphql_name="marketplaceCategory", + args=sgqlc.types.ArgDict( + ( + ("slug", sgqlc.types.Arg(sgqlc.types.non_null(String), graphql_name="slug", default=None)), + ("use_topic_aliases", sgqlc.types.Arg(Boolean, graphql_name="useTopicAliases", default=None)), + ) + ), + ) + """Look up a Marketplace category by its slug. + + Arguments: + + * `slug` (`String!`): The URL slug of the category. 
+ * `use_topic_aliases` (`Boolean`): Also check topic aliases for + the category slug + """ + + marketplace_listing = sgqlc.types.Field( + "MarketplaceListing", + graphql_name="marketplaceListing", + args=sgqlc.types.ArgDict((("slug", sgqlc.types.Arg(sgqlc.types.non_null(String), graphql_name="slug", default=None)),)), + ) + """Look up a single Marketplace listing + + Arguments: + + * `slug` (`String!`): Select the listing that matches this slug. + It's the short name of the listing used in its URL. + """ + + marketplace_listings = sgqlc.types.Field( + sgqlc.types.non_null(MarketplaceListingConnection), + graphql_name="marketplaceListings", + args=sgqlc.types.ArgDict( + ( + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ("category_slug", sgqlc.types.Arg(String, graphql_name="categorySlug", default=None)), + ("use_topic_aliases", sgqlc.types.Arg(Boolean, graphql_name="useTopicAliases", default=None)), + ("viewer_can_admin", sgqlc.types.Arg(Boolean, graphql_name="viewerCanAdmin", default=None)), + ("admin_id", sgqlc.types.Arg(ID, graphql_name="adminId", default=None)), + ("organization_id", sgqlc.types.Arg(ID, graphql_name="organizationId", default=None)), + ("all_states", sgqlc.types.Arg(Boolean, graphql_name="allStates", default=None)), + ("slugs", sgqlc.types.Arg(sgqlc.types.list_of(String), graphql_name="slugs", default=None)), + ("primary_category_only", sgqlc.types.Arg(Boolean, graphql_name="primaryCategoryOnly", default=False)), + ("with_free_trials_only", sgqlc.types.Arg(Boolean, graphql_name="withFreeTrialsOnly", default=False)), + ) + ), + ) + """Look up Marketplace listings + + Arguments: + + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. 
+ * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. + * `category_slug` (`String`): Select only listings with the given + category. + * `use_topic_aliases` (`Boolean`): Also check topic aliases for + the category slug + * `viewer_can_admin` (`Boolean`): Select listings to which user + has admin access. If omitted, listings visible to the viewer are + returned. + * `admin_id` (`ID`): Select listings that can be administered by + the specified user. + * `organization_id` (`ID`): Select listings for products owned by + the specified organization. + * `all_states` (`Boolean`): Select listings visible to the viewer + even if they are not approved. If omitted or false, only + approved listings will be returned. + * `slugs` (`[String]`): Select the listings with these slugs, if + they are visible to the viewer. + * `primary_category_only` (`Boolean`): Select only listings where + the primary category matches the given category slug. (default: + `false`) + * `with_free_trials_only` (`Boolean`): Select only listings that + offer a free trial. (default: `false`) + """ + + meta = sgqlc.types.Field(sgqlc.types.non_null(GitHubMetadata), graphql_name="meta") + """Return information about the GitHub instance""" + + node = sgqlc.types.Field( + Node, + graphql_name="node", + args=sgqlc.types.ArgDict((("id", sgqlc.types.Arg(sgqlc.types.non_null(ID), graphql_name="id", default=None)),)), + ) + """Fetches an object given its ID. + + Arguments: + + * `id` (`ID!`): ID of the object. + """ + + nodes = sgqlc.types.Field( + sgqlc.types.non_null(sgqlc.types.list_of(Node)), + graphql_name="nodes", + args=sgqlc.types.ArgDict( + ( + ( + "ids", + sgqlc.types.Arg(sgqlc.types.non_null(sgqlc.types.list_of(sgqlc.types.non_null(ID))), graphql_name="ids", default=None), + ), + ) + ), + ) + """Lookup nodes by a list of IDs. 
+ + Arguments: + + * `ids` (`[ID!]!`): The list of node IDs. + """ + + organization = sgqlc.types.Field( + "Organization", + graphql_name="organization", + args=sgqlc.types.ArgDict((("login", sgqlc.types.Arg(sgqlc.types.non_null(String), graphql_name="login", default=None)),)), + ) + """Lookup a organization by login. + + Arguments: + + * `login` (`String!`): The organization's login. + """ + + rate_limit = sgqlc.types.Field( + "RateLimit", + graphql_name="rateLimit", + args=sgqlc.types.ArgDict((("dry_run", sgqlc.types.Arg(Boolean, graphql_name="dryRun", default=False)),)), + ) + """The client's rate limit information. + + Arguments: + + * `dry_run` (`Boolean`): If true, calculate the cost for the query + without evaluating it (default: `false`) + """ + + relay = sgqlc.types.Field(sgqlc.types.non_null("Query"), graphql_name="relay") + """Hack to workaround https://github.com/facebook/relay/issues/112 + re-exposing the root query object + """ + + repository = sgqlc.types.Field( + "Repository", + graphql_name="repository", + args=sgqlc.types.ArgDict( + ( + ("owner", sgqlc.types.Arg(sgqlc.types.non_null(String), graphql_name="owner", default=None)), + ("name", sgqlc.types.Arg(sgqlc.types.non_null(String), graphql_name="name", default=None)), + ("follow_renames", sgqlc.types.Arg(Boolean, graphql_name="followRenames", default=True)), + ) + ), + ) + """Lookup a given repository by the owner and repository name. + + Arguments: + + * `owner` (`String!`): The login field of a user or organization + * `name` (`String!`): The name of the repository + * `follow_renames` (`Boolean`): Follow repository renames. If + disabled, a repository referenced by its old name will return an + error. (default: `true`) + """ + + repository_owner = sgqlc.types.Field( + "RepositoryOwner", + graphql_name="repositoryOwner", + args=sgqlc.types.ArgDict((("login", sgqlc.types.Arg(sgqlc.types.non_null(String), graphql_name="login", default=None)),)), + ) + """Lookup a repository owner (ie. 
either a User or an Organization) + by login. + + Arguments: + + * `login` (`String!`): The username to lookup the owner by. + """ + + resource = sgqlc.types.Field( + "UniformResourceLocatable", + graphql_name="resource", + args=sgqlc.types.ArgDict((("url", sgqlc.types.Arg(sgqlc.types.non_null(URI), graphql_name="url", default=None)),)), + ) + """Lookup resource by a URL. + + Arguments: + + * `url` (`URI!`): The URL. + """ + + search = sgqlc.types.Field( + sgqlc.types.non_null("SearchResultItemConnection"), + graphql_name="search", + args=sgqlc.types.ArgDict( + ( + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ("query", sgqlc.types.Arg(sgqlc.types.non_null(String), graphql_name="query", default=None)), + ("type", sgqlc.types.Arg(sgqlc.types.non_null(SearchType), graphql_name="type", default=None)), + ) + ), + ) + """Perform a search across resources. + + Arguments: + + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. + * `query` (`String!`): The search string to look for. + * `type` (`SearchType!`): The types of search items to search + within. 
+ """ + + security_advisories = sgqlc.types.Field( + sgqlc.types.non_null("SecurityAdvisoryConnection"), + graphql_name="securityAdvisories", + args=sgqlc.types.ArgDict( + ( + ( + "order_by", + sgqlc.types.Arg(SecurityAdvisoryOrder, graphql_name="orderBy", default={"field": "UPDATED_AT", "direction": "DESC"}), + ), + ("identifier", sgqlc.types.Arg(SecurityAdvisoryIdentifierFilter, graphql_name="identifier", default=None)), + ("published_since", sgqlc.types.Arg(DateTime, graphql_name="publishedSince", default=None)), + ("updated_since", sgqlc.types.Arg(DateTime, graphql_name="updatedSince", default=None)), + ( + "classifications", + sgqlc.types.Arg( + sgqlc.types.list_of(sgqlc.types.non_null(SecurityAdvisoryClassification)), + graphql_name="classifications", + default=None, + ), + ), + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ) + ), + ) + """GitHub Security Advisories + + Arguments: + + * `order_by` (`SecurityAdvisoryOrder`): Ordering options for the + returned topics. (default: `{field: UPDATED_AT, direction: + DESC}`) + * `identifier` (`SecurityAdvisoryIdentifierFilter`): Filter + advisories by identifier, e.g. GHSA or CVE. + * `published_since` (`DateTime`): Filter advisories to those + published since a time in the past. + * `updated_since` (`DateTime`): Filter advisories to those updated + since a time in the past. + * `classifications` (`[SecurityAdvisoryClassification!]`): A list + of classifications to filter advisories by. + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. 
+ * `last` (`Int`): Returns the last _n_ elements from the list. + """ + + security_advisory = sgqlc.types.Field( + "SecurityAdvisory", + graphql_name="securityAdvisory", + args=sgqlc.types.ArgDict((("ghsa_id", sgqlc.types.Arg(sgqlc.types.non_null(String), graphql_name="ghsaId", default=None)),)), + ) + """Fetch a Security Advisory by its GHSA ID + + Arguments: + + * `ghsa_id` (`String!`): GitHub Security Advisory ID. + """ + + security_vulnerabilities = sgqlc.types.Field( + sgqlc.types.non_null("SecurityVulnerabilityConnection"), + graphql_name="securityVulnerabilities", + args=sgqlc.types.ArgDict( + ( + ( + "order_by", + sgqlc.types.Arg( + SecurityVulnerabilityOrder, graphql_name="orderBy", default={"field": "UPDATED_AT", "direction": "DESC"} + ), + ), + ("ecosystem", sgqlc.types.Arg(SecurityAdvisoryEcosystem, graphql_name="ecosystem", default=None)), + ("package", sgqlc.types.Arg(String, graphql_name="package", default=None)), + ( + "severities", + sgqlc.types.Arg( + sgqlc.types.list_of(sgqlc.types.non_null(SecurityAdvisorySeverity)), graphql_name="severities", default=None + ), + ), + ( + "classifications", + sgqlc.types.Arg( + sgqlc.types.list_of(sgqlc.types.non_null(SecurityAdvisoryClassification)), + graphql_name="classifications", + default=None, + ), + ), + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ) + ), + ) + """Software Vulnerabilities documented by GitHub Security Advisories + + Arguments: + + * `order_by` (`SecurityVulnerabilityOrder`): Ordering options for + the returned topics. (default: `{field: UPDATED_AT, direction: + DESC}`) + * `ecosystem` (`SecurityAdvisoryEcosystem`): An ecosystem to + filter vulnerabilities by. + * `package` (`String`): A package name to filter vulnerabilities + by. 
+ * `severities` (`[SecurityAdvisorySeverity!]`): A list of + severities to filter vulnerabilities by. + * `classifications` (`[SecurityAdvisoryClassification!]`): A list + of advisory classifications to filter vulnerabilities by. + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. + """ + + sponsorables = sgqlc.types.Field( + sgqlc.types.non_null("SponsorableItemConnection"), + graphql_name="sponsorables", + args=sgqlc.types.ArgDict( + ( + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ("order_by", sgqlc.types.Arg(SponsorableOrder, graphql_name="orderBy", default={"field": "LOGIN", "direction": "ASC"})), + ("only_dependencies", sgqlc.types.Arg(Boolean, graphql_name="onlyDependencies", default=False)), + ("org_login_for_dependencies", sgqlc.types.Arg(String, graphql_name="orgLoginForDependencies", default=None)), + ("dependency_ecosystem", sgqlc.types.Arg(SecurityAdvisoryEcosystem, graphql_name="dependencyEcosystem", default=None)), + ("ecosystem", sgqlc.types.Arg(DependencyGraphEcosystem, graphql_name="ecosystem", default=None)), + ) + ), + ) + """Users and organizations who can be sponsored via GitHub Sponsors. + + Arguments: + + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. 
+ * `order_by` (`SponsorableOrder`): Ordering options for users and + organizations returned from the connection. (default: `{field: + LOGIN, direction: ASC}`) + * `only_dependencies` (`Boolean`): Whether only sponsorables who + own the viewer's dependencies will be returned. Must be + authenticated to use. Can check an organization instead for + their dependencies owned by sponsorables by passing + orgLoginForDependencies. (default: `false`) + * `org_login_for_dependencies` (`String`): Optional organization + username for whose dependencies should be checked. Used when + onlyDependencies = true. Omit to check your own dependencies. If + you are not an administrator of the organization, only + dependencies from its public repositories will be considered. + * `dependency_ecosystem` (`SecurityAdvisoryEcosystem`): Optional + filter for which dependencies should be checked for sponsorable + owners. Only sponsorable owners of dependencies in this + ecosystem will be included. Used when onlyDependencies = true. + **Upcoming Change on 2022-07-01 UTC** **Description:** + `dependencyEcosystem` will be removed. Use the ecosystem + argument instead. **Reason:** The type is switching from + SecurityAdvisoryEcosystem to DependencyGraphEcosystem. + * `ecosystem` (`DependencyGraphEcosystem`): Optional filter for + which dependencies should be checked for sponsorable owners. + Only sponsorable owners of dependencies in this ecosystem will + be included. Used when onlyDependencies = true. + """ + + topic = sgqlc.types.Field( + "Topic", + graphql_name="topic", + args=sgqlc.types.ArgDict((("name", sgqlc.types.Arg(sgqlc.types.non_null(String), graphql_name="name", default=None)),)), + ) + """Look up a topic by name. + + Arguments: + + * `name` (`String!`): The topic's name. 
+ """ + + user = sgqlc.types.Field( + "User", + graphql_name="user", + args=sgqlc.types.ArgDict((("login", sgqlc.types.Arg(sgqlc.types.non_null(String), graphql_name="login", default=None)),)), + ) + """Lookup a user by login. + + Arguments: + + * `login` (`String!`): The user's login. + """ + + viewer = sgqlc.types.Field(sgqlc.types.non_null("User"), graphql_name="viewer") + """The currently authenticated user.""" + + +class RateLimit(sgqlc.types.Type): + """Represents the client's rate limit.""" + + __schema__ = github_schema + __field_names__ = ("cost", "limit", "node_count", "remaining", "reset_at", "used") + cost = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="cost") + """The point cost for the current query counting against the rate + limit. + """ + + limit = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="limit") + """The maximum number of points the client is permitted to consume in + a 60 minute window. + """ + + node_count = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="nodeCount") + """The maximum number of nodes this query may return""" + + remaining = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="remaining") + """The number of points remaining in the current rate limit window.""" + + reset_at = sgqlc.types.Field(sgqlc.types.non_null(DateTime), graphql_name="resetAt") + """The time at which the current rate limit window resets in UTC + epoch seconds. 
+ """ + + used = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="used") + """The number of points used in the current rate limit window.""" + + +class Reactable(sgqlc.types.Interface): + """Represents a subject that can be reacted on.""" + + __schema__ = github_schema + __field_names__ = ("database_id", "id", "reaction_groups", "reactions", "viewer_can_react") + database_id = sgqlc.types.Field(Int, graphql_name="databaseId") + """Identifies the primary key from the database.""" + + id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="id") + + reaction_groups = sgqlc.types.Field(sgqlc.types.list_of(sgqlc.types.non_null("ReactionGroup")), graphql_name="reactionGroups") + """A list of reactions grouped by content left on the subject.""" + + reactions = sgqlc.types.Field( + sgqlc.types.non_null("ReactionConnection"), + graphql_name="reactions", + args=sgqlc.types.ArgDict( + ( + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ("content", sgqlc.types.Arg(ReactionContent, graphql_name="content", default=None)), + ("order_by", sgqlc.types.Arg(ReactionOrder, graphql_name="orderBy", default=None)), + ) + ), + ) + """A list of Reactions left on the Issue. + + Arguments: + + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. + * `content` (`ReactionContent`): Allows filtering Reactions by + emoji. + * `order_by` (`ReactionOrder`): Allows specifying the order in + which reactions are returned. 
+ """ + + viewer_can_react = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="viewerCanReact") + """Can user react to this subject""" + + +class ReactingUserConnection(sgqlc.types.relay.Connection): + """The connection type for User.""" + + __schema__ = github_schema + __field_names__ = ("edges", "nodes", "page_info", "total_count") + edges = sgqlc.types.Field(sgqlc.types.list_of("ReactingUserEdge"), graphql_name="edges") + """A list of edges.""" + + nodes = sgqlc.types.Field(sgqlc.types.list_of("User"), graphql_name="nodes") + """A list of nodes.""" + + page_info = sgqlc.types.Field(sgqlc.types.non_null(PageInfo), graphql_name="pageInfo") + """Information to aid in pagination.""" + + total_count = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="totalCount") + """Identifies the total count of items in the connection.""" + + +class ReactingUserEdge(sgqlc.types.Type): + """Represents a user that's made a reaction.""" + + __schema__ = github_schema + __field_names__ = ("cursor", "node", "reacted_at") + cursor = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="cursor") + """A cursor for use in pagination.""" + + node = sgqlc.types.Field(sgqlc.types.non_null("User"), graphql_name="node") + + reacted_at = sgqlc.types.Field(sgqlc.types.non_null(DateTime), graphql_name="reactedAt") + """The moment when the user made the reaction.""" + + +class ReactionConnection(sgqlc.types.relay.Connection): + """A list of reactions that have been left on the subject.""" + + __schema__ = github_schema + __field_names__ = ("edges", "nodes", "page_info", "total_count", "viewer_has_reacted") + edges = sgqlc.types.Field(sgqlc.types.list_of("ReactionEdge"), graphql_name="edges") + """A list of edges.""" + + nodes = sgqlc.types.Field(sgqlc.types.list_of("Reaction"), graphql_name="nodes") + """A list of nodes.""" + + page_info = sgqlc.types.Field(sgqlc.types.non_null(PageInfo), graphql_name="pageInfo") + """Information to aid in pagination.""" + + 
total_count = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="totalCount") + """Identifies the total count of items in the connection.""" + + viewer_has_reacted = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="viewerHasReacted") + """Whether or not the authenticated user has left a reaction on the + subject. + """ + + +class ReactionEdge(sgqlc.types.Type): + """An edge in a connection.""" + + __schema__ = github_schema + __field_names__ = ("cursor", "node") + cursor = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="cursor") + """A cursor for use in pagination.""" + + node = sgqlc.types.Field("Reaction", graphql_name="node") + """The item at the end of the edge.""" + + +class ReactionGroup(sgqlc.types.Type): + """A group of emoji reactions to a particular piece of content.""" + + __schema__ = github_schema + __field_names__ = ("content", "created_at", "reactors", "subject", "viewer_has_reacted") + content = sgqlc.types.Field(sgqlc.types.non_null(ReactionContent), graphql_name="content") + """Identifies the emoji reaction.""" + + created_at = sgqlc.types.Field(DateTime, graphql_name="createdAt") + """Identifies when the reaction was created.""" + + reactors = sgqlc.types.Field( + sgqlc.types.non_null("ReactorConnection"), + graphql_name="reactors", + args=sgqlc.types.ArgDict( + ( + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ) + ), + ) + """Reactors to the reaction subject with the emotion represented by + this reaction group. + + Arguments: + + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. 
+ * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. + """ + + subject = sgqlc.types.Field(sgqlc.types.non_null(Reactable), graphql_name="subject") + """The subject that was reacted to.""" + + viewer_has_reacted = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="viewerHasReacted") + """Whether or not the authenticated user has left a reaction on the + subject. + """ + + +class ReactorConnection(sgqlc.types.relay.Connection): + """The connection type for Reactor.""" + + __schema__ = github_schema + __field_names__ = ("edges", "nodes", "page_info", "total_count") + edges = sgqlc.types.Field(sgqlc.types.list_of("ReactorEdge"), graphql_name="edges") + """A list of edges.""" + + nodes = sgqlc.types.Field(sgqlc.types.list_of("Reactor"), graphql_name="nodes") + """A list of nodes.""" + + page_info = sgqlc.types.Field(sgqlc.types.non_null(PageInfo), graphql_name="pageInfo") + """Information to aid in pagination.""" + + total_count = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="totalCount") + """Identifies the total count of items in the connection.""" + + +class ReactorEdge(sgqlc.types.Type): + """Represents an author of a reaction.""" + + __schema__ = github_schema + __field_names__ = ("cursor", "node", "reacted_at") + cursor = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="cursor") + """A cursor for use in pagination.""" + + node = sgqlc.types.Field(sgqlc.types.non_null("Reactor"), graphql_name="node") + """The author of the reaction.""" + + reacted_at = sgqlc.types.Field(sgqlc.types.non_null(DateTime), graphql_name="reactedAt") + """The moment when the user made the reaction.""" + + +class RefConnection(sgqlc.types.relay.Connection): + """The connection type for Ref.""" + + __schema__ = github_schema + __field_names__ = ("edges", "nodes", "page_info", "total_count") + edges = sgqlc.types.Field(sgqlc.types.list_of("RefEdge"), graphql_name="edges") + 
"""A list of edges.""" + + nodes = sgqlc.types.Field(sgqlc.types.list_of("Ref"), graphql_name="nodes") + """A list of nodes.""" + + page_info = sgqlc.types.Field(sgqlc.types.non_null(PageInfo), graphql_name="pageInfo") + """Information to aid in pagination.""" + + total_count = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="totalCount") + """Identifies the total count of items in the connection.""" + + +class RefEdge(sgqlc.types.Type): + """An edge in a connection.""" + + __schema__ = github_schema + __field_names__ = ("cursor", "node") + cursor = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="cursor") + """A cursor for use in pagination.""" + + node = sgqlc.types.Field("Ref", graphql_name="node") + """The item at the end of the edge.""" + + +class RefUpdateRule(sgqlc.types.Type): + """A ref update rules for a viewer.""" + + __schema__ = github_schema + __field_names__ = ( + "allows_deletions", + "allows_force_pushes", + "blocks_creations", + "pattern", + "required_approving_review_count", + "required_status_check_contexts", + "requires_code_owner_reviews", + "requires_conversation_resolution", + "requires_linear_history", + "requires_signatures", + "viewer_allowed_to_dismiss_reviews", + "viewer_can_push", + ) + allows_deletions = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="allowsDeletions") + """Can this branch be deleted.""" + + allows_force_pushes = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="allowsForcePushes") + """Are force pushes allowed on this branch.""" + + blocks_creations = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="blocksCreations") + """Can matching branches be created.""" + + pattern = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="pattern") + """Identifies the protection rule pattern.""" + + required_approving_review_count = sgqlc.types.Field(Int, graphql_name="requiredApprovingReviewCount") + """Number of approving reviews required to update matching 
branches.""" + + required_status_check_contexts = sgqlc.types.Field(sgqlc.types.list_of(String), graphql_name="requiredStatusCheckContexts") + """List of required status check contexts that must pass for commits + to be accepted to matching branches. + """ + + requires_code_owner_reviews = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="requiresCodeOwnerReviews") + """Are reviews from code owners required to update matching branches.""" + + requires_conversation_resolution = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="requiresConversationResolution") + """Are conversations required to be resolved before merging.""" + + requires_linear_history = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="requiresLinearHistory") + """Are merge commits prohibited from being pushed to this branch.""" + + requires_signatures = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="requiresSignatures") + """Are commits required to be signed.""" + + viewer_allowed_to_dismiss_reviews = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="viewerAllowedToDismissReviews") + """Is the viewer allowed to dismiss reviews.""" + + viewer_can_push = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="viewerCanPush") + """Can the viewer push to the branch""" + + +class RegenerateEnterpriseIdentityProviderRecoveryCodesPayload(sgqlc.types.Type): + """Autogenerated return type of + RegenerateEnterpriseIdentityProviderRecoveryCodes + """ + + __schema__ = github_schema + __field_names__ = ("client_mutation_id", "identity_provider") + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + identity_provider = sgqlc.types.Field("EnterpriseIdentityProvider", graphql_name="identityProvider") + """The identity provider for the enterprise.""" + + +class RegenerateVerifiableDomainTokenPayload(sgqlc.types.Type): + """Autogenerated return 
type of RegenerateVerifiableDomainToken""" + + __schema__ = github_schema + __field_names__ = ("client_mutation_id", "verification_token") + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + verification_token = sgqlc.types.Field(String, graphql_name="verificationToken") + """The verification token that was generated.""" + + +class RejectDeploymentsPayload(sgqlc.types.Type): + """Autogenerated return type of RejectDeployments""" + + __schema__ = github_schema + __field_names__ = ("client_mutation_id", "deployments") + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + deployments = sgqlc.types.Field(sgqlc.types.list_of(sgqlc.types.non_null("Deployment")), graphql_name="deployments") + """The affected deployments.""" + + +class ReleaseAssetConnection(sgqlc.types.relay.Connection): + """The connection type for ReleaseAsset.""" + + __schema__ = github_schema + __field_names__ = ("edges", "nodes", "page_info", "total_count") + edges = sgqlc.types.Field(sgqlc.types.list_of("ReleaseAssetEdge"), graphql_name="edges") + """A list of edges.""" + + nodes = sgqlc.types.Field(sgqlc.types.list_of("ReleaseAsset"), graphql_name="nodes") + """A list of nodes.""" + + page_info = sgqlc.types.Field(sgqlc.types.non_null(PageInfo), graphql_name="pageInfo") + """Information to aid in pagination.""" + + total_count = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="totalCount") + """Identifies the total count of items in the connection.""" + + +class ReleaseAssetEdge(sgqlc.types.Type): + """An edge in a connection.""" + + __schema__ = github_schema + __field_names__ = ("cursor", "node") + cursor = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="cursor") + """A cursor for use in pagination.""" + + node = sgqlc.types.Field("ReleaseAsset", graphql_name="node") + 
"""The item at the end of the edge.""" + + +class ReleaseConnection(sgqlc.types.relay.Connection): + """The connection type for Release.""" + + __schema__ = github_schema + __field_names__ = ("edges", "nodes", "page_info", "total_count") + edges = sgqlc.types.Field(sgqlc.types.list_of("ReleaseEdge"), graphql_name="edges") + """A list of edges.""" + + nodes = sgqlc.types.Field(sgqlc.types.list_of("Release"), graphql_name="nodes") + """A list of nodes.""" + + page_info = sgqlc.types.Field(sgqlc.types.non_null(PageInfo), graphql_name="pageInfo") + """Information to aid in pagination.""" + + total_count = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="totalCount") + """Identifies the total count of items in the connection.""" + + +class ReleaseEdge(sgqlc.types.Type): + """An edge in a connection.""" + + __schema__ = github_schema + __field_names__ = ("cursor", "node") + cursor = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="cursor") + """A cursor for use in pagination.""" + + node = sgqlc.types.Field("Release", graphql_name="node") + """The item at the end of the edge.""" + + +class RemoveAssigneesFromAssignablePayload(sgqlc.types.Type): + """Autogenerated return type of RemoveAssigneesFromAssignable""" + + __schema__ = github_schema + __field_names__ = ("assignable", "client_mutation_id") + assignable = sgqlc.types.Field(Assignable, graphql_name="assignable") + """The item that was unassigned.""" + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class RemoveEnterpriseAdminPayload(sgqlc.types.Type): + """Autogenerated return type of RemoveEnterpriseAdmin""" + + __schema__ = github_schema + __field_names__ = ("admin", "client_mutation_id", "enterprise", "message", "viewer") + admin = sgqlc.types.Field("User", graphql_name="admin") + """The user who was removed as an administrator.""" + + client_mutation_id = sgqlc.types.Field(String, 
graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + enterprise = sgqlc.types.Field("Enterprise", graphql_name="enterprise") + """The updated enterprise.""" + + message = sgqlc.types.Field(String, graphql_name="message") + """A message confirming the result of removing an administrator.""" + + viewer = sgqlc.types.Field("User", graphql_name="viewer") + """The viewer performing the mutation.""" + + +class RemoveEnterpriseIdentityProviderPayload(sgqlc.types.Type): + """Autogenerated return type of RemoveEnterpriseIdentityProvider""" + + __schema__ = github_schema + __field_names__ = ("client_mutation_id", "identity_provider") + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + identity_provider = sgqlc.types.Field("EnterpriseIdentityProvider", graphql_name="identityProvider") + """The identity provider that was removed from the enterprise.""" + + +class RemoveEnterpriseOrganizationPayload(sgqlc.types.Type): + """Autogenerated return type of RemoveEnterpriseOrganization""" + + __schema__ = github_schema + __field_names__ = ("client_mutation_id", "enterprise", "organization", "viewer") + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + enterprise = sgqlc.types.Field("Enterprise", graphql_name="enterprise") + """The updated enterprise.""" + + organization = sgqlc.types.Field("Organization", graphql_name="organization") + """The organization that was removed from the enterprise.""" + + viewer = sgqlc.types.Field("User", graphql_name="viewer") + """The viewer performing the mutation.""" + + +class RemoveEnterpriseSupportEntitlementPayload(sgqlc.types.Type): + """Autogenerated return type of RemoveEnterpriseSupportEntitlement""" + + __schema__ = github_schema + __field_names__ = ("client_mutation_id", "message") + 
client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + message = sgqlc.types.Field(String, graphql_name="message") + """A message confirming the result of removing the support + entitlement. + """ + + +class RemoveLabelsFromLabelablePayload(sgqlc.types.Type): + """Autogenerated return type of RemoveLabelsFromLabelable""" + + __schema__ = github_schema + __field_names__ = ("client_mutation_id", "labelable") + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + labelable = sgqlc.types.Field(Labelable, graphql_name="labelable") + """The Labelable the labels were removed from.""" + + +class RemoveOutsideCollaboratorPayload(sgqlc.types.Type): + """Autogenerated return type of RemoveOutsideCollaborator""" + + __schema__ = github_schema + __field_names__ = ("client_mutation_id", "removed_user") + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + removed_user = sgqlc.types.Field("User", graphql_name="removedUser") + """The user that was removed as an outside collaborator.""" + + +class RemoveReactionPayload(sgqlc.types.Type): + """Autogenerated return type of RemoveReaction""" + + __schema__ = github_schema + __field_names__ = ("client_mutation_id", "reaction", "subject") + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + reaction = sgqlc.types.Field("Reaction", graphql_name="reaction") + """The reaction object.""" + + subject = sgqlc.types.Field(Reactable, graphql_name="subject") + """The reactable subject.""" + + +class RemoveStarPayload(sgqlc.types.Type): + """Autogenerated return type of RemoveStar""" + + __schema__ = github_schema + __field_names__ = ("client_mutation_id", 
"starrable") + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + starrable = sgqlc.types.Field("Starrable", graphql_name="starrable") + """The starrable.""" + + +class RemoveUpvotePayload(sgqlc.types.Type): + """Autogenerated return type of RemoveUpvote""" + + __schema__ = github_schema + __field_names__ = ("client_mutation_id", "subject") + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + subject = sgqlc.types.Field("Votable", graphql_name="subject") + """The votable subject.""" + + +class ReopenIssuePayload(sgqlc.types.Type): + """Autogenerated return type of ReopenIssue""" + + __schema__ = github_schema + __field_names__ = ("client_mutation_id", "issue") + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + issue = sgqlc.types.Field("Issue", graphql_name="issue") + """The issue that was opened.""" + + +class ReopenPullRequestPayload(sgqlc.types.Type): + """Autogenerated return type of ReopenPullRequest""" + + __schema__ = github_schema + __field_names__ = ("client_mutation_id", "pull_request") + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + pull_request = sgqlc.types.Field("PullRequest", graphql_name="pullRequest") + """The pull request that was reopened.""" + + +class RepositoryAuditEntryData(sgqlc.types.Interface): + """Metadata for an audit entry with action repo.*""" + + __schema__ = github_schema + __field_names__ = ("repository", "repository_name", "repository_resource_path", "repository_url") + repository = sgqlc.types.Field("Repository", graphql_name="repository") + """The repository associated with the action""" + + repository_name = 
sgqlc.types.Field(String, graphql_name="repositoryName") + """The name of the repository""" + + repository_resource_path = sgqlc.types.Field(URI, graphql_name="repositoryResourcePath") + """The HTTP path for the repository""" + + repository_url = sgqlc.types.Field(URI, graphql_name="repositoryUrl") + """The HTTP URL for the repository""" + + +class RepositoryCodeowners(sgqlc.types.Type): + """Information extracted from a repository's `CODEOWNERS` file.""" + + __schema__ = github_schema + __field_names__ = ("errors",) + errors = sgqlc.types.Field( + sgqlc.types.non_null(sgqlc.types.list_of(sgqlc.types.non_null("RepositoryCodeownersError"))), graphql_name="errors" + ) + """Any problems that were encountered while parsing the `CODEOWNERS` + file. + """ + + +class RepositoryCodeownersError(sgqlc.types.Type): + """An error in a `CODEOWNERS` file.""" + + __schema__ = github_schema + __field_names__ = ("column", "kind", "line", "message", "path", "source", "suggestion") + column = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="column") + """The column number where the error occurs.""" + + kind = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="kind") + """A short string describing the type of error.""" + + line = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="line") + """The line number where the error occurs.""" + + message = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="message") + """A complete description of the error, combining information from + other fields. 
+ """ + + path = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="path") + """The path to the file when the error occurs.""" + + source = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="source") + """The content of the line where the error occurs.""" + + suggestion = sgqlc.types.Field(String, graphql_name="suggestion") + """A suggestion of how to fix the error.""" + + +class RepositoryCollaboratorConnection(sgqlc.types.relay.Connection): + """The connection type for User.""" + + __schema__ = github_schema + __field_names__ = ("edges", "nodes", "page_info", "total_count") + edges = sgqlc.types.Field(sgqlc.types.list_of("RepositoryCollaboratorEdge"), graphql_name="edges") + """A list of edges.""" + + nodes = sgqlc.types.Field(sgqlc.types.list_of("User"), graphql_name="nodes") + """A list of nodes.""" + + page_info = sgqlc.types.Field(sgqlc.types.non_null(PageInfo), graphql_name="pageInfo") + """Information to aid in pagination.""" + + total_count = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="totalCount") + """Identifies the total count of items in the connection.""" + + +class RepositoryCollaboratorEdge(sgqlc.types.Type): + """Represents a user who is a collaborator of a repository.""" + + __schema__ = github_schema + __field_names__ = ("cursor", "node", "permission", "permission_sources") + cursor = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="cursor") + """A cursor for use in pagination.""" + + node = sgqlc.types.Field(sgqlc.types.non_null("User"), graphql_name="node") + + permission = sgqlc.types.Field(sgqlc.types.non_null(RepositoryPermission), graphql_name="permission") + """The permission the user has on the repository.""" + + permission_sources = sgqlc.types.Field(sgqlc.types.list_of(sgqlc.types.non_null(PermissionSource)), graphql_name="permissionSources") + """A list of sources for the user's access to the repository.""" + + +class RepositoryConnection(sgqlc.types.relay.Connection): + """A list of 
repositories owned by the subject.""" + + __schema__ = github_schema + __field_names__ = ("edges", "nodes", "page_info", "total_count", "total_disk_usage") + edges = sgqlc.types.Field(sgqlc.types.list_of("RepositoryEdge"), graphql_name="edges") + """A list of edges.""" + + nodes = sgqlc.types.Field(sgqlc.types.list_of("Repository"), graphql_name="nodes") + """A list of nodes.""" + + page_info = sgqlc.types.Field(sgqlc.types.non_null(PageInfo), graphql_name="pageInfo") + """Information to aid in pagination.""" + + total_count = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="totalCount") + """Identifies the total count of items in the connection.""" + + total_disk_usage = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="totalDiskUsage") + """The total size in kilobytes of all repositories in the connection.""" + + +class RepositoryContactLink(sgqlc.types.Type): + """A repository contact link.""" + + __schema__ = github_schema + __field_names__ = ("about", "name", "url") + about = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="about") + """The contact link purpose.""" + + name = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="name") + """The contact link name.""" + + url = sgqlc.types.Field(sgqlc.types.non_null(URI), graphql_name="url") + """The contact link URL.""" + + +class RepositoryDiscussionAuthor(sgqlc.types.Interface): + """Represents an author of discussions in repositories.""" + + __schema__ = github_schema + __field_names__ = ("repository_discussions",) + repository_discussions = sgqlc.types.Field( + sgqlc.types.non_null(DiscussionConnection), + graphql_name="repositoryDiscussions", + args=sgqlc.types.ArgDict( + ( + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ( + 
"order_by", + sgqlc.types.Arg(DiscussionOrder, graphql_name="orderBy", default={"field": "CREATED_AT", "direction": "DESC"}), + ), + ("repository_id", sgqlc.types.Arg(ID, graphql_name="repositoryId", default=None)), + ("answered", sgqlc.types.Arg(Boolean, graphql_name="answered", default=None)), + ) + ), + ) + """Discussions this user has started. + + Arguments: + + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. + * `order_by` (`DiscussionOrder`): Ordering options for discussions + returned from the connection. (default: `{field: CREATED_AT, + direction: DESC}`) + * `repository_id` (`ID`): Filter discussions to only those in a + specific repository. + * `answered` (`Boolean`): Filter discussions to only those that + have been answered or not. Defaults to including both answered + and unanswered discussions. 
(default: `null`) + """ + + +class RepositoryDiscussionCommentAuthor(sgqlc.types.Interface): + """Represents an author of discussion comments in repositories.""" + + __schema__ = github_schema + __field_names__ = ("repository_discussion_comments",) + repository_discussion_comments = sgqlc.types.Field( + sgqlc.types.non_null(DiscussionCommentConnection), + graphql_name="repositoryDiscussionComments", + args=sgqlc.types.ArgDict( + ( + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ("repository_id", sgqlc.types.Arg(ID, graphql_name="repositoryId", default=None)), + ("only_answers", sgqlc.types.Arg(Boolean, graphql_name="onlyAnswers", default=False)), + ) + ), + ) + """Discussion comments this user has authored. + + Arguments: + + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. + * `repository_id` (`ID`): Filter discussion comments to only those + in a specific repository. 
+ * `only_answers` (`Boolean`): Filter discussion comments to only + those that were marked as the answer (default: `false`) + """ + + +class RepositoryEdge(sgqlc.types.Type): + """An edge in a connection.""" + + __schema__ = github_schema + __field_names__ = ("cursor", "node") + cursor = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="cursor") + """A cursor for use in pagination.""" + + node = sgqlc.types.Field("Repository", graphql_name="node") + """The item at the end of the edge.""" + + +class RepositoryInfo(sgqlc.types.Interface): + """A subset of repository info.""" + + __schema__ = github_schema + __field_names__ = ( + "created_at", + "description", + "description_html", + "fork_count", + "has_issues_enabled", + "has_projects_enabled", + "has_wiki_enabled", + "homepage_url", + "is_archived", + "is_fork", + "is_in_organization", + "is_locked", + "is_mirror", + "is_private", + "is_template", + "license_info", + "lock_reason", + "mirror_url", + "name", + "name_with_owner", + "open_graph_image_url", + "owner", + "pushed_at", + "resource_path", + "short_description_html", + "updated_at", + "url", + "uses_custom_open_graph_image", + "visibility", + ) + created_at = sgqlc.types.Field(sgqlc.types.non_null(DateTime), graphql_name="createdAt") + """Identifies the date and time when the object was created.""" + + description = sgqlc.types.Field(String, graphql_name="description") + """The description of the repository.""" + + description_html = sgqlc.types.Field(sgqlc.types.non_null(HTML), graphql_name="descriptionHTML") + """The description of the repository rendered to HTML.""" + + fork_count = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="forkCount") + """Returns how many forks there are of this repository in the whole + network. 
+ """ + + has_issues_enabled = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="hasIssuesEnabled") + """Indicates if the repository has issues feature enabled.""" + + has_projects_enabled = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="hasProjectsEnabled") + """Indicates if the repository has the Projects feature enabled.""" + + has_wiki_enabled = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="hasWikiEnabled") + """Indicates if the repository has wiki feature enabled.""" + + homepage_url = sgqlc.types.Field(URI, graphql_name="homepageUrl") + """The repository's URL.""" + + is_archived = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="isArchived") + """Indicates if the repository is unmaintained.""" + + is_fork = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="isFork") + """Identifies if the repository is a fork.""" + + is_in_organization = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="isInOrganization") + """Indicates if a repository is either owned by an organization, or + is a private fork of an organization repository. + """ + + is_locked = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="isLocked") + """Indicates if the repository has been locked or not.""" + + is_mirror = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="isMirror") + """Identifies if the repository is a mirror.""" + + is_private = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="isPrivate") + """Identifies if the repository is private or internal.""" + + is_template = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="isTemplate") + """Identifies if the repository is a template that can be used to + generate new repositories. 
+ """ + + license_info = sgqlc.types.Field("License", graphql_name="licenseInfo") + """The license associated with the repository""" + + lock_reason = sgqlc.types.Field(RepositoryLockReason, graphql_name="lockReason") + """The reason the repository has been locked.""" + + mirror_url = sgqlc.types.Field(URI, graphql_name="mirrorUrl") + """The repository's original mirror URL.""" + + name = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="name") + """The name of the repository.""" + + name_with_owner = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="nameWithOwner") + """The repository's name with owner.""" + + open_graph_image_url = sgqlc.types.Field(sgqlc.types.non_null(URI), graphql_name="openGraphImageUrl") + """The image used to represent this repository in Open Graph data.""" + + owner = sgqlc.types.Field(sgqlc.types.non_null("RepositoryOwner"), graphql_name="owner") + """The User owner of the repository.""" + + pushed_at = sgqlc.types.Field(DateTime, graphql_name="pushedAt") + """Identifies when the repository was last pushed to.""" + + resource_path = sgqlc.types.Field(sgqlc.types.non_null(URI), graphql_name="resourcePath") + """The HTTP path for this repository""" + + short_description_html = sgqlc.types.Field( + sgqlc.types.non_null(HTML), + graphql_name="shortDescriptionHTML", + args=sgqlc.types.ArgDict((("limit", sgqlc.types.Arg(Int, graphql_name="limit", default=200)),)), + ) + """A description of the repository, rendered to HTML without any + links in it. + + Arguments: + + * `limit` (`Int`): How many characters to return. 
(default: `200`) + """ + + updated_at = sgqlc.types.Field(sgqlc.types.non_null(DateTime), graphql_name="updatedAt") + """Identifies the date and time when the object was last updated.""" + + url = sgqlc.types.Field(sgqlc.types.non_null(URI), graphql_name="url") + """The HTTP URL for this repository""" + + uses_custom_open_graph_image = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="usesCustomOpenGraphImage") + """Whether this repository has a custom image to use with Open Graph + as opposed to being represented by the owner's avatar. + """ + + visibility = sgqlc.types.Field(sgqlc.types.non_null(RepositoryVisibility), graphql_name="visibility") + """Indicates the repository's visibility level.""" + + +class RepositoryInteractionAbility(sgqlc.types.Type): + """Repository interaction limit that applies to this object.""" + + __schema__ = github_schema + __field_names__ = ("expires_at", "limit", "origin") + expires_at = sgqlc.types.Field(DateTime, graphql_name="expiresAt") + """The time the currently active limit expires.""" + + limit = sgqlc.types.Field(sgqlc.types.non_null(RepositoryInteractionLimit), graphql_name="limit") + """The current limit that is enabled on this object.""" + + origin = sgqlc.types.Field(sgqlc.types.non_null(RepositoryInteractionLimitOrigin), graphql_name="origin") + """The origin of the currently active interaction limit.""" + + +class RepositoryInvitationConnection(sgqlc.types.relay.Connection): + """A list of repository invitations.""" + + __schema__ = github_schema + __field_names__ = ("edges", "nodes", "page_info", "total_count") + edges = sgqlc.types.Field(sgqlc.types.list_of("RepositoryInvitationEdge"), graphql_name="edges") + """A list of edges.""" + + nodes = sgqlc.types.Field(sgqlc.types.list_of("RepositoryInvitation"), graphql_name="nodes") + """A list of nodes.""" + + page_info = sgqlc.types.Field(sgqlc.types.non_null(PageInfo), graphql_name="pageInfo") + """Information to aid in pagination.""" + + total_count = 
sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="totalCount") + """Identifies the total count of items in the connection.""" + + +class RepositoryInvitationEdge(sgqlc.types.Type): + """An edge in a connection.""" + + __schema__ = github_schema + __field_names__ = ("cursor", "node") + cursor = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="cursor") + """A cursor for use in pagination.""" + + node = sgqlc.types.Field("RepositoryInvitation", graphql_name="node") + """The item at the end of the edge.""" + + +class RepositoryMigrationConnection(sgqlc.types.relay.Connection): + """The connection type for RepositoryMigration.""" + + __schema__ = github_schema + __field_names__ = ("edges", "nodes", "page_info", "total_count") + edges = sgqlc.types.Field(sgqlc.types.list_of("RepositoryMigrationEdge"), graphql_name="edges") + """A list of edges.""" + + nodes = sgqlc.types.Field(sgqlc.types.list_of("RepositoryMigration"), graphql_name="nodes") + """A list of nodes.""" + + page_info = sgqlc.types.Field(sgqlc.types.non_null(PageInfo), graphql_name="pageInfo") + """Information to aid in pagination.""" + + total_count = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="totalCount") + """Identifies the total count of items in the connection.""" + + +class RepositoryMigrationEdge(sgqlc.types.Type): + """Represents a repository migration.""" + + __schema__ = github_schema + __field_names__ = ("cursor", "node") + cursor = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="cursor") + """A cursor for use in pagination.""" + + node = sgqlc.types.Field("RepositoryMigration", graphql_name="node") + """The item at the end of the edge.""" + + +class RepositoryNode(sgqlc.types.Interface): + """Represents a object that belongs to a repository.""" + + __schema__ = github_schema + __field_names__ = ("repository",) + repository = sgqlc.types.Field(sgqlc.types.non_null("Repository"), graphql_name="repository") + """The repository associated with this 
node.""" + + +class RepositoryOwner(sgqlc.types.Interface): + """Represents an owner of a Repository.""" + + __schema__ = github_schema + __field_names__ = ("avatar_url", "id", "login", "repositories", "repository", "resource_path", "url") + avatar_url = sgqlc.types.Field( + sgqlc.types.non_null(URI), + graphql_name="avatarUrl", + args=sgqlc.types.ArgDict((("size", sgqlc.types.Arg(Int, graphql_name="size", default=None)),)), + ) + """A URL pointing to the owner's public avatar. + + Arguments: + + * `size` (`Int`): The size of the resulting square image. + """ + + id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="id") + + login = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="login") + """The username used to login.""" + + repositories = sgqlc.types.Field( + sgqlc.types.non_null(RepositoryConnection), + graphql_name="repositories", + args=sgqlc.types.ArgDict( + ( + ("privacy", sgqlc.types.Arg(RepositoryPrivacy, graphql_name="privacy", default=None)), + ("order_by", sgqlc.types.Arg(RepositoryOrder, graphql_name="orderBy", default=None)), + ("affiliations", sgqlc.types.Arg(sgqlc.types.list_of(RepositoryAffiliation), graphql_name="affiliations", default=None)), + ( + "owner_affiliations", + sgqlc.types.Arg( + sgqlc.types.list_of(RepositoryAffiliation), graphql_name="ownerAffiliations", default=("OWNER", "COLLABORATOR") + ), + ), + ("is_locked", sgqlc.types.Arg(Boolean, graphql_name="isLocked", default=None)), + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ("is_fork", sgqlc.types.Arg(Boolean, graphql_name="isFork", default=None)), + ) + ), + ) + """A list of repositories that the user owns. 
+ + Arguments: + + * `privacy` (`RepositoryPrivacy`): If non-null, filters + repositories according to privacy + * `order_by` (`RepositoryOrder`): Ordering options for + repositories returned from the connection + * `affiliations` (`[RepositoryAffiliation]`): Array of viewer's + affiliation options for repositories returned from the + connection. For example, OWNER will include only repositories + that the current viewer owns. + * `owner_affiliations` (`[RepositoryAffiliation]`): Array of + owner's affiliation options for repositories returned from the + connection. For example, OWNER will include only repositories + that the organization or user being viewed owns. (default: + `[OWNER, COLLABORATOR]`) + * `is_locked` (`Boolean`): If non-null, filters repositories + according to whether they have been locked + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. + * `is_fork` (`Boolean`): If non-null, filters repositories + according to whether they are forks of another repository + """ + + repository = sgqlc.types.Field( + "Repository", + graphql_name="repository", + args=sgqlc.types.ArgDict( + ( + ("name", sgqlc.types.Arg(sgqlc.types.non_null(String), graphql_name="name", default=None)), + ("follow_renames", sgqlc.types.Arg(Boolean, graphql_name="followRenames", default=True)), + ) + ), + ) + """Find Repository. + + Arguments: + + * `name` (`String!`): Name of Repository to find. + * `follow_renames` (`Boolean`): Follow repository renames. If + disabled, a repository referenced by its old name will return an + error. 
(default: `true`) + """ + + resource_path = sgqlc.types.Field(sgqlc.types.non_null(URI), graphql_name="resourcePath") + """The HTTP URL for the owner.""" + + url = sgqlc.types.Field(sgqlc.types.non_null(URI), graphql_name="url") + """The HTTP URL for the owner.""" + + +class RepositoryTopicConnection(sgqlc.types.relay.Connection): + """The connection type for RepositoryTopic.""" + + __schema__ = github_schema + __field_names__ = ("edges", "nodes", "page_info", "total_count") + edges = sgqlc.types.Field(sgqlc.types.list_of("RepositoryTopicEdge"), graphql_name="edges") + """A list of edges.""" + + nodes = sgqlc.types.Field(sgqlc.types.list_of("RepositoryTopic"), graphql_name="nodes") + """A list of nodes.""" + + page_info = sgqlc.types.Field(sgqlc.types.non_null(PageInfo), graphql_name="pageInfo") + """Information to aid in pagination.""" + + total_count = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="totalCount") + """Identifies the total count of items in the connection.""" + + +class RepositoryTopicEdge(sgqlc.types.Type): + """An edge in a connection.""" + + __schema__ = github_schema + __field_names__ = ("cursor", "node") + cursor = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="cursor") + """A cursor for use in pagination.""" + + node = sgqlc.types.Field("RepositoryTopic", graphql_name="node") + """The item at the end of the edge.""" + + +class RepositoryVulnerabilityAlertConnection(sgqlc.types.relay.Connection): + """The connection type for RepositoryVulnerabilityAlert.""" + + __schema__ = github_schema + __field_names__ = ("edges", "nodes", "page_info", "total_count") + edges = sgqlc.types.Field(sgqlc.types.list_of("RepositoryVulnerabilityAlertEdge"), graphql_name="edges") + """A list of edges.""" + + nodes = sgqlc.types.Field(sgqlc.types.list_of("RepositoryVulnerabilityAlert"), graphql_name="nodes") + """A list of nodes.""" + + page_info = sgqlc.types.Field(sgqlc.types.non_null(PageInfo), graphql_name="pageInfo") + """Information 
to aid in pagination.""" + + total_count = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="totalCount") + """Identifies the total count of items in the connection.""" + + +class RepositoryVulnerabilityAlertEdge(sgqlc.types.Type): + """An edge in a connection.""" + + __schema__ = github_schema + __field_names__ = ("cursor", "node") + cursor = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="cursor") + """A cursor for use in pagination.""" + + node = sgqlc.types.Field("RepositoryVulnerabilityAlert", graphql_name="node") + """The item at the end of the edge.""" + + +class RequestReviewsPayload(sgqlc.types.Type): + """Autogenerated return type of RequestReviews""" + + __schema__ = github_schema + __field_names__ = ("actor", "client_mutation_id", "pull_request", "requested_reviewers_edge") + actor = sgqlc.types.Field(Actor, graphql_name="actor") + """Identifies the actor who performed the event.""" + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + pull_request = sgqlc.types.Field("PullRequest", graphql_name="pullRequest") + """The pull request that is getting requests.""" + + requested_reviewers_edge = sgqlc.types.Field("UserEdge", graphql_name="requestedReviewersEdge") + """The edge from the pull request to the requested reviewers.""" + + +class RequirableByPullRequest(sgqlc.types.Interface): + """Represents a type that can be required by a pull request for + merging. 
+ """ + + __schema__ = github_schema + __field_names__ = ("is_required",) + is_required = sgqlc.types.Field( + sgqlc.types.non_null(Boolean), + graphql_name="isRequired", + args=sgqlc.types.ArgDict( + ( + ("pull_request_id", sgqlc.types.Arg(ID, graphql_name="pullRequestId", default=None)), + ("pull_request_number", sgqlc.types.Arg(Int, graphql_name="pullRequestNumber", default=None)), + ) + ), + ) + """Whether this is required to pass before merging for a specific + pull request. + + Arguments: + + * `pull_request_id` (`ID`): The id of the pull request this is + required for + * `pull_request_number` (`Int`): The number of the pull request + this is required for + """ + + +class RequiredStatusCheckDescription(sgqlc.types.Type): + """Represents a required status check for a protected branch, but not + any specific run of that check. + """ + + __schema__ = github_schema + __field_names__ = ("app", "context") + app = sgqlc.types.Field("App", graphql_name="app") + """The App that must provide this status in order for it to be + accepted. 
+ """ + + context = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="context") + """The name of this status.""" + + +class RerequestCheckSuitePayload(sgqlc.types.Type): + """Autogenerated return type of RerequestCheckSuite""" + + __schema__ = github_schema + __field_names__ = ("check_suite", "client_mutation_id") + check_suite = sgqlc.types.Field("CheckSuite", graphql_name="checkSuite") + """The requested check suite.""" + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class ResolveReviewThreadPayload(sgqlc.types.Type): + """Autogenerated return type of ResolveReviewThread""" + + __schema__ = github_schema + __field_names__ = ("client_mutation_id", "thread") + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + thread = sgqlc.types.Field("PullRequestReviewThread", graphql_name="thread") + """The thread to resolve.""" + + +class ReviewDismissalAllowanceConnection(sgqlc.types.relay.Connection): + """The connection type for ReviewDismissalAllowance.""" + + __schema__ = github_schema + __field_names__ = ("edges", "nodes", "page_info", "total_count") + edges = sgqlc.types.Field(sgqlc.types.list_of("ReviewDismissalAllowanceEdge"), graphql_name="edges") + """A list of edges.""" + + nodes = sgqlc.types.Field(sgqlc.types.list_of("ReviewDismissalAllowance"), graphql_name="nodes") + """A list of nodes.""" + + page_info = sgqlc.types.Field(sgqlc.types.non_null(PageInfo), graphql_name="pageInfo") + """Information to aid in pagination.""" + + total_count = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="totalCount") + """Identifies the total count of items in the connection.""" + + +class ReviewDismissalAllowanceEdge(sgqlc.types.Type): + """An edge in a connection.""" + + __schema__ = github_schema + __field_names__ = ("cursor", "node") + cursor = 
sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="cursor") + """A cursor for use in pagination.""" + + node = sgqlc.types.Field("ReviewDismissalAllowance", graphql_name="node") + """The item at the end of the edge.""" + + +class ReviewRequestConnection(sgqlc.types.relay.Connection): + """The connection type for ReviewRequest.""" + + __schema__ = github_schema + __field_names__ = ("edges", "nodes", "page_info", "total_count") + edges = sgqlc.types.Field(sgqlc.types.list_of("ReviewRequestEdge"), graphql_name="edges") + """A list of edges.""" + + nodes = sgqlc.types.Field(sgqlc.types.list_of("ReviewRequest"), graphql_name="nodes") + """A list of nodes.""" + + page_info = sgqlc.types.Field(sgqlc.types.non_null(PageInfo), graphql_name="pageInfo") + """Information to aid in pagination.""" + + total_count = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="totalCount") + """Identifies the total count of items in the connection.""" + + +class ReviewRequestEdge(sgqlc.types.Type): + """An edge in a connection.""" + + __schema__ = github_schema + __field_names__ = ("cursor", "node") + cursor = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="cursor") + """A cursor for use in pagination.""" + + node = sgqlc.types.Field("ReviewRequest", graphql_name="node") + """The item at the end of the edge.""" + + +class RevokeEnterpriseOrganizationsMigratorRolePayload(sgqlc.types.Type): + """Autogenerated return type of + RevokeEnterpriseOrganizationsMigratorRole + """ + + __schema__ = github_schema + __field_names__ = ("client_mutation_id", "organizations") + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + organizations = sgqlc.types.Field( + OrganizationConnection, + graphql_name="organizations", + args=sgqlc.types.ArgDict( + ( + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, 
graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ) + ), + ) + """The organizations that had the migrator role revoked for the given + user. + + Arguments: + + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. + """ + + +class RevokeMigratorRolePayload(sgqlc.types.Type): + """Autogenerated return type of RevokeMigratorRole""" + + __schema__ = github_schema + __field_names__ = ("client_mutation_id", "success") + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + success = sgqlc.types.Field(Boolean, graphql_name="success") + """Did the operation succeed?""" + + +class SavedReplyConnection(sgqlc.types.relay.Connection): + """The connection type for SavedReply.""" + + __schema__ = github_schema + __field_names__ = ("edges", "nodes", "page_info", "total_count") + edges = sgqlc.types.Field(sgqlc.types.list_of("SavedReplyEdge"), graphql_name="edges") + """A list of edges.""" + + nodes = sgqlc.types.Field(sgqlc.types.list_of("SavedReply"), graphql_name="nodes") + """A list of nodes.""" + + page_info = sgqlc.types.Field(sgqlc.types.non_null(PageInfo), graphql_name="pageInfo") + """Information to aid in pagination.""" + + total_count = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="totalCount") + """Identifies the total count of items in the connection.""" + + +class SavedReplyEdge(sgqlc.types.Type): + """An edge in a connection.""" + + __schema__ = github_schema + __field_names__ = ("cursor", "node") + cursor = sgqlc.types.Field(sgqlc.types.non_null(String), 
graphql_name="cursor") + """A cursor for use in pagination.""" + + node = sgqlc.types.Field("SavedReply", graphql_name="node") + """The item at the end of the edge.""" + + +class SearchResultItemConnection(sgqlc.types.relay.Connection): + """A list of results that matched against a search query.""" + + __schema__ = github_schema + __field_names__ = ( + "code_count", + "discussion_count", + "edges", + "issue_count", + "nodes", + "page_info", + "repository_count", + "user_count", + "wiki_count", + ) + code_count = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="codeCount") + """The number of pieces of code that matched the search query.""" + + discussion_count = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="discussionCount") + """The number of discussions that matched the search query.""" + + edges = sgqlc.types.Field(sgqlc.types.list_of("SearchResultItemEdge"), graphql_name="edges") + """A list of edges.""" + + issue_count = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="issueCount") + """The number of issues that matched the search query.""" + + nodes = sgqlc.types.Field(sgqlc.types.list_of("SearchResultItem"), graphql_name="nodes") + """A list of nodes.""" + + page_info = sgqlc.types.Field(sgqlc.types.non_null(PageInfo), graphql_name="pageInfo") + """Information to aid in pagination.""" + + repository_count = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="repositoryCount") + """The number of repositories that matched the search query.""" + + user_count = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="userCount") + """The number of users that matched the search query.""" + + wiki_count = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="wikiCount") + """The number of wiki pages that matched the search query.""" + + +class SearchResultItemEdge(sgqlc.types.Type): + """An edge in a connection.""" + + __schema__ = github_schema + __field_names__ = ("cursor", "node", "text_matches") + cursor = 
sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="cursor") + """A cursor for use in pagination.""" + + node = sgqlc.types.Field("SearchResultItem", graphql_name="node") + """The item at the end of the edge.""" + + text_matches = sgqlc.types.Field(sgqlc.types.list_of("TextMatch"), graphql_name="textMatches") + """Text matches on the result found.""" + + +class SecurityAdvisoryConnection(sgqlc.types.relay.Connection): + """The connection type for SecurityAdvisory.""" + + __schema__ = github_schema + __field_names__ = ("edges", "nodes", "page_info", "total_count") + edges = sgqlc.types.Field(sgqlc.types.list_of("SecurityAdvisoryEdge"), graphql_name="edges") + """A list of edges.""" + + nodes = sgqlc.types.Field(sgqlc.types.list_of("SecurityAdvisory"), graphql_name="nodes") + """A list of nodes.""" + + page_info = sgqlc.types.Field(sgqlc.types.non_null(PageInfo), graphql_name="pageInfo") + """Information to aid in pagination.""" + + total_count = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="totalCount") + """Identifies the total count of items in the connection.""" + + +class SecurityAdvisoryEdge(sgqlc.types.Type): + """An edge in a connection.""" + + __schema__ = github_schema + __field_names__ = ("cursor", "node") + cursor = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="cursor") + """A cursor for use in pagination.""" + + node = sgqlc.types.Field("SecurityAdvisory", graphql_name="node") + """The item at the end of the edge.""" + + +class SecurityAdvisoryIdentifier(sgqlc.types.Type): + """A GitHub Security Advisory Identifier""" + + __schema__ = github_schema + __field_names__ = ("type", "value") + type = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="type") + """The identifier type, e.g. 
GHSA, CVE""" + + value = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="value") + """The identifier""" + + +class SecurityAdvisoryPackage(sgqlc.types.Type): + """An individual package""" + + __schema__ = github_schema + __field_names__ = ("ecosystem", "name") + ecosystem = sgqlc.types.Field(sgqlc.types.non_null(SecurityAdvisoryEcosystem), graphql_name="ecosystem") + """The ecosystem the package belongs to, e.g. RUBYGEMS, NPM""" + + name = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="name") + """The package name""" + + +class SecurityAdvisoryPackageVersion(sgqlc.types.Type): + """An individual package version""" + + __schema__ = github_schema + __field_names__ = ("identifier",) + identifier = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="identifier") + """The package name or version""" + + +class SecurityAdvisoryReference(sgqlc.types.Type): + """A GitHub Security Advisory Reference""" + + __schema__ = github_schema + __field_names__ = ("url",) + url = sgqlc.types.Field(sgqlc.types.non_null(URI), graphql_name="url") + """A publicly accessible reference""" + + +class SecurityVulnerability(sgqlc.types.Type): + """An individual vulnerability within an Advisory""" + + __schema__ = github_schema + __field_names__ = ("advisory", "first_patched_version", "package", "severity", "updated_at", "vulnerable_version_range") + advisory = sgqlc.types.Field(sgqlc.types.non_null("SecurityAdvisory"), graphql_name="advisory") + """The Advisory associated with this Vulnerability""" + + first_patched_version = sgqlc.types.Field(SecurityAdvisoryPackageVersion, graphql_name="firstPatchedVersion") + """The first version containing a fix for the vulnerability""" + + package = sgqlc.types.Field(sgqlc.types.non_null(SecurityAdvisoryPackage), graphql_name="package") + """A description of the vulnerable package""" + + severity = sgqlc.types.Field(sgqlc.types.non_null(SecurityAdvisorySeverity), graphql_name="severity") + """The severity of the 
vulnerability within this package""" + + updated_at = sgqlc.types.Field(sgqlc.types.non_null(DateTime), graphql_name="updatedAt") + """When the vulnerability was last updated""" + + vulnerable_version_range = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="vulnerableVersionRange") + """A string that describes the vulnerable package versions. This + string follows a basic syntax with a few forms. + `= 0.2.0` + denotes a single vulnerable version. + `<= 1.0.8` denotes a + version range up to and including the specified version + `< + 0.1.11` denotes a version range up to, but excluding, the + specified version + `>= 4.3.0, < 4.3.5` denotes a version range + with a known minimum and maximum version. + `>= 0.0.1` denotes a + version range with a known minimum, but no known maximum + """ + + +class SecurityVulnerabilityConnection(sgqlc.types.relay.Connection): + """The connection type for SecurityVulnerability.""" + + __schema__ = github_schema + __field_names__ = ("edges", "nodes", "page_info", "total_count") + edges = sgqlc.types.Field(sgqlc.types.list_of("SecurityVulnerabilityEdge"), graphql_name="edges") + """A list of edges.""" + + nodes = sgqlc.types.Field(sgqlc.types.list_of(SecurityVulnerability), graphql_name="nodes") + """A list of nodes.""" + + page_info = sgqlc.types.Field(sgqlc.types.non_null(PageInfo), graphql_name="pageInfo") + """Information to aid in pagination.""" + + total_count = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="totalCount") + """Identifies the total count of items in the connection.""" + + +class SecurityVulnerabilityEdge(sgqlc.types.Type): + """An edge in a connection.""" + + __schema__ = github_schema + __field_names__ = ("cursor", "node") + cursor = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="cursor") + """A cursor for use in pagination.""" + + node = sgqlc.types.Field(SecurityVulnerability, graphql_name="node") + """The item at the end of the edge.""" + + +class 
SetEnterpriseIdentityProviderPayload(sgqlc.types.Type): + """Autogenerated return type of SetEnterpriseIdentityProvider""" + + __schema__ = github_schema + __field_names__ = ("client_mutation_id", "identity_provider") + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + identity_provider = sgqlc.types.Field("EnterpriseIdentityProvider", graphql_name="identityProvider") + """The identity provider for the enterprise.""" + + +class SetOrganizationInteractionLimitPayload(sgqlc.types.Type): + """Autogenerated return type of SetOrganizationInteractionLimit""" + + __schema__ = github_schema + __field_names__ = ("client_mutation_id", "organization") + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + organization = sgqlc.types.Field("Organization", graphql_name="organization") + """The organization that the interaction limit was set for.""" + + +class SetRepositoryInteractionLimitPayload(sgqlc.types.Type): + """Autogenerated return type of SetRepositoryInteractionLimit""" + + __schema__ = github_schema + __field_names__ = ("client_mutation_id", "repository") + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + repository = sgqlc.types.Field("Repository", graphql_name="repository") + """The repository that the interaction limit was set for.""" + + +class SetUserInteractionLimitPayload(sgqlc.types.Type): + """Autogenerated return type of SetUserInteractionLimit""" + + __schema__ = github_schema + __field_names__ = ("client_mutation_id", "user") + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + user = sgqlc.types.Field("User", graphql_name="user") + """The user that the 
interaction limit was set for.""" + + +class SortBy(sgqlc.types.Type): + """Represents a sort by field and direction.""" + + __schema__ = github_schema + __field_names__ = ("direction", "field") + direction = sgqlc.types.Field(sgqlc.types.non_null(OrderDirection), graphql_name="direction") + """The direction of the sorting. Possible values are ASC and DESC.""" + + field = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="field") + """The id of the field by which the column is sorted.""" + + +class SponsorConnection(sgqlc.types.relay.Connection): + """The connection type for Sponsor.""" + + __schema__ = github_schema + __field_names__ = ("edges", "nodes", "page_info", "total_count") + edges = sgqlc.types.Field(sgqlc.types.list_of("SponsorEdge"), graphql_name="edges") + """A list of edges.""" + + nodes = sgqlc.types.Field(sgqlc.types.list_of("Sponsor"), graphql_name="nodes") + """A list of nodes.""" + + page_info = sgqlc.types.Field(sgqlc.types.non_null(PageInfo), graphql_name="pageInfo") + """Information to aid in pagination.""" + + total_count = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="totalCount") + """Identifies the total count of items in the connection.""" + + +class SponsorEdge(sgqlc.types.Type): + """Represents a user or organization who is sponsoring someone in + GitHub Sponsors. 
+ """ + + __schema__ = github_schema + __field_names__ = ("cursor", "node") + cursor = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="cursor") + """A cursor for use in pagination.""" + + node = sgqlc.types.Field("Sponsor", graphql_name="node") + """The item at the end of the edge.""" + + +class Sponsorable(sgqlc.types.Interface): + """Entities that can be sponsored through GitHub Sponsors""" + + __schema__ = github_schema + __field_names__ = ( + "estimated_next_sponsors_payout_in_cents", + "has_sponsors_listing", + "is_sponsored_by", + "is_sponsoring_viewer", + "monthly_estimated_sponsors_income_in_cents", + "sponsoring", + "sponsors", + "sponsors_activities", + "sponsors_listing", + "sponsorship_for_viewer_as_sponsor", + "sponsorship_for_viewer_as_sponsorable", + "sponsorship_newsletters", + "sponsorships_as_maintainer", + "sponsorships_as_sponsor", + "viewer_can_sponsor", + "viewer_is_sponsoring", + ) + estimated_next_sponsors_payout_in_cents = sgqlc.types.Field( + sgqlc.types.non_null(Int), graphql_name="estimatedNextSponsorsPayoutInCents" + ) + """The estimated next GitHub Sponsors payout for this + user/organization in cents (USD). + """ + + has_sponsors_listing = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="hasSponsorsListing") + """True if this user/organization has a GitHub Sponsors listing.""" + + is_sponsored_by = sgqlc.types.Field( + sgqlc.types.non_null(Boolean), + graphql_name="isSponsoredBy", + args=sgqlc.types.ArgDict( + (("account_login", sgqlc.types.Arg(sgqlc.types.non_null(String), graphql_name="accountLogin", default=None)),) + ), + ) + """Check if the given account is sponsoring this user/organization. + + Arguments: + + * `account_login` (`String!`): The target account's login. 
+ """ + + is_sponsoring_viewer = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="isSponsoringViewer") + """True if the viewer is sponsored by this user/organization.""" + + monthly_estimated_sponsors_income_in_cents = sgqlc.types.Field( + sgqlc.types.non_null(Int), graphql_name="monthlyEstimatedSponsorsIncomeInCents" + ) + """The estimated monthly GitHub Sponsors income for this + user/organization in cents (USD). + """ + + sponsoring = sgqlc.types.Field( + sgqlc.types.non_null(SponsorConnection), + graphql_name="sponsoring", + args=sgqlc.types.ArgDict( + ( + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ("order_by", sgqlc.types.Arg(SponsorOrder, graphql_name="orderBy", default={"field": "RELEVANCE", "direction": "DESC"})), + ) + ), + ) + """List of users and organizations this entity is sponsoring. + + Arguments: + + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. + * `order_by` (`SponsorOrder`): Ordering options for the users and + organizations returned from the connection. 
(default: `{field: + RELEVANCE, direction: DESC}`) + """ + + sponsors = sgqlc.types.Field( + sgqlc.types.non_null(SponsorConnection), + graphql_name="sponsors", + args=sgqlc.types.ArgDict( + ( + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ("tier_id", sgqlc.types.Arg(ID, graphql_name="tierId", default=None)), + ("order_by", sgqlc.types.Arg(SponsorOrder, graphql_name="orderBy", default={"field": "RELEVANCE", "direction": "DESC"})), + ) + ), + ) + """List of sponsors for this user or organization. + + Arguments: + + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. + * `tier_id` (`ID`): If given, will filter for sponsors at the + given tier. Will only return sponsors whose tier the viewer is + permitted to see. + * `order_by` (`SponsorOrder`): Ordering options for sponsors + returned from the connection. 
(default: `{field: RELEVANCE, + direction: DESC}`) + """ + + sponsors_activities = sgqlc.types.Field( + sgqlc.types.non_null("SponsorsActivityConnection"), + graphql_name="sponsorsActivities", + args=sgqlc.types.ArgDict( + ( + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ("period", sgqlc.types.Arg(SponsorsActivityPeriod, graphql_name="period", default="MONTH")), + ( + "order_by", + sgqlc.types.Arg(SponsorsActivityOrder, graphql_name="orderBy", default={"field": "TIMESTAMP", "direction": "DESC"}), + ), + ) + ), + ) + """Events involving this sponsorable, such as new sponsorships. + + Arguments: + + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. + * `period` (`SponsorsActivityPeriod`): Filter activities returned + to only those that occurred in a given time range. (default: + `MONTH`) + * `order_by` (`SponsorsActivityOrder`): Ordering options for + activity returned from the connection. (default: `{field: + TIMESTAMP, direction: DESC}`) + """ + + sponsors_listing = sgqlc.types.Field("SponsorsListing", graphql_name="sponsorsListing") + """The GitHub Sponsors listing for this user or organization.""" + + sponsorship_for_viewer_as_sponsor = sgqlc.types.Field("Sponsorship", graphql_name="sponsorshipForViewerAsSponsor") + """The sponsorship from the viewer to this user/organization; that + is, the sponsorship where you're the sponsor. Only returns a + sponsorship if it is active. 
+ """ + + sponsorship_for_viewer_as_sponsorable = sgqlc.types.Field("Sponsorship", graphql_name="sponsorshipForViewerAsSponsorable") + """The sponsorship from this user/organization to the viewer; that + is, the sponsorship you're receiving. Only returns a sponsorship + if it is active. + """ + + sponsorship_newsletters = sgqlc.types.Field( + sgqlc.types.non_null("SponsorshipNewsletterConnection"), + graphql_name="sponsorshipNewsletters", + args=sgqlc.types.ArgDict( + ( + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ( + "order_by", + sgqlc.types.Arg( + SponsorshipNewsletterOrder, graphql_name="orderBy", default={"field": "CREATED_AT", "direction": "DESC"} + ), + ), + ) + ), + ) + """List of sponsorship updates sent from this sponsorable to + sponsors. + + Arguments: + + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. + * `order_by` (`SponsorshipNewsletterOrder`): Ordering options for + sponsorship updates returned from the connection. 
(default: + `{field: CREATED_AT, direction: DESC}`) + """ + + sponsorships_as_maintainer = sgqlc.types.Field( + sgqlc.types.non_null("SponsorshipConnection"), + graphql_name="sponsorshipsAsMaintainer", + args=sgqlc.types.ArgDict( + ( + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ("include_private", sgqlc.types.Arg(Boolean, graphql_name="includePrivate", default=False)), + ("order_by", sgqlc.types.Arg(SponsorshipOrder, graphql_name="orderBy", default=None)), + ) + ), + ) + """This object's sponsorships as the maintainer. + + Arguments: + + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. + * `include_private` (`Boolean`): Whether or not to include private + sponsorships in the result set (default: `false`) + * `order_by` (`SponsorshipOrder`): Ordering options for + sponsorships returned from this connection. If left blank, the + sponsorships will be ordered based on relevancy to the viewer. 
+ """ + + sponsorships_as_sponsor = sgqlc.types.Field( + sgqlc.types.non_null("SponsorshipConnection"), + graphql_name="sponsorshipsAsSponsor", + args=sgqlc.types.ArgDict( + ( + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ("order_by", sgqlc.types.Arg(SponsorshipOrder, graphql_name="orderBy", default=None)), + ) + ), + ) + """This object's sponsorships as the sponsor. + + Arguments: + + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. + * `order_by` (`SponsorshipOrder`): Ordering options for + sponsorships returned from this connection. If left blank, the + sponsorships will be ordered based on relevancy to the viewer. + """ + + viewer_can_sponsor = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="viewerCanSponsor") + """Whether or not the viewer is able to sponsor this + user/organization. 
+ """ + + viewer_is_sponsoring = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="viewerIsSponsoring") + """True if the viewer is sponsoring this user/organization.""" + + +class SponsorableItemConnection(sgqlc.types.relay.Connection): + """The connection type for SponsorableItem.""" + + __schema__ = github_schema + __field_names__ = ("edges", "nodes", "page_info", "total_count") + edges = sgqlc.types.Field(sgqlc.types.list_of("SponsorableItemEdge"), graphql_name="edges") + """A list of edges.""" + + nodes = sgqlc.types.Field(sgqlc.types.list_of("SponsorableItem"), graphql_name="nodes") + """A list of nodes.""" + + page_info = sgqlc.types.Field(sgqlc.types.non_null(PageInfo), graphql_name="pageInfo") + """Information to aid in pagination.""" + + total_count = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="totalCount") + """Identifies the total count of items in the connection.""" + + +class SponsorableItemEdge(sgqlc.types.Type): + """An edge in a connection.""" + + __schema__ = github_schema + __field_names__ = ("cursor", "node") + cursor = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="cursor") + """A cursor for use in pagination.""" + + node = sgqlc.types.Field("SponsorableItem", graphql_name="node") + """The item at the end of the edge.""" + + +class SponsorsActivityConnection(sgqlc.types.relay.Connection): + """The connection type for SponsorsActivity.""" + + __schema__ = github_schema + __field_names__ = ("edges", "nodes", "page_info", "total_count") + edges = sgqlc.types.Field(sgqlc.types.list_of("SponsorsActivityEdge"), graphql_name="edges") + """A list of edges.""" + + nodes = sgqlc.types.Field(sgqlc.types.list_of("SponsorsActivity"), graphql_name="nodes") + """A list of nodes.""" + + page_info = sgqlc.types.Field(sgqlc.types.non_null(PageInfo), graphql_name="pageInfo") + """Information to aid in pagination.""" + + total_count = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="totalCount") + """Identifies 
the total count of items in the connection.""" + + +class SponsorsActivityEdge(sgqlc.types.Type): + """An edge in a connection.""" + + __schema__ = github_schema + __field_names__ = ("cursor", "node") + cursor = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="cursor") + """A cursor for use in pagination.""" + + node = sgqlc.types.Field("SponsorsActivity", graphql_name="node") + """The item at the end of the edge.""" + + +class SponsorsGoal(sgqlc.types.Type): + """A goal associated with a GitHub Sponsors listing, representing a + target the sponsored maintainer would like to attain. + """ + + __schema__ = github_schema + __field_names__ = ("description", "kind", "percent_complete", "target_value", "title") + description = sgqlc.types.Field(String, graphql_name="description") + """A description of the goal from the maintainer.""" + + kind = sgqlc.types.Field(sgqlc.types.non_null(SponsorsGoalKind), graphql_name="kind") + """What the objective of this goal is.""" + + percent_complete = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="percentComplete") + """The percentage representing how complete this goal is, between + 0-100. + """ + + target_value = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="targetValue") + """What the goal amount is. Represents an amount in USD for monthly + sponsorship amount goals. Represents a count of unique sponsors + for total sponsors count goals. + """ + + title = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="title") + """A brief summary of the kind and target value of this goal.""" + + +class SponsorsTierAdminInfo(sgqlc.types.Type): + """SponsorsTier information only visible to users that can administer + the associated Sponsors listing. 
+ """ + + __schema__ = github_schema + __field_names__ = ("sponsorships",) + sponsorships = sgqlc.types.Field( + sgqlc.types.non_null("SponsorshipConnection"), + graphql_name="sponsorships", + args=sgqlc.types.ArgDict( + ( + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ("include_private", sgqlc.types.Arg(Boolean, graphql_name="includePrivate", default=False)), + ("order_by", sgqlc.types.Arg(SponsorshipOrder, graphql_name="orderBy", default=None)), + ) + ), + ) + """The sponsorships associated with this tier. + + Arguments: + + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. + * `include_private` (`Boolean`): Whether or not to include private + sponsorships in the result set (default: `false`) + * `order_by` (`SponsorshipOrder`): Ordering options for + sponsorships returned from this connection. If left blank, the + sponsorships will be ordered based on relevancy to the viewer. 
+ """ + + +class SponsorsTierConnection(sgqlc.types.relay.Connection): + """The connection type for SponsorsTier.""" + + __schema__ = github_schema + __field_names__ = ("edges", "nodes", "page_info", "total_count") + edges = sgqlc.types.Field(sgqlc.types.list_of("SponsorsTierEdge"), graphql_name="edges") + """A list of edges.""" + + nodes = sgqlc.types.Field(sgqlc.types.list_of("SponsorsTier"), graphql_name="nodes") + """A list of nodes.""" + + page_info = sgqlc.types.Field(sgqlc.types.non_null(PageInfo), graphql_name="pageInfo") + """Information to aid in pagination.""" + + total_count = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="totalCount") + """Identifies the total count of items in the connection.""" + + +class SponsorsTierEdge(sgqlc.types.Type): + """An edge in a connection.""" + + __schema__ = github_schema + __field_names__ = ("cursor", "node") + cursor = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="cursor") + """A cursor for use in pagination.""" + + node = sgqlc.types.Field("SponsorsTier", graphql_name="node") + """The item at the end of the edge.""" + + +class SponsorshipConnection(sgqlc.types.relay.Connection): + """The connection type for Sponsorship.""" + + __schema__ = github_schema + __field_names__ = ( + "edges", + "nodes", + "page_info", + "total_count", + "total_recurring_monthly_price_in_cents", + "total_recurring_monthly_price_in_dollars", + ) + edges = sgqlc.types.Field(sgqlc.types.list_of("SponsorshipEdge"), graphql_name="edges") + """A list of edges.""" + + nodes = sgqlc.types.Field(sgqlc.types.list_of("Sponsorship"), graphql_name="nodes") + """A list of nodes.""" + + page_info = sgqlc.types.Field(sgqlc.types.non_null(PageInfo), graphql_name="pageInfo") + """Information to aid in pagination.""" + + total_count = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="totalCount") + """Identifies the total count of items in the connection.""" + + total_recurring_monthly_price_in_cents = 
sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="totalRecurringMonthlyPriceInCents") + """The total amount in cents of all recurring sponsorships in the + connection whose amount you can view. Does not include one-time + sponsorships. + """ + + total_recurring_monthly_price_in_dollars = sgqlc.types.Field( + sgqlc.types.non_null(Int), graphql_name="totalRecurringMonthlyPriceInDollars" + ) + """The total amount in USD of all recurring sponsorships in the + connection whose amount you can view. Does not include one-time + sponsorships. + """ + + +class SponsorshipEdge(sgqlc.types.Type): + """An edge in a connection.""" + + __schema__ = github_schema + __field_names__ = ("cursor", "node") + cursor = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="cursor") + """A cursor for use in pagination.""" + + node = sgqlc.types.Field("Sponsorship", graphql_name="node") + """The item at the end of the edge.""" + + +class SponsorshipNewsletterConnection(sgqlc.types.relay.Connection): + """The connection type for SponsorshipNewsletter.""" + + __schema__ = github_schema + __field_names__ = ("edges", "nodes", "page_info", "total_count") + edges = sgqlc.types.Field(sgqlc.types.list_of("SponsorshipNewsletterEdge"), graphql_name="edges") + """A list of edges.""" + + nodes = sgqlc.types.Field(sgqlc.types.list_of("SponsorshipNewsletter"), graphql_name="nodes") + """A list of nodes.""" + + page_info = sgqlc.types.Field(sgqlc.types.non_null(PageInfo), graphql_name="pageInfo") + """Information to aid in pagination.""" + + total_count = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="totalCount") + """Identifies the total count of items in the connection.""" + + +class SponsorshipNewsletterEdge(sgqlc.types.Type): + """An edge in a connection.""" + + __schema__ = github_schema + __field_names__ = ("cursor", "node") + cursor = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="cursor") + """A cursor for use in pagination.""" + + node = 
sgqlc.types.Field("SponsorshipNewsletter", graphql_name="node") + """The item at the end of the edge.""" + + +class StargazerConnection(sgqlc.types.relay.Connection): + """The connection type for User.""" + + __schema__ = github_schema + __field_names__ = ("edges", "nodes", "page_info", "total_count") + edges = sgqlc.types.Field(sgqlc.types.list_of("StargazerEdge"), graphql_name="edges") + """A list of edges.""" + + nodes = sgqlc.types.Field(sgqlc.types.list_of("User"), graphql_name="nodes") + """A list of nodes.""" + + page_info = sgqlc.types.Field(sgqlc.types.non_null(PageInfo), graphql_name="pageInfo") + """Information to aid in pagination.""" + + total_count = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="totalCount") + """Identifies the total count of items in the connection.""" + + +class StargazerEdge(sgqlc.types.Type): + """Represents a user that's starred a repository.""" + + __schema__ = github_schema + __field_names__ = ("cursor", "node", "starred_at") + cursor = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="cursor") + """A cursor for use in pagination.""" + + node = sgqlc.types.Field(sgqlc.types.non_null("User"), graphql_name="node") + + starred_at = sgqlc.types.Field(sgqlc.types.non_null(DateTime), graphql_name="starredAt") + """Identifies when the item was starred.""" + + +class Starrable(sgqlc.types.Interface): + """Things that can be starred.""" + + __schema__ = github_schema + __field_names__ = ("id", "stargazer_count", "stargazers", "viewer_has_starred") + id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="id") + + stargazer_count = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="stargazerCount") + """Returns a count of how many stargazers there are on this object""" + + stargazers = sgqlc.types.Field( + sgqlc.types.non_null(StargazerConnection), + graphql_name="stargazers", + args=sgqlc.types.ArgDict( + ( + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", 
sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ("order_by", sgqlc.types.Arg(StarOrder, graphql_name="orderBy", default=None)), + ) + ), + ) + """A list of users who have starred this starrable. + + Arguments: + + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. + * `order_by` (`StarOrder`): Order for connection + """ + + viewer_has_starred = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="viewerHasStarred") + """Returns a boolean indicating whether the viewing user has starred + this starrable. + """ + + +class StarredRepositoryConnection(sgqlc.types.relay.Connection): + """The connection type for Repository.""" + + __schema__ = github_schema + __field_names__ = ("edges", "is_over_limit", "nodes", "page_info", "total_count") + edges = sgqlc.types.Field(sgqlc.types.list_of("StarredRepositoryEdge"), graphql_name="edges") + """A list of edges.""" + + is_over_limit = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="isOverLimit") + """Is the list of stars for this user truncated? This is true for + users that have many stars. 
+ """ + + nodes = sgqlc.types.Field(sgqlc.types.list_of("Repository"), graphql_name="nodes") + """A list of nodes.""" + + page_info = sgqlc.types.Field(sgqlc.types.non_null(PageInfo), graphql_name="pageInfo") + """Information to aid in pagination.""" + + total_count = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="totalCount") + """Identifies the total count of items in the connection.""" + + +class StarredRepositoryEdge(sgqlc.types.Type): + """Represents a starred repository.""" + + __schema__ = github_schema + __field_names__ = ("cursor", "node", "starred_at") + cursor = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="cursor") + """A cursor for use in pagination.""" + + node = sgqlc.types.Field(sgqlc.types.non_null("Repository"), graphql_name="node") + + starred_at = sgqlc.types.Field(sgqlc.types.non_null(DateTime), graphql_name="starredAt") + """Identifies when the item was starred.""" + + +class StartRepositoryMigrationPayload(sgqlc.types.Type): + """Autogenerated return type of StartRepositoryMigration""" + + __schema__ = github_schema + __field_names__ = ("client_mutation_id", "repository_migration") + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + repository_migration = sgqlc.types.Field("RepositoryMigration", graphql_name="repositoryMigration") + """The new Octoshift repository migration.""" + + +class StatusCheckRollupContextConnection(sgqlc.types.relay.Connection): + """The connection type for StatusCheckRollupContext.""" + + __schema__ = github_schema + __field_names__ = ("edges", "nodes", "page_info", "total_count") + edges = sgqlc.types.Field(sgqlc.types.list_of("StatusCheckRollupContextEdge"), graphql_name="edges") + """A list of edges.""" + + nodes = sgqlc.types.Field(sgqlc.types.list_of("StatusCheckRollupContext"), graphql_name="nodes") + """A list of nodes.""" + + page_info = 
sgqlc.types.Field(sgqlc.types.non_null(PageInfo), graphql_name="pageInfo") + """Information to aid in pagination.""" + + total_count = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="totalCount") + """Identifies the total count of items in the connection.""" + + +class StatusCheckRollupContextEdge(sgqlc.types.Type): + """An edge in a connection.""" + + __schema__ = github_schema + __field_names__ = ("cursor", "node") + cursor = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="cursor") + """A cursor for use in pagination.""" + + node = sgqlc.types.Field("StatusCheckRollupContext", graphql_name="node") + """The item at the end of the edge.""" + + +class SubmitPullRequestReviewPayload(sgqlc.types.Type): + """Autogenerated return type of SubmitPullRequestReview""" + + __schema__ = github_schema + __field_names__ = ("client_mutation_id", "pull_request_review") + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + pull_request_review = sgqlc.types.Field("PullRequestReview", graphql_name="pullRequestReview") + """The submitted pull request review.""" + + +class Submodule(sgqlc.types.Type): + """A pointer to a repository at a specific revision embedded inside + another repository. 
+ """ + + __schema__ = github_schema + __field_names__ = ("branch", "git_url", "name", "path", "subproject_commit_oid") + branch = sgqlc.types.Field(String, graphql_name="branch") + """The branch of the upstream submodule for tracking updates""" + + git_url = sgqlc.types.Field(sgqlc.types.non_null(URI), graphql_name="gitUrl") + """The git URL of the submodule repository""" + + name = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="name") + """The name of the submodule in .gitmodules""" + + path = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="path") + """The path in the superproject that this submodule is located in""" + + subproject_commit_oid = sgqlc.types.Field(GitObjectID, graphql_name="subprojectCommitOid") + """The commit revision of the subproject repository being tracked by + the submodule + """ + + +class SubmoduleConnection(sgqlc.types.relay.Connection): + """The connection type for Submodule.""" + + __schema__ = github_schema + __field_names__ = ("edges", "nodes", "page_info", "total_count") + edges = sgqlc.types.Field(sgqlc.types.list_of("SubmoduleEdge"), graphql_name="edges") + """A list of edges.""" + + nodes = sgqlc.types.Field(sgqlc.types.list_of(Submodule), graphql_name="nodes") + """A list of nodes.""" + + page_info = sgqlc.types.Field(sgqlc.types.non_null(PageInfo), graphql_name="pageInfo") + """Information to aid in pagination.""" + + total_count = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="totalCount") + """Identifies the total count of items in the connection.""" + + +class SubmoduleEdge(sgqlc.types.Type): + """An edge in a connection.""" + + __schema__ = github_schema + __field_names__ = ("cursor", "node") + cursor = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="cursor") + """A cursor for use in pagination.""" + + node = sgqlc.types.Field(Submodule, graphql_name="node") + """The item at the end of the edge.""" + + +class Subscribable(sgqlc.types.Interface): + """Entities that can be 
subscribed to for web and email + notifications. + """ + + __schema__ = github_schema + __field_names__ = ("id", "viewer_can_subscribe", "viewer_subscription") + id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="id") + + viewer_can_subscribe = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="viewerCanSubscribe") + """Check if the viewer is able to change their subscription status + for the repository. + """ + + viewer_subscription = sgqlc.types.Field(SubscriptionState, graphql_name="viewerSubscription") + """Identifies if the viewer is watching, not watching, or ignoring + the subscribable entity. + """ + + +class SuggestedReviewer(sgqlc.types.Type): + """A suggestion to review a pull request based on a user's commit + history and review comments. + """ + + __schema__ = github_schema + __field_names__ = ("is_author", "is_commenter", "reviewer") + is_author = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="isAuthor") + """Is this suggestion based on past commits?""" + + is_commenter = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="isCommenter") + """Is this suggestion based on past review comments?""" + + reviewer = sgqlc.types.Field(sgqlc.types.non_null("User"), graphql_name="reviewer") + """Identifies the user suggested to review the pull request.""" + + +class TeamAuditEntryData(sgqlc.types.Interface): + """Metadata for an audit entry with action team.*""" + + __schema__ = github_schema + __field_names__ = ("team", "team_name", "team_resource_path", "team_url") + team = sgqlc.types.Field("Team", graphql_name="team") + """The team associated with the action""" + + team_name = sgqlc.types.Field(String, graphql_name="teamName") + """The name of the team""" + + team_resource_path = sgqlc.types.Field(URI, graphql_name="teamResourcePath") + """The HTTP path for this team""" + + team_url = sgqlc.types.Field(URI, graphql_name="teamUrl") + """The HTTP URL for this team""" + + +class 
TeamConnection(sgqlc.types.relay.Connection): + """The connection type for Team.""" + + __schema__ = github_schema + __field_names__ = ("edges", "nodes", "page_info", "total_count") + edges = sgqlc.types.Field(sgqlc.types.list_of("TeamEdge"), graphql_name="edges") + """A list of edges.""" + + nodes = sgqlc.types.Field(sgqlc.types.list_of("Team"), graphql_name="nodes") + """A list of nodes.""" + + page_info = sgqlc.types.Field(sgqlc.types.non_null(PageInfo), graphql_name="pageInfo") + """Information to aid in pagination.""" + + total_count = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="totalCount") + """Identifies the total count of items in the connection.""" + + +class TeamDiscussionCommentConnection(sgqlc.types.relay.Connection): + """The connection type for TeamDiscussionComment.""" + + __schema__ = github_schema + __field_names__ = ("edges", "nodes", "page_info", "total_count") + edges = sgqlc.types.Field(sgqlc.types.list_of("TeamDiscussionCommentEdge"), graphql_name="edges") + """A list of edges.""" + + nodes = sgqlc.types.Field(sgqlc.types.list_of("TeamDiscussionComment"), graphql_name="nodes") + """A list of nodes.""" + + page_info = sgqlc.types.Field(sgqlc.types.non_null(PageInfo), graphql_name="pageInfo") + """Information to aid in pagination.""" + + total_count = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="totalCount") + """Identifies the total count of items in the connection.""" + + +class TeamDiscussionCommentEdge(sgqlc.types.Type): + """An edge in a connection.""" + + __schema__ = github_schema + __field_names__ = ("cursor", "node") + cursor = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="cursor") + """A cursor for use in pagination.""" + + node = sgqlc.types.Field("TeamDiscussionComment", graphql_name="node") + """The item at the end of the edge.""" + + +class TeamDiscussionConnection(sgqlc.types.relay.Connection): + """The connection type for TeamDiscussion.""" + + __schema__ = github_schema + 
__field_names__ = ("edges", "nodes", "page_info", "total_count") + edges = sgqlc.types.Field(sgqlc.types.list_of("TeamDiscussionEdge"), graphql_name="edges") + """A list of edges.""" + + nodes = sgqlc.types.Field(sgqlc.types.list_of("TeamDiscussion"), graphql_name="nodes") + """A list of nodes.""" + + page_info = sgqlc.types.Field(sgqlc.types.non_null(PageInfo), graphql_name="pageInfo") + """Information to aid in pagination.""" + + total_count = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="totalCount") + """Identifies the total count of items in the connection.""" + + +class TeamDiscussionEdge(sgqlc.types.Type): + """An edge in a connection.""" + + __schema__ = github_schema + __field_names__ = ("cursor", "node") + cursor = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="cursor") + """A cursor for use in pagination.""" + + node = sgqlc.types.Field("TeamDiscussion", graphql_name="node") + """The item at the end of the edge.""" + + +class TeamEdge(sgqlc.types.Type): + """An edge in a connection.""" + + __schema__ = github_schema + __field_names__ = ("cursor", "node") + cursor = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="cursor") + """A cursor for use in pagination.""" + + node = sgqlc.types.Field("Team", graphql_name="node") + """The item at the end of the edge.""" + + +class TeamMemberConnection(sgqlc.types.relay.Connection): + """The connection type for User.""" + + __schema__ = github_schema + __field_names__ = ("edges", "nodes", "page_info", "total_count") + edges = sgqlc.types.Field(sgqlc.types.list_of("TeamMemberEdge"), graphql_name="edges") + """A list of edges.""" + + nodes = sgqlc.types.Field(sgqlc.types.list_of("User"), graphql_name="nodes") + """A list of nodes.""" + + page_info = sgqlc.types.Field(sgqlc.types.non_null(PageInfo), graphql_name="pageInfo") + """Information to aid in pagination.""" + + total_count = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="totalCount") + """Identifies the total 
count of items in the connection.""" + + +class TeamMemberEdge(sgqlc.types.Type): + """Represents a user who is a member of a team.""" + + __schema__ = github_schema + __field_names__ = ("cursor", "member_access_resource_path", "member_access_url", "node", "role") + cursor = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="cursor") + """A cursor for use in pagination.""" + + member_access_resource_path = sgqlc.types.Field(sgqlc.types.non_null(URI), graphql_name="memberAccessResourcePath") + """The HTTP path to the organization's member access page.""" + + member_access_url = sgqlc.types.Field(sgqlc.types.non_null(URI), graphql_name="memberAccessUrl") + """The HTTP URL to the organization's member access page.""" + + node = sgqlc.types.Field(sgqlc.types.non_null("User"), graphql_name="node") + + role = sgqlc.types.Field(sgqlc.types.non_null(TeamMemberRole), graphql_name="role") + """The role the member has on the team.""" + + +class TeamRepositoryConnection(sgqlc.types.relay.Connection): + """The connection type for Repository.""" + + __schema__ = github_schema + __field_names__ = ("edges", "nodes", "page_info", "total_count") + edges = sgqlc.types.Field(sgqlc.types.list_of("TeamRepositoryEdge"), graphql_name="edges") + """A list of edges.""" + + nodes = sgqlc.types.Field(sgqlc.types.list_of("Repository"), graphql_name="nodes") + """A list of nodes.""" + + page_info = sgqlc.types.Field(sgqlc.types.non_null(PageInfo), graphql_name="pageInfo") + """Information to aid in pagination.""" + + total_count = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="totalCount") + """Identifies the total count of items in the connection.""" + + +class TeamRepositoryEdge(sgqlc.types.Type): + """Represents a team repository.""" + + __schema__ = github_schema + __field_names__ = ("cursor", "node", "permission") + cursor = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="cursor") + """A cursor for use in pagination.""" + + node = 
sgqlc.types.Field(sgqlc.types.non_null("Repository"), graphql_name="node") + + permission = sgqlc.types.Field(sgqlc.types.non_null(RepositoryPermission), graphql_name="permission") + """The permission level the team has on the repository""" + + +class TextMatch(sgqlc.types.Type): + """A text match within a search result.""" + + __schema__ = github_schema + __field_names__ = ("fragment", "highlights", "property") + fragment = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="fragment") + """The specific text fragment within the property matched on.""" + + highlights = sgqlc.types.Field( + sgqlc.types.non_null(sgqlc.types.list_of(sgqlc.types.non_null("TextMatchHighlight"))), graphql_name="highlights" + ) + """Highlights within the matched fragment.""" + + property = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="property") + """The property matched on.""" + + +class TextMatchHighlight(sgqlc.types.Type): + """Represents a single highlight in a search result match.""" + + __schema__ = github_schema + __field_names__ = ("begin_indice", "end_indice", "text") + begin_indice = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="beginIndice") + """The indice in the fragment where the matched text begins.""" + + end_indice = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="endIndice") + """The indice in the fragment where the matched text ends.""" + + text = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="text") + """The text matched.""" + + +class TopicAuditEntryData(sgqlc.types.Interface): + """Metadata for an audit entry with a topic.""" + + __schema__ = github_schema + __field_names__ = ("topic", "topic_name") + topic = sgqlc.types.Field("Topic", graphql_name="topic") + """The name of the topic added to the repository""" + + topic_name = sgqlc.types.Field(String, graphql_name="topicName") + """The name of the topic added to the repository""" + + +class TransferIssuePayload(sgqlc.types.Type): + """Autogenerated 
return type of TransferIssue""" + + __schema__ = github_schema + __field_names__ = ("client_mutation_id", "issue") + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + issue = sgqlc.types.Field("Issue", graphql_name="issue") + """The issue that was transferred""" + + +class TreeEntry(sgqlc.types.Type): + """Represents a Git tree entry.""" + + __schema__ = github_schema + __field_names__ = ( + "extension", + "is_generated", + "line_count", + "mode", + "name", + "object", + "oid", + "path", + "repository", + "submodule", + "type", + ) + extension = sgqlc.types.Field(String, graphql_name="extension") + """The extension of the file""" + + is_generated = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="isGenerated") + """Whether or not this tree entry is generated""" + + line_count = sgqlc.types.Field(Int, graphql_name="lineCount") + """Number of lines in the file.""" + + mode = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="mode") + """Entry file mode.""" + + name = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="name") + """Entry file name.""" + + object = sgqlc.types.Field(GitObject, graphql_name="object") + """Entry file object.""" + + oid = sgqlc.types.Field(sgqlc.types.non_null(GitObjectID), graphql_name="oid") + """Entry file Git object ID.""" + + path = sgqlc.types.Field(String, graphql_name="path") + """The full path of the file.""" + + repository = sgqlc.types.Field(sgqlc.types.non_null("Repository"), graphql_name="repository") + """The Repository the tree entry belongs to""" + + submodule = sgqlc.types.Field(Submodule, graphql_name="submodule") + """If the TreeEntry is for a directory occupied by a submodule + project, this returns the corresponding submodule + """ + + type = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="type") + """Entry file type.""" + + +class 
UnarchiveRepositoryPayload(sgqlc.types.Type): + """Autogenerated return type of UnarchiveRepository""" + + __schema__ = github_schema + __field_names__ = ("client_mutation_id", "repository") + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + repository = sgqlc.types.Field("Repository", graphql_name="repository") + """The repository that was unarchived.""" + + +class UnfollowOrganizationPayload(sgqlc.types.Type): + """Autogenerated return type of UnfollowOrganization""" + + __schema__ = github_schema + __field_names__ = ("client_mutation_id", "organization") + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + organization = sgqlc.types.Field("Organization", graphql_name="organization") + """The organization that was unfollowed.""" + + +class UnfollowUserPayload(sgqlc.types.Type): + """Autogenerated return type of UnfollowUser""" + + __schema__ = github_schema + __field_names__ = ("client_mutation_id", "user") + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + user = sgqlc.types.Field("User", graphql_name="user") + """The user that was unfollowed.""" + + +class UniformResourceLocatable(sgqlc.types.Interface): + """Represents a type that can be retrieved by a URL.""" + + __schema__ = github_schema + __field_names__ = ("resource_path", "url") + resource_path = sgqlc.types.Field(sgqlc.types.non_null(URI), graphql_name="resourcePath") + """The HTML path to this resource.""" + + url = sgqlc.types.Field(sgqlc.types.non_null(URI), graphql_name="url") + """The URL to this resource.""" + + +class UnlinkRepositoryFromProjectPayload(sgqlc.types.Type): + """Autogenerated return type of UnlinkRepositoryFromProject""" + + __schema__ = github_schema + __field_names__ = 
("client_mutation_id", "project", "repository") + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + project = sgqlc.types.Field("Project", graphql_name="project") + """The linked Project.""" + + repository = sgqlc.types.Field("Repository", graphql_name="repository") + """The linked Repository.""" + + +class UnlockLockablePayload(sgqlc.types.Type): + """Autogenerated return type of UnlockLockable""" + + __schema__ = github_schema + __field_names__ = ("actor", "client_mutation_id", "unlocked_record") + actor = sgqlc.types.Field(Actor, graphql_name="actor") + """Identifies the actor who performed the event.""" + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + unlocked_record = sgqlc.types.Field(Lockable, graphql_name="unlockedRecord") + """The item that was unlocked.""" + + +class UnmarkDiscussionCommentAsAnswerPayload(sgqlc.types.Type): + """Autogenerated return type of UnmarkDiscussionCommentAsAnswer""" + + __schema__ = github_schema + __field_names__ = ("client_mutation_id", "discussion") + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + discussion = sgqlc.types.Field("Discussion", graphql_name="discussion") + """The discussion that includes the comment.""" + + +class UnmarkFileAsViewedPayload(sgqlc.types.Type): + """Autogenerated return type of UnmarkFileAsViewed""" + + __schema__ = github_schema + __field_names__ = ("client_mutation_id", "pull_request") + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + pull_request = sgqlc.types.Field("PullRequest", graphql_name="pullRequest") + """The updated pull request.""" + + +class 
UnmarkIssueAsDuplicatePayload(sgqlc.types.Type): + """Autogenerated return type of UnmarkIssueAsDuplicate""" + + __schema__ = github_schema + __field_names__ = ("client_mutation_id", "duplicate") + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + duplicate = sgqlc.types.Field("IssueOrPullRequest", graphql_name="duplicate") + """The issue or pull request that was marked as a duplicate.""" + + +class UnminimizeCommentPayload(sgqlc.types.Type): + """Autogenerated return type of UnminimizeComment""" + + __schema__ = github_schema + __field_names__ = ("client_mutation_id", "unminimized_comment") + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + unminimized_comment = sgqlc.types.Field(Minimizable, graphql_name="unminimizedComment") + """The comment that was unminimized.""" + + +class UnpinIssuePayload(sgqlc.types.Type): + """Autogenerated return type of UnpinIssue""" + + __schema__ = github_schema + __field_names__ = ("client_mutation_id", "issue") + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + issue = sgqlc.types.Field("Issue", graphql_name="issue") + """The issue that was unpinned""" + + +class UnresolveReviewThreadPayload(sgqlc.types.Type): + """Autogenerated return type of UnresolveReviewThread""" + + __schema__ = github_schema + __field_names__ = ("client_mutation_id", "thread") + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + thread = sgqlc.types.Field("PullRequestReviewThread", graphql_name="thread") + """The thread to resolve.""" + + +class Updatable(sgqlc.types.Interface): + """Entities that can be updated.""" + + __schema__ = github_schema + 
__field_names__ = ("viewer_can_update",) + viewer_can_update = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="viewerCanUpdate") + """Check if the current viewer can update this object.""" + + +class UpdatableComment(sgqlc.types.Interface): + """Comments that can be updated.""" + + __schema__ = github_schema + __field_names__ = ("viewer_cannot_update_reasons",) + viewer_cannot_update_reasons = sgqlc.types.Field( + sgqlc.types.non_null(sgqlc.types.list_of(sgqlc.types.non_null(CommentCannotUpdateReason))), graphql_name="viewerCannotUpdateReasons" + ) + """Reasons why the current viewer can not update this comment.""" + + +class UpdateBranchProtectionRulePayload(sgqlc.types.Type): + """Autogenerated return type of UpdateBranchProtectionRule""" + + __schema__ = github_schema + __field_names__ = ("branch_protection_rule", "client_mutation_id") + branch_protection_rule = sgqlc.types.Field("BranchProtectionRule", graphql_name="branchProtectionRule") + """The newly created BranchProtectionRule.""" + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class UpdateCheckRunPayload(sgqlc.types.Type): + """Autogenerated return type of UpdateCheckRun""" + + __schema__ = github_schema + __field_names__ = ("check_run", "client_mutation_id") + check_run = sgqlc.types.Field("CheckRun", graphql_name="checkRun") + """The updated check run.""" + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + +class UpdateCheckSuitePreferencesPayload(sgqlc.types.Type): + """Autogenerated return type of UpdateCheckSuitePreferences""" + + __schema__ = github_schema + __field_names__ = ("client_mutation_id", "repository") + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + repository = 
sgqlc.types.Field("Repository", graphql_name="repository") + """The updated repository.""" + + +class UpdateDiscussionCommentPayload(sgqlc.types.Type): + """Autogenerated return type of UpdateDiscussionComment""" + + __schema__ = github_schema + __field_names__ = ("client_mutation_id", "comment") + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + comment = sgqlc.types.Field("DiscussionComment", graphql_name="comment") + """The modified discussion comment.""" + + +class UpdateDiscussionPayload(sgqlc.types.Type): + """Autogenerated return type of UpdateDiscussion""" + + __schema__ = github_schema + __field_names__ = ("client_mutation_id", "discussion") + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + discussion = sgqlc.types.Field("Discussion", graphql_name="discussion") + """The modified discussion.""" + + +class UpdateEnterpriseAdministratorRolePayload(sgqlc.types.Type): + """Autogenerated return type of UpdateEnterpriseAdministratorRole""" + + __schema__ = github_schema + __field_names__ = ("client_mutation_id", "message") + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + message = sgqlc.types.Field(String, graphql_name="message") + """A message confirming the result of changing the administrator's + role. 
+ """ + + +class UpdateEnterpriseAllowPrivateRepositoryForkingSettingPayload(sgqlc.types.Type): + """Autogenerated return type of + UpdateEnterpriseAllowPrivateRepositoryForkingSetting + """ + + __schema__ = github_schema + __field_names__ = ("client_mutation_id", "enterprise", "message") + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + enterprise = sgqlc.types.Field("Enterprise", graphql_name="enterprise") + """The enterprise with the updated allow private repository forking + setting. + """ + + message = sgqlc.types.Field(String, graphql_name="message") + """A message confirming the result of updating the allow private + repository forking setting. + """ + + +class UpdateEnterpriseDefaultRepositoryPermissionSettingPayload(sgqlc.types.Type): + """Autogenerated return type of + UpdateEnterpriseDefaultRepositoryPermissionSetting + """ + + __schema__ = github_schema + __field_names__ = ("client_mutation_id", "enterprise", "message") + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + enterprise = sgqlc.types.Field("Enterprise", graphql_name="enterprise") + """The enterprise with the updated base repository permission + setting. + """ + + message = sgqlc.types.Field(String, graphql_name="message") + """A message confirming the result of updating the base repository + permission setting. 
+ """ + + +class UpdateEnterpriseMembersCanChangeRepositoryVisibilitySettingPayload(sgqlc.types.Type): + """Autogenerated return type of + UpdateEnterpriseMembersCanChangeRepositoryVisibilitySetting + """ + + __schema__ = github_schema + __field_names__ = ("client_mutation_id", "enterprise", "message") + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + enterprise = sgqlc.types.Field("Enterprise", graphql_name="enterprise") + """The enterprise with the updated members can change repository + visibility setting. + """ + + message = sgqlc.types.Field(String, graphql_name="message") + """A message confirming the result of updating the members can change + repository visibility setting. + """ + + +class UpdateEnterpriseMembersCanCreateRepositoriesSettingPayload(sgqlc.types.Type): + """Autogenerated return type of + UpdateEnterpriseMembersCanCreateRepositoriesSetting + """ + + __schema__ = github_schema + __field_names__ = ("client_mutation_id", "enterprise", "message") + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + enterprise = sgqlc.types.Field("Enterprise", graphql_name="enterprise") + """The enterprise with the updated members can create repositories + setting. + """ + + message = sgqlc.types.Field(String, graphql_name="message") + """A message confirming the result of updating the members can create + repositories setting. 
+ """ + + +class UpdateEnterpriseMembersCanDeleteIssuesSettingPayload(sgqlc.types.Type): + """Autogenerated return type of + UpdateEnterpriseMembersCanDeleteIssuesSetting + """ + + __schema__ = github_schema + __field_names__ = ("client_mutation_id", "enterprise", "message") + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + enterprise = sgqlc.types.Field("Enterprise", graphql_name="enterprise") + """The enterprise with the updated members can delete issues setting.""" + + message = sgqlc.types.Field(String, graphql_name="message") + """A message confirming the result of updating the members can delete + issues setting. + """ + + +class UpdateEnterpriseMembersCanDeleteRepositoriesSettingPayload(sgqlc.types.Type): + """Autogenerated return type of + UpdateEnterpriseMembersCanDeleteRepositoriesSetting + """ + + __schema__ = github_schema + __field_names__ = ("client_mutation_id", "enterprise", "message") + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + enterprise = sgqlc.types.Field("Enterprise", graphql_name="enterprise") + """The enterprise with the updated members can delete repositories + setting. + """ + + message = sgqlc.types.Field(String, graphql_name="message") + """A message confirming the result of updating the members can delete + repositories setting. 
+ """ + + +class UpdateEnterpriseMembersCanInviteCollaboratorsSettingPayload(sgqlc.types.Type): + """Autogenerated return type of + UpdateEnterpriseMembersCanInviteCollaboratorsSetting + """ + + __schema__ = github_schema + __field_names__ = ("client_mutation_id", "enterprise", "message") + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + enterprise = sgqlc.types.Field("Enterprise", graphql_name="enterprise") + """The enterprise with the updated members can invite collaborators + setting. + """ + + message = sgqlc.types.Field(String, graphql_name="message") + """A message confirming the result of updating the members can invite + collaborators setting. + """ + + +class UpdateEnterpriseMembersCanMakePurchasesSettingPayload(sgqlc.types.Type): + """Autogenerated return type of + UpdateEnterpriseMembersCanMakePurchasesSetting + """ + + __schema__ = github_schema + __field_names__ = ("client_mutation_id", "enterprise", "message") + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + enterprise = sgqlc.types.Field("Enterprise", graphql_name="enterprise") + """The enterprise with the updated members can make purchases + setting. + """ + + message = sgqlc.types.Field(String, graphql_name="message") + """A message confirming the result of updating the members can make + purchases setting. 
+ """ + + +class UpdateEnterpriseMembersCanUpdateProtectedBranchesSettingPayload(sgqlc.types.Type): + """Autogenerated return type of + UpdateEnterpriseMembersCanUpdateProtectedBranchesSetting + """ + + __schema__ = github_schema + __field_names__ = ("client_mutation_id", "enterprise", "message") + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + enterprise = sgqlc.types.Field("Enterprise", graphql_name="enterprise") + """The enterprise with the updated members can update protected + branches setting. + """ + + message = sgqlc.types.Field(String, graphql_name="message") + """A message confirming the result of updating the members can update + protected branches setting. + """ + + +class UpdateEnterpriseMembersCanViewDependencyInsightsSettingPayload(sgqlc.types.Type): + """Autogenerated return type of + UpdateEnterpriseMembersCanViewDependencyInsightsSetting + """ + + __schema__ = github_schema + __field_names__ = ("client_mutation_id", "enterprise", "message") + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + enterprise = sgqlc.types.Field("Enterprise", graphql_name="enterprise") + """The enterprise with the updated members can view dependency + insights setting. + """ + + message = sgqlc.types.Field(String, graphql_name="message") + """A message confirming the result of updating the members can view + dependency insights setting. 
+ """ + + +class UpdateEnterpriseOrganizationProjectsSettingPayload(sgqlc.types.Type): + """Autogenerated return type of + UpdateEnterpriseOrganizationProjectsSetting + """ + + __schema__ = github_schema + __field_names__ = ("client_mutation_id", "enterprise", "message") + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + enterprise = sgqlc.types.Field("Enterprise", graphql_name="enterprise") + """The enterprise with the updated organization projects setting.""" + + message = sgqlc.types.Field(String, graphql_name="message") + """A message confirming the result of updating the organization + projects setting. + """ + + +class UpdateEnterpriseOwnerOrganizationRolePayload(sgqlc.types.Type): + """Autogenerated return type of UpdateEnterpriseOwnerOrganizationRole""" + + __schema__ = github_schema + __field_names__ = ("client_mutation_id", "message") + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + message = sgqlc.types.Field(String, graphql_name="message") + """A message confirming the result of changing the owner's + organization role. 
+ """ + + +class UpdateEnterpriseProfilePayload(sgqlc.types.Type): + """Autogenerated return type of UpdateEnterpriseProfile""" + + __schema__ = github_schema + __field_names__ = ("client_mutation_id", "enterprise") + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + enterprise = sgqlc.types.Field("Enterprise", graphql_name="enterprise") + """The updated enterprise.""" + + +class UpdateEnterpriseRepositoryProjectsSettingPayload(sgqlc.types.Type): + """Autogenerated return type of + UpdateEnterpriseRepositoryProjectsSetting + """ + + __schema__ = github_schema + __field_names__ = ("client_mutation_id", "enterprise", "message") + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + enterprise = sgqlc.types.Field("Enterprise", graphql_name="enterprise") + """The enterprise with the updated repository projects setting.""" + + message = sgqlc.types.Field(String, graphql_name="message") + """A message confirming the result of updating the repository + projects setting. + """ + + +class UpdateEnterpriseTeamDiscussionsSettingPayload(sgqlc.types.Type): + """Autogenerated return type of + UpdateEnterpriseTeamDiscussionsSetting + """ + + __schema__ = github_schema + __field_names__ = ("client_mutation_id", "enterprise", "message") + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + enterprise = sgqlc.types.Field("Enterprise", graphql_name="enterprise") + """The enterprise with the updated team discussions setting.""" + + message = sgqlc.types.Field(String, graphql_name="message") + """A message confirming the result of updating the team discussions + setting. 
+ """ + + +class UpdateEnterpriseTwoFactorAuthenticationRequiredSettingPayload(sgqlc.types.Type): + """Autogenerated return type of + UpdateEnterpriseTwoFactorAuthenticationRequiredSetting + """ + + __schema__ = github_schema + __field_names__ = ("client_mutation_id", "enterprise", "message") + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + enterprise = sgqlc.types.Field("Enterprise", graphql_name="enterprise") + """The enterprise with the updated two factor authentication required + setting. + """ + + message = sgqlc.types.Field(String, graphql_name="message") + """A message confirming the result of updating the two factor + authentication required setting. + """ + + +class UpdateEnvironmentPayload(sgqlc.types.Type): + """Autogenerated return type of UpdateEnvironment""" + + __schema__ = github_schema + __field_names__ = ("client_mutation_id", "environment") + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + environment = sgqlc.types.Field("Environment", graphql_name="environment") + """The updated environment.""" + + +class UpdateIpAllowListEnabledSettingPayload(sgqlc.types.Type): + """Autogenerated return type of UpdateIpAllowListEnabledSetting""" + + __schema__ = github_schema + __field_names__ = ("client_mutation_id", "owner") + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + owner = sgqlc.types.Field("IpAllowListOwner", graphql_name="owner") + """The IP allow list owner on which the setting was updated.""" + + +class UpdateIpAllowListEntryPayload(sgqlc.types.Type): + """Autogenerated return type of UpdateIpAllowListEntry""" + + __schema__ = github_schema + __field_names__ = ("client_mutation_id", "ip_allow_list_entry") + client_mutation_id = 
sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + ip_allow_list_entry = sgqlc.types.Field("IpAllowListEntry", graphql_name="ipAllowListEntry") + """The IP allow list entry that was updated.""" + + +class UpdateIpAllowListForInstalledAppsEnabledSettingPayload(sgqlc.types.Type): + """Autogenerated return type of + UpdateIpAllowListForInstalledAppsEnabledSetting + """ + + __schema__ = github_schema + __field_names__ = ("client_mutation_id", "owner") + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + owner = sgqlc.types.Field("IpAllowListOwner", graphql_name="owner") + """The IP allow list owner on which the setting was updated.""" + + +class UpdateIssueCommentPayload(sgqlc.types.Type): + """Autogenerated return type of UpdateIssueComment""" + + __schema__ = github_schema + __field_names__ = ("client_mutation_id", "issue_comment") + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + issue_comment = sgqlc.types.Field("IssueComment", graphql_name="issueComment") + """The updated comment.""" + + +class UpdateIssuePayload(sgqlc.types.Type): + """Autogenerated return type of UpdateIssue""" + + __schema__ = github_schema + __field_names__ = ("actor", "client_mutation_id", "issue") + actor = sgqlc.types.Field(Actor, graphql_name="actor") + """Identifies the actor who performed the event.""" + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + issue = sgqlc.types.Field("Issue", graphql_name="issue") + """The issue.""" + + +class UpdateNotificationRestrictionSettingPayload(sgqlc.types.Type): + """Autogenerated return type of UpdateNotificationRestrictionSetting""" + + __schema__ = github_schema + 
__field_names__ = ("client_mutation_id", "owner") + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + owner = sgqlc.types.Field("VerifiableDomainOwner", graphql_name="owner") + """The owner on which the setting was updated.""" + + +class UpdateOrganizationAllowPrivateRepositoryForkingSettingPayload(sgqlc.types.Type): + """Autogenerated return type of + UpdateOrganizationAllowPrivateRepositoryForkingSetting + """ + + __schema__ = github_schema + __field_names__ = ("client_mutation_id", "message", "organization") + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + message = sgqlc.types.Field(String, graphql_name="message") + """A message confirming the result of updating the allow private + repository forking setting. + """ + + organization = sgqlc.types.Field("Organization", graphql_name="organization") + """The organization with the updated allow private repository forking + setting. 
+ """ + + +class UpdateProjectCardPayload(sgqlc.types.Type): + """Autogenerated return type of UpdateProjectCard""" + + __schema__ = github_schema + __field_names__ = ("client_mutation_id", "project_card") + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + project_card = sgqlc.types.Field("ProjectCard", graphql_name="projectCard") + """The updated ProjectCard.""" + + +class UpdateProjectColumnPayload(sgqlc.types.Type): + """Autogenerated return type of UpdateProjectColumn""" + + __schema__ = github_schema + __field_names__ = ("client_mutation_id", "project_column") + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + project_column = sgqlc.types.Field("ProjectColumn", graphql_name="projectColumn") + """The updated project column.""" + + +class UpdateProjectDraftIssuePayload(sgqlc.types.Type): + """Autogenerated return type of UpdateProjectDraftIssue""" + + __schema__ = github_schema + __field_names__ = ("client_mutation_id", "draft_issue") + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + draft_issue = sgqlc.types.Field("DraftIssue", graphql_name="draftIssue") + """The draft issue updated in the project.""" + + +class UpdateProjectNextItemFieldPayload(sgqlc.types.Type): + """Autogenerated return type of UpdateProjectNextItemField""" + + __schema__ = github_schema + __field_names__ = ("client_mutation_id", "project_next_item") + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + project_next_item = sgqlc.types.Field("ProjectNextItem", graphql_name="projectNextItem") + """The updated item.""" + + +class UpdateProjectNextPayload(sgqlc.types.Type): + """Autogenerated 
return type of UpdateProjectNext""" + + __schema__ = github_schema + __field_names__ = ("client_mutation_id", "project_next") + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + project_next = sgqlc.types.Field("ProjectNext", graphql_name="projectNext") + """The updated Project.""" + + +class UpdateProjectPayload(sgqlc.types.Type): + """Autogenerated return type of UpdateProject""" + + __schema__ = github_schema + __field_names__ = ("client_mutation_id", "project") + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + project = sgqlc.types.Field("Project", graphql_name="project") + """The updated project.""" + + +class UpdatePullRequestBranchPayload(sgqlc.types.Type): + """Autogenerated return type of UpdatePullRequestBranch""" + + __schema__ = github_schema + __field_names__ = ("client_mutation_id", "pull_request") + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + pull_request = sgqlc.types.Field("PullRequest", graphql_name="pullRequest") + """The updated pull request.""" + + +class UpdatePullRequestPayload(sgqlc.types.Type): + """Autogenerated return type of UpdatePullRequest""" + + __schema__ = github_schema + __field_names__ = ("actor", "client_mutation_id", "pull_request") + actor = sgqlc.types.Field(Actor, graphql_name="actor") + """Identifies the actor who performed the event.""" + + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + pull_request = sgqlc.types.Field("PullRequest", graphql_name="pullRequest") + """The updated pull request.""" + + +class UpdatePullRequestReviewCommentPayload(sgqlc.types.Type): + """Autogenerated return type of 
UpdatePullRequestReviewComment""" + + __schema__ = github_schema + __field_names__ = ("client_mutation_id", "pull_request_review_comment") + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + pull_request_review_comment = sgqlc.types.Field("PullRequestReviewComment", graphql_name="pullRequestReviewComment") + """The updated comment.""" + + +class UpdatePullRequestReviewPayload(sgqlc.types.Type): + """Autogenerated return type of UpdatePullRequestReview""" + + __schema__ = github_schema + __field_names__ = ("client_mutation_id", "pull_request_review") + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + pull_request_review = sgqlc.types.Field("PullRequestReview", graphql_name="pullRequestReview") + """The updated pull request review.""" + + +class UpdateRefPayload(sgqlc.types.Type): + """Autogenerated return type of UpdateRef""" + + __schema__ = github_schema + __field_names__ = ("client_mutation_id", "ref") + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + ref = sgqlc.types.Field("Ref", graphql_name="ref") + """The updated Ref.""" + + +class UpdateRepositoryPayload(sgqlc.types.Type): + """Autogenerated return type of UpdateRepository""" + + __schema__ = github_schema + __field_names__ = ("client_mutation_id", "repository") + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + repository = sgqlc.types.Field("Repository", graphql_name="repository") + """The updated repository.""" + + +class UpdateSponsorshipPreferencesPayload(sgqlc.types.Type): + """Autogenerated return type of UpdateSponsorshipPreferences""" + + __schema__ = github_schema + __field_names__ = 
("client_mutation_id", "sponsorship") + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + sponsorship = sgqlc.types.Field("Sponsorship", graphql_name="sponsorship") + """The sponsorship that was updated.""" + + +class UpdateSubscriptionPayload(sgqlc.types.Type): + """Autogenerated return type of UpdateSubscription""" + + __schema__ = github_schema + __field_names__ = ("client_mutation_id", "subscribable") + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + subscribable = sgqlc.types.Field(Subscribable, graphql_name="subscribable") + """The input subscribable entity.""" + + +class UpdateTeamDiscussionCommentPayload(sgqlc.types.Type): + """Autogenerated return type of UpdateTeamDiscussionComment""" + + __schema__ = github_schema + __field_names__ = ("client_mutation_id", "team_discussion_comment") + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + team_discussion_comment = sgqlc.types.Field("TeamDiscussionComment", graphql_name="teamDiscussionComment") + """The updated comment.""" + + +class UpdateTeamDiscussionPayload(sgqlc.types.Type): + """Autogenerated return type of UpdateTeamDiscussion""" + + __schema__ = github_schema + __field_names__ = ("client_mutation_id", "team_discussion") + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + team_discussion = sgqlc.types.Field("TeamDiscussion", graphql_name="teamDiscussion") + """The updated discussion.""" + + +class UpdateTeamsRepositoryPayload(sgqlc.types.Type): + """Autogenerated return type of UpdateTeamsRepository""" + + __schema__ = github_schema + __field_names__ = ("client_mutation_id", "repository", "teams") + 
client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + repository = sgqlc.types.Field("Repository", graphql_name="repository") + """The repository that was updated.""" + + teams = sgqlc.types.Field(sgqlc.types.list_of(sgqlc.types.non_null("Team")), graphql_name="teams") + """The teams granted permission on the repository.""" + + +class UpdateTopicsPayload(sgqlc.types.Type): + """Autogenerated return type of UpdateTopics""" + + __schema__ = github_schema + __field_names__ = ("client_mutation_id", "invalid_topic_names", "repository") + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + invalid_topic_names = sgqlc.types.Field(sgqlc.types.list_of(sgqlc.types.non_null(String)), graphql_name="invalidTopicNames") + """Names of the provided topics that are not valid.""" + + repository = sgqlc.types.Field("Repository", graphql_name="repository") + """The updated repository.""" + + +class UserConnection(sgqlc.types.relay.Connection): + """The connection type for User.""" + + __schema__ = github_schema + __field_names__ = ("edges", "nodes", "page_info", "total_count") + edges = sgqlc.types.Field(sgqlc.types.list_of("UserEdge"), graphql_name="edges") + """A list of edges.""" + + nodes = sgqlc.types.Field(sgqlc.types.list_of("User"), graphql_name="nodes") + """A list of nodes.""" + + page_info = sgqlc.types.Field(sgqlc.types.non_null(PageInfo), graphql_name="pageInfo") + """Information to aid in pagination.""" + + total_count = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="totalCount") + """Identifies the total count of items in the connection.""" + + +class UserContentEditConnection(sgqlc.types.relay.Connection): + """A list of edits to content.""" + + __schema__ = github_schema + __field_names__ = ("edges", "nodes", "page_info", "total_count") + edges = 
sgqlc.types.Field(sgqlc.types.list_of("UserContentEditEdge"), graphql_name="edges") + """A list of edges.""" + + nodes = sgqlc.types.Field(sgqlc.types.list_of("UserContentEdit"), graphql_name="nodes") + """A list of nodes.""" + + page_info = sgqlc.types.Field(sgqlc.types.non_null(PageInfo), graphql_name="pageInfo") + """Information to aid in pagination.""" + + total_count = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="totalCount") + """Identifies the total count of items in the connection.""" + + +class UserContentEditEdge(sgqlc.types.Type): + """An edge in a connection.""" + + __schema__ = github_schema + __field_names__ = ("cursor", "node") + cursor = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="cursor") + """A cursor for use in pagination.""" + + node = sgqlc.types.Field("UserContentEdit", graphql_name="node") + """The item at the end of the edge.""" + + +class UserEdge(sgqlc.types.Type): + """Represents a user.""" + + __schema__ = github_schema + __field_names__ = ("cursor", "node") + cursor = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="cursor") + """A cursor for use in pagination.""" + + node = sgqlc.types.Field("User", graphql_name="node") + """The item at the end of the edge.""" + + +class UserEmailMetadata(sgqlc.types.Type): + """Email attributes from External Identity""" + + __schema__ = github_schema + __field_names__ = ("primary", "type", "value") + primary = sgqlc.types.Field(Boolean, graphql_name="primary") + """Boolean to identify primary emails""" + + type = sgqlc.types.Field(String, graphql_name="type") + """Type of email""" + + value = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="value") + """Email id""" + + +class UserStatusConnection(sgqlc.types.relay.Connection): + """The connection type for UserStatus.""" + + __schema__ = github_schema + __field_names__ = ("edges", "nodes", "page_info", "total_count") + edges = sgqlc.types.Field(sgqlc.types.list_of("UserStatusEdge"), 
graphql_name="edges") + """A list of edges.""" + + nodes = sgqlc.types.Field(sgqlc.types.list_of("UserStatus"), graphql_name="nodes") + """A list of nodes.""" + + page_info = sgqlc.types.Field(sgqlc.types.non_null(PageInfo), graphql_name="pageInfo") + """Information to aid in pagination.""" + + total_count = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="totalCount") + """Identifies the total count of items in the connection.""" + + +class UserStatusEdge(sgqlc.types.Type): + """An edge in a connection.""" + + __schema__ = github_schema + __field_names__ = ("cursor", "node") + cursor = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="cursor") + """A cursor for use in pagination.""" + + node = sgqlc.types.Field("UserStatus", graphql_name="node") + """The item at the end of the edge.""" + + +class VerifiableDomainConnection(sgqlc.types.relay.Connection): + """The connection type for VerifiableDomain.""" + + __schema__ = github_schema + __field_names__ = ("edges", "nodes", "page_info", "total_count") + edges = sgqlc.types.Field(sgqlc.types.list_of("VerifiableDomainEdge"), graphql_name="edges") + """A list of edges.""" + + nodes = sgqlc.types.Field(sgqlc.types.list_of("VerifiableDomain"), graphql_name="nodes") + """A list of nodes.""" + + page_info = sgqlc.types.Field(sgqlc.types.non_null(PageInfo), graphql_name="pageInfo") + """Information to aid in pagination.""" + + total_count = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="totalCount") + """Identifies the total count of items in the connection.""" + + +class VerifiableDomainEdge(sgqlc.types.Type): + """An edge in a connection.""" + + __schema__ = github_schema + __field_names__ = ("cursor", "node") + cursor = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="cursor") + """A cursor for use in pagination.""" + + node = sgqlc.types.Field("VerifiableDomain", graphql_name="node") + """The item at the end of the edge.""" + + +class 
VerifyVerifiableDomainPayload(sgqlc.types.Type): + """Autogenerated return type of VerifyVerifiableDomain""" + + __schema__ = github_schema + __field_names__ = ("client_mutation_id", "domain") + client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId") + """A unique identifier for the client performing the mutation.""" + + domain = sgqlc.types.Field("VerifiableDomain", graphql_name="domain") + """The verifiable domain that was verified.""" + + +class Votable(sgqlc.types.Interface): + """A subject that may be upvoted.""" + + __schema__ = github_schema + __field_names__ = ("upvote_count", "viewer_can_upvote", "viewer_has_upvoted") + upvote_count = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="upvoteCount") + """Number of upvotes that this subject has received.""" + + viewer_can_upvote = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="viewerCanUpvote") + """Whether or not the current user can add or remove an upvote on + this subject. + """ + + viewer_has_upvoted = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="viewerHasUpvoted") + """Whether or not the current user has already upvoted this subject.""" + + +class AddedToProjectEvent(sgqlc.types.Type, Node): + """Represents a 'added_to_project' event on a given issue or pull + request. 
+ """ + + __schema__ = github_schema + __field_names__ = ("actor", "created_at", "database_id") + actor = sgqlc.types.Field(Actor, graphql_name="actor") + """Identifies the actor who performed the event.""" + + created_at = sgqlc.types.Field(sgqlc.types.non_null(DateTime), graphql_name="createdAt") + """Identifies the date and time when the object was created.""" + + database_id = sgqlc.types.Field(Int, graphql_name="databaseId") + """Identifies the primary key from the database.""" + + +class App(sgqlc.types.Type, Node): + """A GitHub App.""" + + __schema__ = github_schema + __field_names__ = ( + "created_at", + "database_id", + "description", + "ip_allow_list_entries", + "logo_background_color", + "logo_url", + "name", + "slug", + "updated_at", + "url", + ) + created_at = sgqlc.types.Field(sgqlc.types.non_null(DateTime), graphql_name="createdAt") + """Identifies the date and time when the object was created.""" + + database_id = sgqlc.types.Field(Int, graphql_name="databaseId") + """Identifies the primary key from the database.""" + + description = sgqlc.types.Field(String, graphql_name="description") + """The description of the app.""" + + ip_allow_list_entries = sgqlc.types.Field( + sgqlc.types.non_null(IpAllowListEntryConnection), + graphql_name="ipAllowListEntries", + args=sgqlc.types.ArgDict( + ( + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ( + "order_by", + sgqlc.types.Arg( + IpAllowListEntryOrder, graphql_name="orderBy", default={"field": "ALLOW_LIST_VALUE", "direction": "ASC"} + ), + ), + ) + ), + ) + """The IP addresses of the app. + + Arguments: + + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. 
+ * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. + * `order_by` (`IpAllowListEntryOrder`): Ordering options for IP + allow list entries returned. (default: `{field: + ALLOW_LIST_VALUE, direction: ASC}`) + """ + + logo_background_color = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="logoBackgroundColor") + """The hex color code, without the leading '#', for the logo + background. + """ + + logo_url = sgqlc.types.Field( + sgqlc.types.non_null(URI), + graphql_name="logoUrl", + args=sgqlc.types.ArgDict((("size", sgqlc.types.Arg(Int, graphql_name="size", default=None)),)), + ) + """A URL pointing to the app's logo. + + Arguments: + + * `size` (`Int`): The size of the resulting image. + """ + + name = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="name") + """The name of the app.""" + + slug = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="slug") + """A slug based on the name of the app for use in URLs.""" + + updated_at = sgqlc.types.Field(sgqlc.types.non_null(DateTime), graphql_name="updatedAt") + """Identifies the date and time when the object was last updated.""" + + url = sgqlc.types.Field(sgqlc.types.non_null(URI), graphql_name="url") + """The URL to the app's homepage.""" + + +class AssignedEvent(sgqlc.types.Type, Node): + """Represents an 'assigned' event on any assignable object.""" + + __schema__ = github_schema + __field_names__ = ("actor", "assignable", "assignee", "created_at") + actor = sgqlc.types.Field(Actor, graphql_name="actor") + """Identifies the actor who performed the event.""" + + assignable = sgqlc.types.Field(sgqlc.types.non_null(Assignable), graphql_name="assignable") + """Identifies the assignable associated with the event.""" + + assignee = sgqlc.types.Field("Assignee", graphql_name="assignee") + """Identifies the user 
or mannequin that was assigned.""" + + created_at = sgqlc.types.Field(sgqlc.types.non_null(DateTime), graphql_name="createdAt") + """Identifies the date and time when the object was created.""" + + +class AutoMergeDisabledEvent(sgqlc.types.Type, Node): + """Represents a 'auto_merge_disabled' event on a given pull request.""" + + __schema__ = github_schema + __field_names__ = ("actor", "created_at", "disabler", "pull_request", "reason", "reason_code") + actor = sgqlc.types.Field(Actor, graphql_name="actor") + """Identifies the actor who performed the event.""" + + created_at = sgqlc.types.Field(sgqlc.types.non_null(DateTime), graphql_name="createdAt") + """Identifies the date and time when the object was created.""" + + disabler = sgqlc.types.Field("User", graphql_name="disabler") + """The user who disabled auto-merge for this Pull Request""" + + pull_request = sgqlc.types.Field("PullRequest", graphql_name="pullRequest") + """PullRequest referenced by event""" + + reason = sgqlc.types.Field(String, graphql_name="reason") + """The reason auto-merge was disabled""" + + reason_code = sgqlc.types.Field(String, graphql_name="reasonCode") + """The reason_code relating to why auto-merge was disabled""" + + +class AutoMergeEnabledEvent(sgqlc.types.Type, Node): + """Represents a 'auto_merge_enabled' event on a given pull request.""" + + __schema__ = github_schema + __field_names__ = ("actor", "created_at", "enabler", "pull_request") + actor = sgqlc.types.Field(Actor, graphql_name="actor") + """Identifies the actor who performed the event.""" + + created_at = sgqlc.types.Field(sgqlc.types.non_null(DateTime), graphql_name="createdAt") + """Identifies the date and time when the object was created.""" + + enabler = sgqlc.types.Field("User", graphql_name="enabler") + """The user who enabled auto-merge for this Pull Request""" + + pull_request = sgqlc.types.Field("PullRequest", graphql_name="pullRequest") + """PullRequest referenced by event.""" + + +class 
AutoRebaseEnabledEvent(sgqlc.types.Type, Node): + """Represents a 'auto_rebase_enabled' event on a given pull request.""" + + __schema__ = github_schema + __field_names__ = ("actor", "created_at", "enabler", "pull_request") + actor = sgqlc.types.Field(Actor, graphql_name="actor") + """Identifies the actor who performed the event.""" + + created_at = sgqlc.types.Field(sgqlc.types.non_null(DateTime), graphql_name="createdAt") + """Identifies the date and time when the object was created.""" + + enabler = sgqlc.types.Field("User", graphql_name="enabler") + """The user who enabled auto-merge (rebase) for this Pull Request""" + + pull_request = sgqlc.types.Field("PullRequest", graphql_name="pullRequest") + """PullRequest referenced by event.""" + + +class AutoSquashEnabledEvent(sgqlc.types.Type, Node): + """Represents a 'auto_squash_enabled' event on a given pull request.""" + + __schema__ = github_schema + __field_names__ = ("actor", "created_at", "enabler", "pull_request") + actor = sgqlc.types.Field(Actor, graphql_name="actor") + """Identifies the actor who performed the event.""" + + created_at = sgqlc.types.Field(sgqlc.types.non_null(DateTime), graphql_name="createdAt") + """Identifies the date and time when the object was created.""" + + enabler = sgqlc.types.Field("User", graphql_name="enabler") + """The user who enabled auto-merge (squash) for this Pull Request""" + + pull_request = sgqlc.types.Field("PullRequest", graphql_name="pullRequest") + """PullRequest referenced by event.""" + + +class AutomaticBaseChangeFailedEvent(sgqlc.types.Type, Node): + """Represents a 'automatic_base_change_failed' event on a given pull + request. 
+ """ + + __schema__ = github_schema + __field_names__ = ("actor", "created_at", "new_base", "old_base", "pull_request") + actor = sgqlc.types.Field(Actor, graphql_name="actor") + """Identifies the actor who performed the event.""" + + created_at = sgqlc.types.Field(sgqlc.types.non_null(DateTime), graphql_name="createdAt") + """Identifies the date and time when the object was created.""" + + new_base = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="newBase") + """The new base for this PR""" + + old_base = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="oldBase") + """The old base for this PR""" + + pull_request = sgqlc.types.Field(sgqlc.types.non_null("PullRequest"), graphql_name="pullRequest") + """PullRequest referenced by event.""" + + +class AutomaticBaseChangeSucceededEvent(sgqlc.types.Type, Node): + """Represents a 'automatic_base_change_succeeded' event on a given + pull request. + """ + + __schema__ = github_schema + __field_names__ = ("actor", "created_at", "new_base", "old_base", "pull_request") + actor = sgqlc.types.Field(Actor, graphql_name="actor") + """Identifies the actor who performed the event.""" + + created_at = sgqlc.types.Field(sgqlc.types.non_null(DateTime), graphql_name="createdAt") + """Identifies the date and time when the object was created.""" + + new_base = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="newBase") + """The new base for this PR""" + + old_base = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="oldBase") + """The old base for this PR""" + + pull_request = sgqlc.types.Field(sgqlc.types.non_null("PullRequest"), graphql_name="pullRequest") + """PullRequest referenced by event.""" + + +class BaseRefChangedEvent(sgqlc.types.Type, Node): + """Represents a 'base_ref_changed' event on a given issue or pull + request. 
+ """ + + __schema__ = github_schema + __field_names__ = ("actor", "created_at", "current_ref_name", "database_id", "previous_ref_name", "pull_request") + actor = sgqlc.types.Field(Actor, graphql_name="actor") + """Identifies the actor who performed the event.""" + + created_at = sgqlc.types.Field(sgqlc.types.non_null(DateTime), graphql_name="createdAt") + """Identifies the date and time when the object was created.""" + + current_ref_name = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="currentRefName") + """Identifies the name of the base ref for the pull request after it + was changed. + """ + + database_id = sgqlc.types.Field(Int, graphql_name="databaseId") + """Identifies the primary key from the database.""" + + previous_ref_name = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="previousRefName") + """Identifies the name of the base ref for the pull request before it + was changed. + """ + + pull_request = sgqlc.types.Field(sgqlc.types.non_null("PullRequest"), graphql_name="pullRequest") + """PullRequest referenced by event.""" + + +class BaseRefDeletedEvent(sgqlc.types.Type, Node): + """Represents a 'base_ref_deleted' event on a given pull request.""" + + __schema__ = github_schema + __field_names__ = ("actor", "base_ref_name", "created_at", "pull_request") + actor = sgqlc.types.Field(Actor, graphql_name="actor") + """Identifies the actor who performed the event.""" + + base_ref_name = sgqlc.types.Field(String, graphql_name="baseRefName") + """Identifies the name of the Ref associated with the + `base_ref_deleted` event. 
+ """ + + created_at = sgqlc.types.Field(sgqlc.types.non_null(DateTime), graphql_name="createdAt") + """Identifies the date and time when the object was created.""" + + pull_request = sgqlc.types.Field("PullRequest", graphql_name="pullRequest") + """PullRequest referenced by event.""" + + +class BaseRefForcePushedEvent(sgqlc.types.Type, Node): + """Represents a 'base_ref_force_pushed' event on a given pull + request. + """ + + __schema__ = github_schema + __field_names__ = ("actor", "after_commit", "before_commit", "created_at", "pull_request", "ref") + actor = sgqlc.types.Field(Actor, graphql_name="actor") + """Identifies the actor who performed the event.""" + + after_commit = sgqlc.types.Field("Commit", graphql_name="afterCommit") + """Identifies the after commit SHA for the 'base_ref_force_pushed' + event. + """ + + before_commit = sgqlc.types.Field("Commit", graphql_name="beforeCommit") + """Identifies the before commit SHA for the 'base_ref_force_pushed' + event. + """ + + created_at = sgqlc.types.Field(sgqlc.types.non_null(DateTime), graphql_name="createdAt") + """Identifies the date and time when the object was created.""" + + pull_request = sgqlc.types.Field(sgqlc.types.non_null("PullRequest"), graphql_name="pullRequest") + """PullRequest referenced by event.""" + + ref = sgqlc.types.Field("Ref", graphql_name="ref") + """Identifies the fully qualified ref name for the + 'base_ref_force_pushed' event. + """ + + +class Blob(sgqlc.types.Type, GitObject, Node): + """Represents a Git blob.""" + + __schema__ = github_schema + __field_names__ = ("byte_size", "is_binary", "is_truncated", "text") + byte_size = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="byteSize") + """Byte size of Blob object""" + + is_binary = sgqlc.types.Field(Boolean, graphql_name="isBinary") + """Indicates whether the Blob is binary or text. Returns null if + unable to determine the encoding. 
+ """ + + is_truncated = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="isTruncated") + """Indicates whether the contents is truncated""" + + text = sgqlc.types.Field(String, graphql_name="text") + """UTF8 text data or null if the Blob is binary""" + + +class Bot(sgqlc.types.Type, Node, Actor, UniformResourceLocatable): + """A special type of user which takes actions on behalf of GitHub + Apps. + """ + + __schema__ = github_schema + __field_names__ = ("created_at", "database_id", "updated_at") + created_at = sgqlc.types.Field(sgqlc.types.non_null(DateTime), graphql_name="createdAt") + """Identifies the date and time when the object was created.""" + + database_id = sgqlc.types.Field(Int, graphql_name="databaseId") + """Identifies the primary key from the database.""" + + updated_at = sgqlc.types.Field(sgqlc.types.non_null(DateTime), graphql_name="updatedAt") + """Identifies the date and time when the object was last updated.""" + + +class BranchProtectionRule(sgqlc.types.Type, Node): + """A branch protection rule.""" + + __schema__ = github_schema + __field_names__ = ( + "allows_deletions", + "allows_force_pushes", + "blocks_creations", + "branch_protection_rule_conflicts", + "bypass_force_push_allowances", + "bypass_pull_request_allowances", + "creator", + "database_id", + "dismisses_stale_reviews", + "is_admin_enforced", + "matching_refs", + "pattern", + "push_allowances", + "repository", + "required_approving_review_count", + "required_status_check_contexts", + "required_status_checks", + "requires_approving_reviews", + "requires_code_owner_reviews", + "requires_commit_signatures", + "requires_conversation_resolution", + "requires_linear_history", + "requires_status_checks", + "requires_strict_status_checks", + "restricts_pushes", + "restricts_review_dismissals", + "review_dismissal_allowances", + ) + allows_deletions = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="allowsDeletions") + """Can this branch be deleted.""" + + 
allows_force_pushes = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="allowsForcePushes") + """Are force pushes allowed on this branch.""" + + blocks_creations = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="blocksCreations") + """Is branch creation a protected operation.""" + + branch_protection_rule_conflicts = sgqlc.types.Field( + sgqlc.types.non_null(BranchProtectionRuleConflictConnection), + graphql_name="branchProtectionRuleConflicts", + args=sgqlc.types.ArgDict( + ( + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ) + ), + ) + """A list of conflicts matching branches protection rule and other + branch protection rules + + Arguments: + + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. + """ + + bypass_force_push_allowances = sgqlc.types.Field( + sgqlc.types.non_null(BypassForcePushAllowanceConnection), + graphql_name="bypassForcePushAllowances", + args=sgqlc.types.ArgDict( + ( + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ) + ), + ) + """A list of actors able to force push for this branch protection + rule. + + Arguments: + + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. 
+ * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. + """ + + bypass_pull_request_allowances = sgqlc.types.Field( + sgqlc.types.non_null(BypassPullRequestAllowanceConnection), + graphql_name="bypassPullRequestAllowances", + args=sgqlc.types.ArgDict( + ( + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ) + ), + ) + """A list of actors able to bypass PRs for this branch protection + rule. + + Arguments: + + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. + """ + + creator = sgqlc.types.Field(Actor, graphql_name="creator") + """The actor who created this branch protection rule.""" + + database_id = sgqlc.types.Field(Int, graphql_name="databaseId") + """Identifies the primary key from the database.""" + + dismisses_stale_reviews = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="dismissesStaleReviews") + """Will new commits pushed to matching branches dismiss pull request + review approvals. 
+ """ + + is_admin_enforced = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="isAdminEnforced") + """Can admins overwrite branch protection.""" + + matching_refs = sgqlc.types.Field( + sgqlc.types.non_null(RefConnection), + graphql_name="matchingRefs", + args=sgqlc.types.ArgDict( + ( + ("query", sgqlc.types.Arg(String, graphql_name="query", default=None)), + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ) + ), + ) + """Repository refs that are protected by this rule + + Arguments: + + * `query` (`String`): Filters refs with query on name + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. + """ + + pattern = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="pattern") + """Identifies the protection rule pattern.""" + + push_allowances = sgqlc.types.Field( + sgqlc.types.non_null(PushAllowanceConnection), + graphql_name="pushAllowances", + args=sgqlc.types.ArgDict( + ( + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ) + ), + ) + """A list push allowances for this branch protection rule. + + Arguments: + + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. 
+ * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. + """ + + repository = sgqlc.types.Field("Repository", graphql_name="repository") + """The repository associated with this branch protection rule.""" + + required_approving_review_count = sgqlc.types.Field(Int, graphql_name="requiredApprovingReviewCount") + """Number of approving reviews required to update matching branches.""" + + required_status_check_contexts = sgqlc.types.Field(sgqlc.types.list_of(String), graphql_name="requiredStatusCheckContexts") + """List of required status check contexts that must pass for commits + to be accepted to matching branches. + """ + + required_status_checks = sgqlc.types.Field( + sgqlc.types.list_of(sgqlc.types.non_null(RequiredStatusCheckDescription)), graphql_name="requiredStatusChecks" + ) + """List of required status checks that must pass for commits to be + accepted to matching branches. 
+ """ + + requires_approving_reviews = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="requiresApprovingReviews") + """Are approving reviews required to update matching branches.""" + + requires_code_owner_reviews = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="requiresCodeOwnerReviews") + """Are reviews from code owners required to update matching branches.""" + + requires_commit_signatures = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="requiresCommitSignatures") + """Are commits required to be signed.""" + + requires_conversation_resolution = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="requiresConversationResolution") + """Are conversations required to be resolved before merging.""" + + requires_linear_history = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="requiresLinearHistory") + """Are merge commits prohibited from being pushed to this branch.""" + + requires_status_checks = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="requiresStatusChecks") + """Are status checks required to update matching branches.""" + + requires_strict_status_checks = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="requiresStrictStatusChecks") + """Are branches required to be up to date before merging.""" + + restricts_pushes = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="restrictsPushes") + """Is pushing to matching branches restricted.""" + + restricts_review_dismissals = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="restrictsReviewDismissals") + """Is dismissal of pull request reviews restricted.""" + + review_dismissal_allowances = sgqlc.types.Field( + sgqlc.types.non_null(ReviewDismissalAllowanceConnection), + graphql_name="reviewDismissalAllowances", + args=sgqlc.types.ArgDict( + ( + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + 
("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ) + ), + ) + """A list review dismissal allowances for this branch protection + rule. + + Arguments: + + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. + """ + + +class BypassForcePushAllowance(sgqlc.types.Type, Node): + """A user, team, or app who has the ability to bypass a force push + requirement on a protected branch. + """ + + __schema__ = github_schema + __field_names__ = ("actor", "branch_protection_rule") + actor = sgqlc.types.Field("BranchActorAllowanceActor", graphql_name="actor") + """The actor that can force push.""" + + branch_protection_rule = sgqlc.types.Field(BranchProtectionRule, graphql_name="branchProtectionRule") + """Identifies the branch protection rule associated with the allowed + user, team, or app. + """ + + +class BypassPullRequestAllowance(sgqlc.types.Type, Node): + """A user, team, or app who has the ability to bypass a pull request + requirement on a protected branch. + """ + + __schema__ = github_schema + __field_names__ = ("actor", "branch_protection_rule") + actor = sgqlc.types.Field("BranchActorAllowanceActor", graphql_name="actor") + """The actor that can bypass.""" + + branch_protection_rule = sgqlc.types.Field(BranchProtectionRule, graphql_name="branchProtectionRule") + """Identifies the branch protection rule associated with the allowed + user, team, or app. 
+ """ + + +class CWE(sgqlc.types.Type, Node): + """A common weakness enumeration""" + + __schema__ = github_schema + __field_names__ = ("cwe_id", "description", "name") + cwe_id = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="cweId") + """The id of the CWE""" + + description = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="description") + """A detailed description of this CWE""" + + name = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="name") + """The name of this CWE""" + + +class CheckRun(sgqlc.types.Type, Node, UniformResourceLocatable, RequirableByPullRequest): + """A check run.""" + + __schema__ = github_schema + __field_names__ = ( + "annotations", + "check_suite", + "completed_at", + "conclusion", + "database_id", + "deployment", + "details_url", + "external_id", + "name", + "pending_deployment_request", + "permalink", + "repository", + "started_at", + "status", + "steps", + "summary", + "text", + "title", + ) + annotations = sgqlc.types.Field( + CheckAnnotationConnection, + graphql_name="annotations", + args=sgqlc.types.ArgDict( + ( + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ) + ), + ) + """The check run's annotations + + Arguments: + + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. 
+ """ + + check_suite = sgqlc.types.Field(sgqlc.types.non_null("CheckSuite"), graphql_name="checkSuite") + """The check suite that this run is a part of.""" + + completed_at = sgqlc.types.Field(DateTime, graphql_name="completedAt") + """Identifies the date and time when the check run was completed.""" + + conclusion = sgqlc.types.Field(CheckConclusionState, graphql_name="conclusion") + """The conclusion of the check run.""" + + database_id = sgqlc.types.Field(Int, graphql_name="databaseId") + """Identifies the primary key from the database.""" + + deployment = sgqlc.types.Field("Deployment", graphql_name="deployment") + """The corresponding deployment for this job, if any""" + + details_url = sgqlc.types.Field(URI, graphql_name="detailsUrl") + """The URL from which to find full details of the check run on the + integrator's site. + """ + + external_id = sgqlc.types.Field(String, graphql_name="externalId") + """A reference for the check run on the integrator's system.""" + + name = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="name") + """The name of the check for this check run.""" + + pending_deployment_request = sgqlc.types.Field(DeploymentRequest, graphql_name="pendingDeploymentRequest") + """Information about a pending deployment, if any, in this check run""" + + permalink = sgqlc.types.Field(sgqlc.types.non_null(URI), graphql_name="permalink") + """The permalink to the check run summary.""" + + repository = sgqlc.types.Field(sgqlc.types.non_null("Repository"), graphql_name="repository") + """The repository associated with this check run.""" + + started_at = sgqlc.types.Field(DateTime, graphql_name="startedAt") + """Identifies the date and time when the check run was started.""" + + status = sgqlc.types.Field(sgqlc.types.non_null(CheckStatusState), graphql_name="status") + """The current status of the check run.""" + + steps = sgqlc.types.Field( + CheckStepConnection, + graphql_name="steps", + args=sgqlc.types.ArgDict( + ( + ("after", 
sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ("number", sgqlc.types.Arg(Int, graphql_name="number", default=None)), + ) + ), + ) + """The check run's steps + + Arguments: + + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. + * `number` (`Int`): Step number + """ + + summary = sgqlc.types.Field(String, graphql_name="summary") + """A string representing the check run's summary""" + + text = sgqlc.types.Field(String, graphql_name="text") + """A string representing the check run's text""" + + title = sgqlc.types.Field(String, graphql_name="title") + """A string representing the check run""" + + +class CheckSuite(sgqlc.types.Type, Node): + """A check suite.""" + + __schema__ = github_schema + __field_names__ = ( + "app", + "branch", + "check_runs", + "commit", + "conclusion", + "created_at", + "creator", + "database_id", + "matching_pull_requests", + "push", + "repository", + "resource_path", + "status", + "updated_at", + "url", + "workflow_run", + ) + app = sgqlc.types.Field(App, graphql_name="app") + """The GitHub App which created this check suite.""" + + branch = sgqlc.types.Field("Ref", graphql_name="branch") + """The name of the branch for this check suite.""" + + check_runs = sgqlc.types.Field( + CheckRunConnection, + graphql_name="checkRuns", + args=sgqlc.types.ArgDict( + ( + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, 
graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ("filter_by", sgqlc.types.Arg(CheckRunFilter, graphql_name="filterBy", default=None)), + ) + ), + ) + """The check runs associated with a check suite. + + Arguments: + + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. + * `filter_by` (`CheckRunFilter`): Filters the check runs by this + type. + """ + + commit = sgqlc.types.Field(sgqlc.types.non_null("Commit"), graphql_name="commit") + """The commit for this check suite""" + + conclusion = sgqlc.types.Field(CheckConclusionState, graphql_name="conclusion") + """The conclusion of this check suite.""" + + created_at = sgqlc.types.Field(sgqlc.types.non_null(DateTime), graphql_name="createdAt") + """Identifies the date and time when the object was created.""" + + creator = sgqlc.types.Field("User", graphql_name="creator") + """The user who triggered the check suite.""" + + database_id = sgqlc.types.Field(Int, graphql_name="databaseId") + """Identifies the primary key from the database.""" + + matching_pull_requests = sgqlc.types.Field( + PullRequestConnection, + graphql_name="matchingPullRequests", + args=sgqlc.types.ArgDict( + ( + ( + "states", + sgqlc.types.Arg(sgqlc.types.list_of(sgqlc.types.non_null(PullRequestState)), graphql_name="states", default=None), + ), + ("labels", sgqlc.types.Arg(sgqlc.types.list_of(sgqlc.types.non_null(String)), graphql_name="labels", default=None)), + ("head_ref_name", sgqlc.types.Arg(String, graphql_name="headRefName", default=None)), + ("base_ref_name", sgqlc.types.Arg(String, graphql_name="baseRefName", default=None)), + ("order_by", sgqlc.types.Arg(IssueOrder, graphql_name="orderBy", default=None)), + 
("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ) + ), + ) + """A list of open pull requests matching the check suite. + + Arguments: + + * `states` (`[PullRequestState!]`): A list of states to filter the + pull requests by. + * `labels` (`[String!]`): A list of label names to filter the pull + requests by. + * `head_ref_name` (`String`): The head ref name to filter the pull + requests by. + * `base_ref_name` (`String`): The base ref name to filter the pull + requests by. + * `order_by` (`IssueOrder`): Ordering options for pull requests + returned from the connection. + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. 
+ """ + + push = sgqlc.types.Field("Push", graphql_name="push") + """The push that triggered this check suite.""" + + repository = sgqlc.types.Field(sgqlc.types.non_null("Repository"), graphql_name="repository") + """The repository associated with this check suite.""" + + resource_path = sgqlc.types.Field(sgqlc.types.non_null(URI), graphql_name="resourcePath") + """The HTTP path for this check suite""" + + status = sgqlc.types.Field(sgqlc.types.non_null(CheckStatusState), graphql_name="status") + """The status of this check suite.""" + + updated_at = sgqlc.types.Field(sgqlc.types.non_null(DateTime), graphql_name="updatedAt") + """Identifies the date and time when the object was last updated.""" + + url = sgqlc.types.Field(sgqlc.types.non_null(URI), graphql_name="url") + """The HTTP URL for this check suite""" + + workflow_run = sgqlc.types.Field("WorkflowRun", graphql_name="workflowRun") + """The workflow run associated with this check suite.""" + + +class ClosedEvent(sgqlc.types.Type, Node, UniformResourceLocatable): + """Represents a 'closed' event on any `Closable`.""" + + __schema__ = github_schema + __field_names__ = ("actor", "closable", "closer", "created_at", "state_reason") + actor = sgqlc.types.Field(Actor, graphql_name="actor") + """Identifies the actor who performed the event.""" + + closable = sgqlc.types.Field(sgqlc.types.non_null(Closable), graphql_name="closable") + """Object that was closed.""" + + closer = sgqlc.types.Field("Closer", graphql_name="closer") + """Object which triggered the creation of this event.""" + + created_at = sgqlc.types.Field(sgqlc.types.non_null(DateTime), graphql_name="createdAt") + """Identifies the date and time when the object was created.""" + + state_reason = sgqlc.types.Field(IssueStateReason, graphql_name="stateReason") + """The reason the issue state was changed to closed.""" + + +class CodeOfConduct(sgqlc.types.Type, Node): + """The Code of Conduct for a repository""" + + __schema__ = github_schema + 
__field_names__ = ("body", "key", "name", "resource_path", "url") + body = sgqlc.types.Field(String, graphql_name="body") + """The body of the Code of Conduct""" + + key = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="key") + """The key for the Code of Conduct""" + + name = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="name") + """The formal name of the Code of Conduct""" + + resource_path = sgqlc.types.Field(URI, graphql_name="resourcePath") + """The HTTP path for this Code of Conduct""" + + url = sgqlc.types.Field(URI, graphql_name="url") + """The HTTP URL for this Code of Conduct""" + + +class CommentDeletedEvent(sgqlc.types.Type, Node): + """Represents a 'comment_deleted' event on a given issue or pull + request. + """ + + __schema__ = github_schema + __field_names__ = ("actor", "created_at", "database_id", "deleted_comment_author") + actor = sgqlc.types.Field(Actor, graphql_name="actor") + """Identifies the actor who performed the event.""" + + created_at = sgqlc.types.Field(sgqlc.types.non_null(DateTime), graphql_name="createdAt") + """Identifies the date and time when the object was created.""" + + database_id = sgqlc.types.Field(Int, graphql_name="databaseId") + """Identifies the primary key from the database.""" + + deleted_comment_author = sgqlc.types.Field(Actor, graphql_name="deletedCommentAuthor") + """The user who authored the deleted comment.""" + + +class Commit(sgqlc.types.Type, Node, GitObject, Subscribable, UniformResourceLocatable): + """Represents a Git commit.""" + + __schema__ = github_schema + __field_names__ = ( + "additions", + "associated_pull_requests", + "author", + "authored_by_committer", + "authored_date", + "authors", + "blame", + "changed_files", + "check_suites", + "comments", + "committed_date", + "committed_via_web", + "committer", + "deletions", + "deployments", + "file", + "history", + "message", + "message_body", + "message_body_html", + "message_headline", + "message_headline_html", + 
"on_behalf_of", + "parents", + "pushed_date", + "signature", + "status", + "status_check_rollup", + "submodules", + "tarball_url", + "tree", + "tree_resource_path", + "tree_url", + "zipball_url", + ) + additions = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="additions") + """The number of additions in this commit.""" + + associated_pull_requests = sgqlc.types.Field( + PullRequestConnection, + graphql_name="associatedPullRequests", + args=sgqlc.types.ArgDict( + ( + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ( + "order_by", + sgqlc.types.Arg(PullRequestOrder, graphql_name="orderBy", default={"field": "CREATED_AT", "direction": "ASC"}), + ), + ) + ), + ) + """The merged Pull Request that introduced the commit to the + repository. If the commit is not present in the default branch, + additionally returns open Pull Requests associated with the commit + + Arguments: + + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. + * `order_by` (`PullRequestOrder`): Ordering options for pull + requests. 
(default: `{field: CREATED_AT, direction: ASC}`) + """ + + author = sgqlc.types.Field(GitActor, graphql_name="author") + """Authorship details of the commit.""" + + authored_by_committer = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="authoredByCommitter") + """Check if the committer and the author match.""" + + authored_date = sgqlc.types.Field(sgqlc.types.non_null(DateTime), graphql_name="authoredDate") + """The datetime when this commit was authored.""" + + authors = sgqlc.types.Field( + sgqlc.types.non_null(GitActorConnection), + graphql_name="authors", + args=sgqlc.types.ArgDict( + ( + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ) + ), + ) + """The list of authors for this commit based on the git author and + the Co-authored-by message trailer. The git author will always be + first. + + Arguments: + + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. + """ + + blame = sgqlc.types.Field( + sgqlc.types.non_null(Blame), + graphql_name="blame", + args=sgqlc.types.ArgDict((("path", sgqlc.types.Arg(sgqlc.types.non_null(String), graphql_name="path", default=None)),)), + ) + """Fetches `git blame` information. + + Arguments: + + * `path` (`String!`): The file whose Git blame information you + want. 
+ """ + + changed_files = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="changedFiles") + """The number of changed files in this commit.""" + + check_suites = sgqlc.types.Field( + CheckSuiteConnection, + graphql_name="checkSuites", + args=sgqlc.types.ArgDict( + ( + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ("filter_by", sgqlc.types.Arg(CheckSuiteFilter, graphql_name="filterBy", default=None)), + ) + ), + ) + """The check suites associated with a commit. + + Arguments: + + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. + * `filter_by` (`CheckSuiteFilter`): Filters the check suites by + this type. + """ + + comments = sgqlc.types.Field( + sgqlc.types.non_null(CommitCommentConnection), + graphql_name="comments", + args=sgqlc.types.ArgDict( + ( + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ) + ), + ) + """Comments made on the commit. + + Arguments: + + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. 
+ """ + + committed_date = sgqlc.types.Field(sgqlc.types.non_null(DateTime), graphql_name="committedDate") + """The datetime when this commit was committed.""" + + committed_via_web = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="committedViaWeb") + """Check if committed via GitHub web UI.""" + + committer = sgqlc.types.Field(GitActor, graphql_name="committer") + """Committer details of the commit.""" + + deletions = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="deletions") + """The number of deletions in this commit.""" + + deployments = sgqlc.types.Field( + DeploymentConnection, + graphql_name="deployments", + args=sgqlc.types.ArgDict( + ( + ( + "environments", + sgqlc.types.Arg(sgqlc.types.list_of(sgqlc.types.non_null(String)), graphql_name="environments", default=None), + ), + ("order_by", sgqlc.types.Arg(DeploymentOrder, graphql_name="orderBy", default={"field": "CREATED_AT", "direction": "ASC"})), + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ) + ), + ) + """The deployments associated with a commit. + + Arguments: + + * `environments` (`[String!]`): Environments to list deployments + for + * `order_by` (`DeploymentOrder`): Ordering options for deployments + returned from the connection. (default: `{field: CREATED_AT, + direction: ASC}`) + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. 
+ """ + + file = sgqlc.types.Field( + TreeEntry, + graphql_name="file", + args=sgqlc.types.ArgDict((("path", sgqlc.types.Arg(sgqlc.types.non_null(String), graphql_name="path", default=None)),)), + ) + """The tree entry representing the file located at the given path. + + Arguments: + + * `path` (`String!`): The path for the file + """ + + history = sgqlc.types.Field( + sgqlc.types.non_null(CommitHistoryConnection), + graphql_name="history", + args=sgqlc.types.ArgDict( + ( + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ("path", sgqlc.types.Arg(String, graphql_name="path", default=None)), + ("author", sgqlc.types.Arg(CommitAuthor, graphql_name="author", default=None)), + ("since", sgqlc.types.Arg(GitTimestamp, graphql_name="since", default=None)), + ("until", sgqlc.types.Arg(GitTimestamp, graphql_name="until", default=None)), + ) + ), + ) + """The linear commit history starting from (and including) this + commit, in the same order as `git log`. + + Arguments: + + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. + * `path` (`String`): If non-null, filters history to only show + commits touching files under this path. + * `author` (`CommitAuthor`): If non-null, filters history to only + show commits with matching authorship. + * `since` (`GitTimestamp`): Allows specifying a beginning time or + date for fetching commits. + * `until` (`GitTimestamp`): Allows specifying an ending time or + date for fetching commits. 
+ """ + + message = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="message") + """The Git commit message""" + + message_body = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="messageBody") + """The Git commit message body""" + + message_body_html = sgqlc.types.Field(sgqlc.types.non_null(HTML), graphql_name="messageBodyHTML") + """The commit message body rendered to HTML.""" + + message_headline = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="messageHeadline") + """The Git commit message headline""" + + message_headline_html = sgqlc.types.Field(sgqlc.types.non_null(HTML), graphql_name="messageHeadlineHTML") + """The commit message headline rendered to HTML.""" + + on_behalf_of = sgqlc.types.Field("Organization", graphql_name="onBehalfOf") + """The organization this commit was made on behalf of.""" + + parents = sgqlc.types.Field( + sgqlc.types.non_null(CommitConnection), + graphql_name="parents", + args=sgqlc.types.ArgDict( + ( + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ) + ), + ) + """The parents of a commit. + + Arguments: + + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. 
+ """ + + pushed_date = sgqlc.types.Field(DateTime, graphql_name="pushedDate") + """The datetime when this commit was pushed.""" + + signature = sgqlc.types.Field(GitSignature, graphql_name="signature") + """Commit signing information, if present.""" + + status = sgqlc.types.Field("Status", graphql_name="status") + """Status information for this commit""" + + status_check_rollup = sgqlc.types.Field("StatusCheckRollup", graphql_name="statusCheckRollup") + """Check and Status rollup information for this commit.""" + + submodules = sgqlc.types.Field( + sgqlc.types.non_null(SubmoduleConnection), + graphql_name="submodules", + args=sgqlc.types.ArgDict( + ( + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ) + ), + ) + """Returns a list of all submodules in this repository as of this + Commit parsed from the .gitmodules file. + + Arguments: + + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. + """ + + tarball_url = sgqlc.types.Field(sgqlc.types.non_null(URI), graphql_name="tarballUrl") + """Returns a URL to download a tarball archive for a repository. + Note: For private repositories, these links are temporary and + expire after five minutes. 
+ """ + + tree = sgqlc.types.Field(sgqlc.types.non_null("Tree"), graphql_name="tree") + """Commit's root Tree""" + + tree_resource_path = sgqlc.types.Field(sgqlc.types.non_null(URI), graphql_name="treeResourcePath") + """The HTTP path for the tree of this commit""" + + tree_url = sgqlc.types.Field(sgqlc.types.non_null(URI), graphql_name="treeUrl") + """The HTTP URL for the tree of this commit""" + + zipball_url = sgqlc.types.Field(sgqlc.types.non_null(URI), graphql_name="zipballUrl") + """Returns a URL to download a zipball archive for a repository. + Note: For private repositories, these links are temporary and + expire after five minutes. + """ + + +class CommitComment(sgqlc.types.Type, Node, Comment, Deletable, Minimizable, Updatable, UpdatableComment, Reactable, RepositoryNode): + """Represents a comment on a given Commit.""" + + __schema__ = github_schema + __field_names__ = ("commit", "path", "position", "resource_path", "url") + commit = sgqlc.types.Field(Commit, graphql_name="commit") + """Identifies the commit associated with the comment, if the commit + exists. 
+ """ + + path = sgqlc.types.Field(String, graphql_name="path") + """Identifies the file path associated with the comment.""" + + position = sgqlc.types.Field(Int, graphql_name="position") + """Identifies the line position associated with the comment.""" + + resource_path = sgqlc.types.Field(sgqlc.types.non_null(URI), graphql_name="resourcePath") + """The HTTP path permalink for this commit comment.""" + + url = sgqlc.types.Field(sgqlc.types.non_null(URI), graphql_name="url") + """The HTTP URL permalink for this commit comment.""" + + +class CommitCommentThread(sgqlc.types.Type, Node, RepositoryNode): + """A thread of comments on a commit.""" + + __schema__ = github_schema + __field_names__ = ("comments", "commit", "path", "position") + comments = sgqlc.types.Field( + sgqlc.types.non_null(CommitCommentConnection), + graphql_name="comments", + args=sgqlc.types.ArgDict( + ( + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ) + ), + ) + """The comments that exist in this thread. + + Arguments: + + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. + """ + + commit = sgqlc.types.Field(Commit, graphql_name="commit") + """The commit the comments were made on.""" + + path = sgqlc.types.Field(String, graphql_name="path") + """The file the comments were made on.""" + + position = sgqlc.types.Field(Int, graphql_name="position") + """The position in the diff for the commit that the comment was made + on. 
+ """ + + +class ConnectedEvent(sgqlc.types.Type, Node): + """Represents a 'connected' event on a given issue or pull request.""" + + __schema__ = github_schema + __field_names__ = ("actor", "created_at", "is_cross_repository", "source", "subject") + actor = sgqlc.types.Field(Actor, graphql_name="actor") + """Identifies the actor who performed the event.""" + + created_at = sgqlc.types.Field(sgqlc.types.non_null(DateTime), graphql_name="createdAt") + """Identifies the date and time when the object was created.""" + + is_cross_repository = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="isCrossRepository") + """Reference originated in a different repository.""" + + source = sgqlc.types.Field(sgqlc.types.non_null("ReferencedSubject"), graphql_name="source") + """Issue or pull request that made the reference.""" + + subject = sgqlc.types.Field(sgqlc.types.non_null("ReferencedSubject"), graphql_name="subject") + """Issue or pull request which was connected.""" + + +class ConvertToDraftEvent(sgqlc.types.Type, Node, UniformResourceLocatable): + """Represents a 'convert_to_draft' event on a given pull request.""" + + __schema__ = github_schema + __field_names__ = ("actor", "created_at", "pull_request") + actor = sgqlc.types.Field(Actor, graphql_name="actor") + """Identifies the actor who performed the event.""" + + created_at = sgqlc.types.Field(sgqlc.types.non_null(DateTime), graphql_name="createdAt") + """Identifies the date and time when the object was created.""" + + pull_request = sgqlc.types.Field(sgqlc.types.non_null("PullRequest"), graphql_name="pullRequest") + """PullRequest referenced by event.""" + + +class ConvertedNoteToIssueEvent(sgqlc.types.Type, Node): + """Represents a 'converted_note_to_issue' event on a given issue or + pull request. 
+ """ + + __schema__ = github_schema + __field_names__ = ("actor", "created_at", "database_id") + actor = sgqlc.types.Field(Actor, graphql_name="actor") + """Identifies the actor who performed the event.""" + + created_at = sgqlc.types.Field(sgqlc.types.non_null(DateTime), graphql_name="createdAt") + """Identifies the date and time when the object was created.""" + + database_id = sgqlc.types.Field(Int, graphql_name="databaseId") + """Identifies the primary key from the database.""" + + +class ConvertedToDiscussionEvent(sgqlc.types.Type, Node): + """Represents a 'converted_to_discussion' event on a given issue.""" + + __schema__ = github_schema + __field_names__ = ("actor", "created_at", "discussion") + actor = sgqlc.types.Field(Actor, graphql_name="actor") + """Identifies the actor who performed the event.""" + + created_at = sgqlc.types.Field(sgqlc.types.non_null(DateTime), graphql_name="createdAt") + """Identifies the date and time when the object was created.""" + + discussion = sgqlc.types.Field("Discussion", graphql_name="discussion") + """The discussion that the issue was converted into.""" + + +class CreatedCommitContribution(sgqlc.types.Type, Contribution): + """Represents the contribution a user made by committing to a + repository. + """ + + __schema__ = github_schema + __field_names__ = ("commit_count", "repository") + commit_count = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="commitCount") + """How many commits were made on this day to this repository by the + user. + """ + + repository = sgqlc.types.Field(sgqlc.types.non_null("Repository"), graphql_name="repository") + """The repository the user made a commit in.""" + + +class CreatedIssueContribution(sgqlc.types.Type, Contribution): + """Represents the contribution a user made on GitHub by opening an + issue. 
+ """ + + __schema__ = github_schema + __field_names__ = ("issue",) + issue = sgqlc.types.Field(sgqlc.types.non_null("Issue"), graphql_name="issue") + """The issue that was opened.""" + + +class CreatedPullRequestContribution(sgqlc.types.Type, Contribution): + """Represents the contribution a user made on GitHub by opening a + pull request. + """ + + __schema__ = github_schema + __field_names__ = ("pull_request",) + pull_request = sgqlc.types.Field(sgqlc.types.non_null("PullRequest"), graphql_name="pullRequest") + """The pull request that was opened.""" + + +class CreatedPullRequestReviewContribution(sgqlc.types.Type, Contribution): + """Represents the contribution a user made by leaving a review on a + pull request. + """ + + __schema__ = github_schema + __field_names__ = ("pull_request", "pull_request_review", "repository") + pull_request = sgqlc.types.Field(sgqlc.types.non_null("PullRequest"), graphql_name="pullRequest") + """The pull request the user reviewed.""" + + pull_request_review = sgqlc.types.Field(sgqlc.types.non_null("PullRequestReview"), graphql_name="pullRequestReview") + """The review the user left on the pull request.""" + + repository = sgqlc.types.Field(sgqlc.types.non_null("Repository"), graphql_name="repository") + """The repository containing the pull request that the user reviewed.""" + + +class CreatedRepositoryContribution(sgqlc.types.Type, Contribution): + """Represents the contribution a user made on GitHub by creating a + repository. 
+ """ + + __schema__ = github_schema + __field_names__ = ("repository",) + repository = sgqlc.types.Field(sgqlc.types.non_null("Repository"), graphql_name="repository") + """The repository that was created.""" + + +class CrossReferencedEvent(sgqlc.types.Type, UniformResourceLocatable, Node): + """Represents a mention made by one issue or pull request to another.""" + + __schema__ = github_schema + __field_names__ = ("actor", "created_at", "is_cross_repository", "referenced_at", "source", "target", "will_close_target") + actor = sgqlc.types.Field(Actor, graphql_name="actor") + """Identifies the actor who performed the event.""" + + created_at = sgqlc.types.Field(sgqlc.types.non_null(DateTime), graphql_name="createdAt") + """Identifies the date and time when the object was created.""" + + is_cross_repository = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="isCrossRepository") + """Reference originated in a different repository.""" + + referenced_at = sgqlc.types.Field(sgqlc.types.non_null(DateTime), graphql_name="referencedAt") + """Identifies when the reference was made.""" + + source = sgqlc.types.Field(sgqlc.types.non_null("ReferencedSubject"), graphql_name="source") + """Issue or pull request that made the reference.""" + + target = sgqlc.types.Field(sgqlc.types.non_null("ReferencedSubject"), graphql_name="target") + """Issue or pull request to which the reference was made.""" + + will_close_target = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="willCloseTarget") + """Checks if the target will be closed when the source is merged.""" + + +class DemilestonedEvent(sgqlc.types.Type, Node): + """Represents a 'demilestoned' event on a given issue or pull + request. 
+ """ + + __schema__ = github_schema + __field_names__ = ("actor", "created_at", "milestone_title", "subject") + actor = sgqlc.types.Field(Actor, graphql_name="actor") + """Identifies the actor who performed the event.""" + + created_at = sgqlc.types.Field(sgqlc.types.non_null(DateTime), graphql_name="createdAt") + """Identifies the date and time when the object was created.""" + + milestone_title = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="milestoneTitle") + """Identifies the milestone title associated with the 'demilestoned' + event. + """ + + subject = sgqlc.types.Field(sgqlc.types.non_null("MilestoneItem"), graphql_name="subject") + """Object referenced by event.""" + + +class DependabotUpdate(sgqlc.types.Type, RepositoryNode): + """A Dependabot Update for a dependency in a repository""" + + __schema__ = github_schema + __field_names__ = ("error", "pull_request") + error = sgqlc.types.Field(DependabotUpdateError, graphql_name="error") + """The error from a dependency update""" + + pull_request = sgqlc.types.Field("PullRequest", graphql_name="pullRequest") + """The associated pull request""" + + +class DeployKey(sgqlc.types.Type, Node): + """A repository deploy key.""" + + __schema__ = github_schema + __field_names__ = ("created_at", "key", "read_only", "title", "verified") + created_at = sgqlc.types.Field(sgqlc.types.non_null(DateTime), graphql_name="createdAt") + """Identifies the date and time when the object was created.""" + + key = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="key") + """The deploy key.""" + + read_only = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="readOnly") + """Whether or not the deploy key is read only.""" + + title = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="title") + """The deploy key title.""" + + verified = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="verified") + """Whether or not the deploy key has been verified.""" + + +class 
DeployedEvent(sgqlc.types.Type, Node): + """Represents a 'deployed' event on a given pull request.""" + + __schema__ = github_schema + __field_names__ = ("actor", "created_at", "database_id", "deployment", "pull_request", "ref") + actor = sgqlc.types.Field(Actor, graphql_name="actor") + """Identifies the actor who performed the event.""" + + created_at = sgqlc.types.Field(sgqlc.types.non_null(DateTime), graphql_name="createdAt") + """Identifies the date and time when the object was created.""" + + database_id = sgqlc.types.Field(Int, graphql_name="databaseId") + """Identifies the primary key from the database.""" + + deployment = sgqlc.types.Field(sgqlc.types.non_null("Deployment"), graphql_name="deployment") + """The deployment associated with the 'deployed' event.""" + + pull_request = sgqlc.types.Field(sgqlc.types.non_null("PullRequest"), graphql_name="pullRequest") + """PullRequest referenced by event.""" + + ref = sgqlc.types.Field("Ref", graphql_name="ref") + """The ref associated with the 'deployed' event.""" + + +class Deployment(sgqlc.types.Type, Node): + """Represents triggered deployment instance.""" + + __schema__ = github_schema + __field_names__ = ( + "commit", + "commit_oid", + "created_at", + "creator", + "database_id", + "description", + "environment", + "latest_environment", + "latest_status", + "original_environment", + "payload", + "ref", + "repository", + "state", + "statuses", + "task", + "updated_at", + ) + commit = sgqlc.types.Field(Commit, graphql_name="commit") + """Identifies the commit sha of the deployment.""" + + commit_oid = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="commitOid") + """Identifies the oid of the deployment commit, even if the commit + has been deleted. 
+ """ + + created_at = sgqlc.types.Field(sgqlc.types.non_null(DateTime), graphql_name="createdAt") + """Identifies the date and time when the object was created.""" + + creator = sgqlc.types.Field(sgqlc.types.non_null(Actor), graphql_name="creator") + """Identifies the actor who triggered the deployment.""" + + database_id = sgqlc.types.Field(Int, graphql_name="databaseId") + """Identifies the primary key from the database.""" + + description = sgqlc.types.Field(String, graphql_name="description") + """The deployment description.""" + + environment = sgqlc.types.Field(String, graphql_name="environment") + """The latest environment to which this deployment was made.""" + + latest_environment = sgqlc.types.Field(String, graphql_name="latestEnvironment") + """The latest environment to which this deployment was made.""" + + latest_status = sgqlc.types.Field("DeploymentStatus", graphql_name="latestStatus") + """The latest status of this deployment.""" + + original_environment = sgqlc.types.Field(String, graphql_name="originalEnvironment") + """The original environment to which this deployment was made.""" + + payload = sgqlc.types.Field(String, graphql_name="payload") + """Extra information that a deployment system might need.""" + + ref = sgqlc.types.Field("Ref", graphql_name="ref") + """Identifies the Ref of the deployment, if the deployment was + created by ref. 
+ """ + + repository = sgqlc.types.Field(sgqlc.types.non_null("Repository"), graphql_name="repository") + """Identifies the repository associated with the deployment.""" + + state = sgqlc.types.Field(DeploymentState, graphql_name="state") + """The current state of the deployment.""" + + statuses = sgqlc.types.Field( + DeploymentStatusConnection, + graphql_name="statuses", + args=sgqlc.types.ArgDict( + ( + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ) + ), + ) + """A list of statuses associated with the deployment. + + Arguments: + + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. + """ + + task = sgqlc.types.Field(String, graphql_name="task") + """The deployment task.""" + + updated_at = sgqlc.types.Field(sgqlc.types.non_null(DateTime), graphql_name="updatedAt") + """Identifies the date and time when the object was last updated.""" + + +class DeploymentEnvironmentChangedEvent(sgqlc.types.Type, Node): + """Represents a 'deployment_environment_changed' event on a given + pull request. 
+ """ + + __schema__ = github_schema + __field_names__ = ("actor", "created_at", "deployment_status", "pull_request") + actor = sgqlc.types.Field(Actor, graphql_name="actor") + """Identifies the actor who performed the event.""" + + created_at = sgqlc.types.Field(sgqlc.types.non_null(DateTime), graphql_name="createdAt") + """Identifies the date and time when the object was created.""" + + deployment_status = sgqlc.types.Field(sgqlc.types.non_null("DeploymentStatus"), graphql_name="deploymentStatus") + """The deployment status that updated the deployment environment.""" + + pull_request = sgqlc.types.Field(sgqlc.types.non_null("PullRequest"), graphql_name="pullRequest") + """PullRequest referenced by event.""" + + +class DeploymentReview(sgqlc.types.Type, Node): + """A deployment review.""" + + __schema__ = github_schema + __field_names__ = ("comment", "database_id", "environments", "state", "user") + comment = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="comment") + """The comment the user left.""" + + database_id = sgqlc.types.Field(Int, graphql_name="databaseId") + """Identifies the primary key from the database.""" + + environments = sgqlc.types.Field( + sgqlc.types.non_null(EnvironmentConnection), + graphql_name="environments", + args=sgqlc.types.ArgDict( + ( + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ) + ), + ) + """The environments approved or rejected + + Arguments: + + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. 
+ """ + + state = sgqlc.types.Field(sgqlc.types.non_null(DeploymentReviewState), graphql_name="state") + """The decision of the user.""" + + user = sgqlc.types.Field(sgqlc.types.non_null("User"), graphql_name="user") + """The user that reviewed the deployment.""" + + +class DeploymentStatus(sgqlc.types.Type, Node): + """Describes the status of a given deployment attempt.""" + + __schema__ = github_schema + __field_names__ = ("created_at", "creator", "deployment", "description", "environment_url", "log_url", "state", "updated_at") + created_at = sgqlc.types.Field(sgqlc.types.non_null(DateTime), graphql_name="createdAt") + """Identifies the date and time when the object was created.""" + + creator = sgqlc.types.Field(sgqlc.types.non_null(Actor), graphql_name="creator") + """Identifies the actor who triggered the deployment.""" + + deployment = sgqlc.types.Field(sgqlc.types.non_null(Deployment), graphql_name="deployment") + """Identifies the deployment associated with status.""" + + description = sgqlc.types.Field(String, graphql_name="description") + """Identifies the description of the deployment.""" + + environment_url = sgqlc.types.Field(URI, graphql_name="environmentUrl") + """Identifies the environment URL of the deployment.""" + + log_url = sgqlc.types.Field(URI, graphql_name="logUrl") + """Identifies the log URL of the deployment.""" + + state = sgqlc.types.Field(sgqlc.types.non_null(DeploymentStatusState), graphql_name="state") + """Identifies the current state of the deployment.""" + + updated_at = sgqlc.types.Field(sgqlc.types.non_null(DateTime), graphql_name="updatedAt") + """Identifies the date and time when the object was last updated.""" + + +class DisconnectedEvent(sgqlc.types.Type, Node): + """Represents a 'disconnected' event on a given issue or pull + request. 
+ """ + + __schema__ = github_schema + __field_names__ = ("actor", "created_at", "is_cross_repository", "source", "subject") + actor = sgqlc.types.Field(Actor, graphql_name="actor") + """Identifies the actor who performed the event.""" + + created_at = sgqlc.types.Field(sgqlc.types.non_null(DateTime), graphql_name="createdAt") + """Identifies the date and time when the object was created.""" + + is_cross_repository = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="isCrossRepository") + """Reference originated in a different repository.""" + + source = sgqlc.types.Field(sgqlc.types.non_null("ReferencedSubject"), graphql_name="source") + """Issue or pull request from which the issue was disconnected.""" + + subject = sgqlc.types.Field(sgqlc.types.non_null("ReferencedSubject"), graphql_name="subject") + """Issue or pull request which was disconnected.""" + + +class Discussion( + sgqlc.types.Type, Comment, Updatable, Deletable, Labelable, Lockable, RepositoryNode, Subscribable, Reactable, Votable, Node +): + """A discussion in a repository.""" + + __schema__ = github_schema + __field_names__ = ( + "answer", + "answer_chosen_at", + "answer_chosen_by", + "category", + "comments", + "number", + "poll", + "resource_path", + "title", + "url", + ) + answer = sgqlc.types.Field("DiscussionComment", graphql_name="answer") + """The comment chosen as this discussion's answer, if any.""" + + answer_chosen_at = sgqlc.types.Field(DateTime, graphql_name="answerChosenAt") + """The time when a user chose this discussion's answer, if answered.""" + + answer_chosen_by = sgqlc.types.Field(Actor, graphql_name="answerChosenBy") + """The user who chose this discussion's answer, if answered.""" + + category = sgqlc.types.Field(sgqlc.types.non_null("DiscussionCategory"), graphql_name="category") + """The category for this discussion.""" + + comments = sgqlc.types.Field( + sgqlc.types.non_null(DiscussionCommentConnection), + graphql_name="comments", + args=sgqlc.types.ArgDict( + 
( + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ) + ), + ) + """The replies to the discussion. + + Arguments: + + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. + """ + + number = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="number") + """The number identifying this discussion within the repository.""" + + poll = sgqlc.types.Field("DiscussionPoll", graphql_name="poll") + """The poll associated with this discussion, if one exists.""" + + resource_path = sgqlc.types.Field(sgqlc.types.non_null(URI), graphql_name="resourcePath") + """The path for this discussion.""" + + title = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="title") + """The title of this discussion.""" + + url = sgqlc.types.Field(sgqlc.types.non_null(URI), graphql_name="url") + """The URL for this discussion.""" + + +class DiscussionCategory(sgqlc.types.Type, Node, RepositoryNode): + """A category for discussions in a repository.""" + + __schema__ = github_schema + __field_names__ = ("created_at", "description", "emoji", "emoji_html", "is_answerable", "name", "updated_at") + created_at = sgqlc.types.Field(sgqlc.types.non_null(DateTime), graphql_name="createdAt") + """Identifies the date and time when the object was created.""" + + description = sgqlc.types.Field(String, graphql_name="description") + """A description of this category.""" + + emoji = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="emoji") + """An emoji representing this 
category.""" + + emoji_html = sgqlc.types.Field(sgqlc.types.non_null(HTML), graphql_name="emojiHTML") + """This category's emoji rendered as HTML.""" + + is_answerable = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="isAnswerable") + """Whether or not discussions in this category support choosing an + answer with the markDiscussionCommentAsAnswer mutation. + """ + + name = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="name") + """The name of this category.""" + + updated_at = sgqlc.types.Field(sgqlc.types.non_null(DateTime), graphql_name="updatedAt") + """Identifies the date and time when the object was last updated.""" + + +class DiscussionComment(sgqlc.types.Type, Comment, Deletable, Minimizable, Updatable, UpdatableComment, Reactable, Votable, Node): + """A comment on a discussion.""" + + __schema__ = github_schema + __field_names__ = ( + "deleted_at", + "discussion", + "is_answer", + "replies", + "reply_to", + "resource_path", + "url", + "viewer_can_mark_as_answer", + "viewer_can_unmark_as_answer", + ) + deleted_at = sgqlc.types.Field(DateTime, graphql_name="deletedAt") + """The time when this replied-to comment was deleted""" + + discussion = sgqlc.types.Field(Discussion, graphql_name="discussion") + """The discussion this comment was created in""" + + is_answer = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="isAnswer") + """Has this comment been chosen as the answer of its discussion?""" + + replies = sgqlc.types.Field( + sgqlc.types.non_null(DiscussionCommentConnection), + graphql_name="replies", + args=sgqlc.types.ArgDict( + ( + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ) + ), + ) + """The threaded replies to this comment. 
+ + Arguments: + + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. + """ + + reply_to = sgqlc.types.Field("DiscussionComment", graphql_name="replyTo") + """The discussion comment this comment is a reply to""" + + resource_path = sgqlc.types.Field(sgqlc.types.non_null(URI), graphql_name="resourcePath") + """The path for this discussion comment.""" + + url = sgqlc.types.Field(sgqlc.types.non_null(URI), graphql_name="url") + """The URL for this discussion comment.""" + + viewer_can_mark_as_answer = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="viewerCanMarkAsAnswer") + """Can the current user mark this comment as an answer?""" + + viewer_can_unmark_as_answer = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="viewerCanUnmarkAsAnswer") + """Can the current user unmark this comment as an answer?""" + + +class DiscussionPoll(sgqlc.types.Type, Node): + """A poll for a discussion.""" + + __schema__ = github_schema + __field_names__ = ("discussion", "options", "question", "total_vote_count", "viewer_can_vote", "viewer_has_voted") + discussion = sgqlc.types.Field(Discussion, graphql_name="discussion") + """The discussion that this poll belongs to.""" + + options = sgqlc.types.Field( + DiscussionPollOptionConnection, + graphql_name="options", + args=sgqlc.types.ArgDict( + ( + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ( + "order_by", + sgqlc.types.Arg( + DiscussionPollOptionOrder, graphql_name="orderBy", default={"field": 
"AUTHORED_ORDER", "direction": "ASC"} + ), + ), + ) + ), + ) + """The options for this poll. + + Arguments: + + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. + * `order_by` (`DiscussionPollOptionOrder`): How to order the + options for the discussion poll. (default: `{field: + AUTHORED_ORDER, direction: ASC}`) + """ + + question = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="question") + """The question that is being asked by this poll.""" + + total_vote_count = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="totalVoteCount") + """The total number of votes that have been cast for this poll.""" + + viewer_can_vote = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="viewerCanVote") + """Indicates if the viewer has permission to vote in this poll.""" + + viewer_has_voted = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="viewerHasVoted") + """Indicates if the viewer has voted for any option in this poll.""" + + +class DiscussionPollOption(sgqlc.types.Type, Node): + """An option for a discussion poll.""" + + __schema__ = github_schema + __field_names__ = ("option", "poll", "total_vote_count", "viewer_has_voted") + option = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="option") + """The text for this option.""" + + poll = sgqlc.types.Field(DiscussionPoll, graphql_name="poll") + """The discussion poll that this option belongs to.""" + + total_vote_count = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="totalVoteCount") + """The total number of votes that have been cast for this option.""" + + viewer_has_voted = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="viewerHasVoted") + """Indicates if the 
viewer has voted for this option in the poll.""" + + +class DraftIssue(sgqlc.types.Type, Node): + """A draft issue within a project.""" + + __schema__ = github_schema + __field_names__ = ( + "assignees", + "body", + "body_html", + "body_text", + "created_at", + "creator", + "project", + "project_item", + "title", + "updated_at", + ) + assignees = sgqlc.types.Field( + sgqlc.types.non_null(UserConnection), + graphql_name="assignees", + args=sgqlc.types.ArgDict( + ( + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ) + ), + ) + """A list of users to assigned to this draft issue. + + Arguments: + + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. 
+ """ + + body = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="body") + """The body of the draft issue.""" + + body_html = sgqlc.types.Field(sgqlc.types.non_null(HTML), graphql_name="bodyHTML") + """The body of the draft issue rendered to HTML.""" + + body_text = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="bodyText") + """The body of the draft issue rendered to text.""" + + created_at = sgqlc.types.Field(sgqlc.types.non_null(DateTime), graphql_name="createdAt") + """Identifies the date and time when the object was created.""" + + creator = sgqlc.types.Field(Actor, graphql_name="creator") + """The actor who created this draft issue.""" + + project = sgqlc.types.Field(sgqlc.types.non_null("ProjectNext"), graphql_name="project") + """The project (beta) that contains this draft issue.""" + + project_item = sgqlc.types.Field(sgqlc.types.non_null("ProjectNextItem"), graphql_name="projectItem") + """The project (beta) item that wraps this draft issue.""" + + title = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="title") + """The title of the draft issue""" + + updated_at = sgqlc.types.Field(sgqlc.types.non_null(DateTime), graphql_name="updatedAt") + """Identifies the date and time when the object was last updated.""" + + +class Enterprise(sgqlc.types.Type, Node): + """An account to manage multiple organizations with consolidated + policy and billing. + """ + + __schema__ = github_schema + __field_names__ = ( + "avatar_url", + "billing_info", + "created_at", + "database_id", + "description", + "description_html", + "location", + "members", + "name", + "organizations", + "owner_info", + "resource_path", + "slug", + "url", + "viewer_is_admin", + "website_url", + ) + avatar_url = sgqlc.types.Field( + sgqlc.types.non_null(URI), + graphql_name="avatarUrl", + args=sgqlc.types.ArgDict((("size", sgqlc.types.Arg(Int, graphql_name="size", default=None)),)), + ) + """A URL pointing to the enterprise's public avatar. 
+ + Arguments: + + * `size` (`Int`): The size of the resulting square image. + """ + + billing_info = sgqlc.types.Field(EnterpriseBillingInfo, graphql_name="billingInfo") + """Enterprise billing informationĀ visible to enterprise billing + managers. + """ + + created_at = sgqlc.types.Field(sgqlc.types.non_null(DateTime), graphql_name="createdAt") + """Identifies the date and time when the object was created.""" + + database_id = sgqlc.types.Field(Int, graphql_name="databaseId") + """Identifies the primary key from the database.""" + + description = sgqlc.types.Field(String, graphql_name="description") + """The description of the enterprise.""" + + description_html = sgqlc.types.Field(sgqlc.types.non_null(HTML), graphql_name="descriptionHTML") + """The description of the enterprise as HTML.""" + + location = sgqlc.types.Field(String, graphql_name="location") + """The location of the enterprise.""" + + members = sgqlc.types.Field( + sgqlc.types.non_null(EnterpriseMemberConnection), + graphql_name="members", + args=sgqlc.types.ArgDict( + ( + ( + "organization_logins", + sgqlc.types.Arg(sgqlc.types.list_of(sgqlc.types.non_null(String)), graphql_name="organizationLogins", default=None), + ), + ("query", sgqlc.types.Arg(String, graphql_name="query", default=None)), + ( + "order_by", + sgqlc.types.Arg(EnterpriseMemberOrder, graphql_name="orderBy", default={"field": "LOGIN", "direction": "ASC"}), + ), + ("role", sgqlc.types.Arg(EnterpriseUserAccountMembershipRole, graphql_name="role", default=None)), + ("deployment", sgqlc.types.Arg(EnterpriseUserDeployment, graphql_name="deployment", default=None)), + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ) + ), + ) + """A list of users who are members of this enterprise. 
+ + Arguments: + + * `organization_logins` (`[String!]`): Only return members within + the organizations with these logins + * `query` (`String`): The search string to look for. + * `order_by` (`EnterpriseMemberOrder`): Ordering options for + members returned from the connection. (default: `{field: LOGIN, + direction: ASC}`) + * `role` (`EnterpriseUserAccountMembershipRole`): The role of the + user in the enterprise organization or server. + * `deployment` (`EnterpriseUserDeployment`): Only return members + within the selected GitHub Enterprise deployment + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. + """ + + name = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="name") + """The name of the enterprise.""" + + organizations = sgqlc.types.Field( + sgqlc.types.non_null(OrganizationConnection), + graphql_name="organizations", + args=sgqlc.types.ArgDict( + ( + ("query", sgqlc.types.Arg(String, graphql_name="query", default=None)), + ("viewer_organization_role", sgqlc.types.Arg(RoleInOrganization, graphql_name="viewerOrganizationRole", default=None)), + ("order_by", sgqlc.types.Arg(OrganizationOrder, graphql_name="orderBy", default={"field": "LOGIN", "direction": "ASC"})), + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ) + ), + ) + """A list of organizations that belong to this enterprise. + + Arguments: + + * `query` (`String`): The search string to look for. 
+ * `viewer_organization_role` (`RoleInOrganization`): The viewer's + role in an organization. + * `order_by` (`OrganizationOrder`): Ordering options for + organizations returned from the connection. (default: `{field: + LOGIN, direction: ASC}`) + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. + """ + + owner_info = sgqlc.types.Field(EnterpriseOwnerInfo, graphql_name="ownerInfo") + """Enterprise information only visible to enterprise owners.""" + + resource_path = sgqlc.types.Field(sgqlc.types.non_null(URI), graphql_name="resourcePath") + """The HTTP path for this enterprise.""" + + slug = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="slug") + """The URL-friendly identifier for the enterprise.""" + + url = sgqlc.types.Field(sgqlc.types.non_null(URI), graphql_name="url") + """The HTTP URL for this enterprise.""" + + viewer_is_admin = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="viewerIsAdmin") + """Is the current viewer an admin of this enterprise?""" + + website_url = sgqlc.types.Field(URI, graphql_name="websiteUrl") + """The URL of the enterprise website.""" + + +class EnterpriseAdministratorInvitation(sgqlc.types.Type, Node): + """An invitation for a user to become an owner or billing manager of + an enterprise. 
+ """ + + __schema__ = github_schema + __field_names__ = ("created_at", "email", "enterprise", "invitee", "inviter", "role") + created_at = sgqlc.types.Field(sgqlc.types.non_null(DateTime), graphql_name="createdAt") + """Identifies the date and time when the object was created.""" + + email = sgqlc.types.Field(String, graphql_name="email") + """The email of the person who was invited to the enterprise.""" + + enterprise = sgqlc.types.Field(sgqlc.types.non_null(Enterprise), graphql_name="enterprise") + """The enterprise the invitation is for.""" + + invitee = sgqlc.types.Field("User", graphql_name="invitee") + """The user who was invited to the enterprise.""" + + inviter = sgqlc.types.Field("User", graphql_name="inviter") + """The user who created the invitation.""" + + role = sgqlc.types.Field(sgqlc.types.non_null(EnterpriseAdministratorRole), graphql_name="role") + """The invitee's pending role in the enterprise (owner or + billing_manager). + """ + + +class EnterpriseIdentityProvider(sgqlc.types.Type, Node): + """An identity provider configured to provision identities for an + enterprise. + """ + + __schema__ = github_schema + __field_names__ = ( + "digest_method", + "enterprise", + "external_identities", + "idp_certificate", + "issuer", + "recovery_codes", + "signature_method", + "sso_url", + ) + digest_method = sgqlc.types.Field(SamlDigestAlgorithm, graphql_name="digestMethod") + """The digest algorithm used to sign SAML requests for the identity + provider. 
+ """ + + enterprise = sgqlc.types.Field(Enterprise, graphql_name="enterprise") + """The enterprise this identity provider belongs to.""" + + external_identities = sgqlc.types.Field( + sgqlc.types.non_null(ExternalIdentityConnection), + graphql_name="externalIdentities", + args=sgqlc.types.ArgDict( + ( + ("members_only", sgqlc.types.Arg(Boolean, graphql_name="membersOnly", default=None)), + ("login", sgqlc.types.Arg(String, graphql_name="login", default=None)), + ("user_name", sgqlc.types.Arg(String, graphql_name="userName", default=None)), + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ) + ), + ) + """ExternalIdentities provisioned by this identity provider. + + Arguments: + + * `members_only` (`Boolean`): Filter to external identities with + valid org membership only + * `login` (`String`): Filter to external identities with the users + login + * `user_name` (`String`): Filter to external identities with the + users userName/NameID attribute + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. + """ + + idp_certificate = sgqlc.types.Field(X509Certificate, graphql_name="idpCertificate") + """The x509 certificate used by the identity provider to sign + assertions and responses. 
+ """ + + issuer = sgqlc.types.Field(String, graphql_name="issuer") + """The Issuer Entity ID for the SAML identity provider.""" + + recovery_codes = sgqlc.types.Field(sgqlc.types.list_of(sgqlc.types.non_null(String)), graphql_name="recoveryCodes") + """Recovery codes that can be used by admins to access the enterprise + if the identity provider is unavailable. + """ + + signature_method = sgqlc.types.Field(SamlSignatureAlgorithm, graphql_name="signatureMethod") + """The signature algorithm used to sign SAML requests for the + identity provider. + """ + + sso_url = sgqlc.types.Field(URI, graphql_name="ssoUrl") + """The URL endpoint for the identity provider's SAML SSO.""" + + +class EnterpriseRepositoryInfo(sgqlc.types.Type, Node): + """A subset of repository information queryable from an enterprise.""" + + __schema__ = github_schema + __field_names__ = ("is_private", "name", "name_with_owner") + is_private = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="isPrivate") + """Identifies if the repository is private or internal.""" + + name = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="name") + """The repository's name.""" + + name_with_owner = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="nameWithOwner") + """The repository's name with owner.""" + + +class EnterpriseServerInstallation(sgqlc.types.Type, Node): + """An Enterprise Server installation.""" + + __schema__ = github_schema + __field_names__ = ("created_at", "customer_name", "host_name", "is_connected", "updated_at", "user_accounts", "user_accounts_uploads") + created_at = sgqlc.types.Field(sgqlc.types.non_null(DateTime), graphql_name="createdAt") + """Identifies the date and time when the object was created.""" + + customer_name = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="customerName") + """The customer name to which the Enterprise Server installation + belongs. 
+ """ + + host_name = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="hostName") + """The host name of the Enterprise Server installation.""" + + is_connected = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="isConnected") + """Whether or not the installation is connected to an Enterprise + Server installation via GitHub Connect. + """ + + updated_at = sgqlc.types.Field(sgqlc.types.non_null(DateTime), graphql_name="updatedAt") + """Identifies the date and time when the object was last updated.""" + + user_accounts = sgqlc.types.Field( + sgqlc.types.non_null(EnterpriseServerUserAccountConnection), + graphql_name="userAccounts", + args=sgqlc.types.ArgDict( + ( + ( + "order_by", + sgqlc.types.Arg( + EnterpriseServerUserAccountOrder, graphql_name="orderBy", default={"field": "LOGIN", "direction": "ASC"} + ), + ), + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ) + ), + ) + """User accounts on this Enterprise Server installation. + + Arguments: + + * `order_by` (`EnterpriseServerUserAccountOrder`): Ordering + options for Enterprise Server user accounts returned from the + connection. (default: `{field: LOGIN, direction: ASC}`) + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. 
+ """ + + user_accounts_uploads = sgqlc.types.Field( + sgqlc.types.non_null(EnterpriseServerUserAccountsUploadConnection), + graphql_name="userAccountsUploads", + args=sgqlc.types.ArgDict( + ( + ( + "order_by", + sgqlc.types.Arg( + EnterpriseServerUserAccountsUploadOrder, + graphql_name="orderBy", + default={"field": "CREATED_AT", "direction": "DESC"}, + ), + ), + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ) + ), + ) + """User accounts uploads for the Enterprise Server installation. + + Arguments: + + * `order_by` (`EnterpriseServerUserAccountsUploadOrder`): Ordering + options for Enterprise Server user accounts uploads returned + from the connection. (default: `{field: CREATED_AT, direction: + DESC}`) + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. 
+ """ + + +class EnterpriseServerUserAccount(sgqlc.types.Type, Node): + """A user account on an Enterprise Server installation.""" + + __schema__ = github_schema + __field_names__ = ( + "created_at", + "emails", + "enterprise_server_installation", + "is_site_admin", + "login", + "profile_name", + "remote_created_at", + "remote_user_id", + "updated_at", + ) + created_at = sgqlc.types.Field(sgqlc.types.non_null(DateTime), graphql_name="createdAt") + """Identifies the date and time when the object was created.""" + + emails = sgqlc.types.Field( + sgqlc.types.non_null(EnterpriseServerUserAccountEmailConnection), + graphql_name="emails", + args=sgqlc.types.ArgDict( + ( + ( + "order_by", + sgqlc.types.Arg( + EnterpriseServerUserAccountEmailOrder, graphql_name="orderBy", default={"field": "EMAIL", "direction": "ASC"} + ), + ), + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ) + ), + ) + """User emails belonging to this user account. + + Arguments: + + * `order_by` (`EnterpriseServerUserAccountEmailOrder`): Ordering + options for Enterprise Server user account emails returned from + the connection. (default: `{field: EMAIL, direction: ASC}`) + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. + """ + + enterprise_server_installation = sgqlc.types.Field( + sgqlc.types.non_null(EnterpriseServerInstallation), graphql_name="enterpriseServerInstallation" + ) + """The Enterprise Server installation on which this user account + exists. 
+ """ + + is_site_admin = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="isSiteAdmin") + """Whether the user account is a site administrator on the Enterprise + Server installation. + """ + + login = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="login") + """The login of the user account on the Enterprise Server + installation. + """ + + profile_name = sgqlc.types.Field(String, graphql_name="profileName") + """The profile name of the user account on the Enterprise Server + installation. + """ + + remote_created_at = sgqlc.types.Field(sgqlc.types.non_null(DateTime), graphql_name="remoteCreatedAt") + """The date and time when the user account was created on the + Enterprise Server installation. + """ + + remote_user_id = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="remoteUserId") + """The ID of the user account on the Enterprise Server installation.""" + + updated_at = sgqlc.types.Field(sgqlc.types.non_null(DateTime), graphql_name="updatedAt") + """Identifies the date and time when the object was last updated.""" + + +class EnterpriseServerUserAccountEmail(sgqlc.types.Type, Node): + """An email belonging to a user account on an Enterprise Server + installation. + """ + + __schema__ = github_schema + __field_names__ = ("created_at", "email", "is_primary", "updated_at", "user_account") + created_at = sgqlc.types.Field(sgqlc.types.non_null(DateTime), graphql_name="createdAt") + """Identifies the date and time when the object was created.""" + + email = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="email") + """The email address.""" + + is_primary = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="isPrimary") + """Indicates whether this is the primary email of the associated user + account. 
+ """ + + updated_at = sgqlc.types.Field(sgqlc.types.non_null(DateTime), graphql_name="updatedAt") + """Identifies the date and time when the object was last updated.""" + + user_account = sgqlc.types.Field(sgqlc.types.non_null(EnterpriseServerUserAccount), graphql_name="userAccount") + """The user account to which the email belongs.""" + + +class EnterpriseServerUserAccountsUpload(sgqlc.types.Type, Node): + """A user accounts upload from an Enterprise Server installation.""" + + __schema__ = github_schema + __field_names__ = ("created_at", "enterprise", "enterprise_server_installation", "name", "sync_state", "updated_at") + created_at = sgqlc.types.Field(sgqlc.types.non_null(DateTime), graphql_name="createdAt") + """Identifies the date and time when the object was created.""" + + enterprise = sgqlc.types.Field(sgqlc.types.non_null(Enterprise), graphql_name="enterprise") + """The enterprise to which this upload belongs.""" + + enterprise_server_installation = sgqlc.types.Field( + sgqlc.types.non_null(EnterpriseServerInstallation), graphql_name="enterpriseServerInstallation" + ) + """The Enterprise Server installation for which this upload was + generated. + """ + + name = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="name") + """The name of the file uploaded.""" + + sync_state = sgqlc.types.Field(sgqlc.types.non_null(EnterpriseServerUserAccountsUploadSyncState), graphql_name="syncState") + """The synchronization state of the upload""" + + updated_at = sgqlc.types.Field(sgqlc.types.non_null(DateTime), graphql_name="updatedAt") + """Identifies the date and time when the object was last updated.""" + + +class EnterpriseUserAccount(sgqlc.types.Type, Actor, Node): + """An account for a user who is an admin of an enterprise or a member + of an enterprise through one or more organizations. 
+ """ + + __schema__ = github_schema + __field_names__ = ("created_at", "enterprise", "name", "organizations", "updated_at", "user") + created_at = sgqlc.types.Field(sgqlc.types.non_null(DateTime), graphql_name="createdAt") + """Identifies the date and time when the object was created.""" + + enterprise = sgqlc.types.Field(sgqlc.types.non_null(Enterprise), graphql_name="enterprise") + """The enterprise in which this user account exists.""" + + name = sgqlc.types.Field(String, graphql_name="name") + """The name of the enterprise user account""" + + organizations = sgqlc.types.Field( + sgqlc.types.non_null(EnterpriseOrganizationMembershipConnection), + graphql_name="organizations", + args=sgqlc.types.ArgDict( + ( + ("query", sgqlc.types.Arg(String, graphql_name="query", default=None)), + ("order_by", sgqlc.types.Arg(OrganizationOrder, graphql_name="orderBy", default={"field": "LOGIN", "direction": "ASC"})), + ("role", sgqlc.types.Arg(EnterpriseUserAccountMembershipRole, graphql_name="role", default=None)), + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ) + ), + ) + """A list of enterprise organizations this user is a member of. + + Arguments: + + * `query` (`String`): The search string to look for. + * `order_by` (`OrganizationOrder`): Ordering options for + organizations returned from the connection. (default: `{field: + LOGIN, direction: ASC}`) + * `role` (`EnterpriseUserAccountMembershipRole`): The role of the + user in the enterprise organization. + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. 
+ * `last` (`Int`): Returns the last _n_ elements from the list. + """ + + updated_at = sgqlc.types.Field(sgqlc.types.non_null(DateTime), graphql_name="updatedAt") + """Identifies the date and time when the object was last updated.""" + + user = sgqlc.types.Field("User", graphql_name="user") + """The user within the enterprise.""" + + +class Environment(sgqlc.types.Type, Node): + """An environment.""" + + __schema__ = github_schema + __field_names__ = ("database_id", "name", "protection_rules") + database_id = sgqlc.types.Field(Int, graphql_name="databaseId") + """Identifies the primary key from the database.""" + + name = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="name") + """The name of the environment""" + + protection_rules = sgqlc.types.Field( + sgqlc.types.non_null(DeploymentProtectionRuleConnection), + graphql_name="protectionRules", + args=sgqlc.types.ArgDict( + ( + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ) + ), + ) + """The protection rules defined for this environment + + Arguments: + + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. 
+ """ + + +class ExternalIdentity(sgqlc.types.Type, Node): + """An external identity provisioned by SAML SSO or SCIM.""" + + __schema__ = github_schema + __field_names__ = ("guid", "organization_invitation", "saml_identity", "scim_identity", "user") + guid = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="guid") + """The GUID for this identity""" + + organization_invitation = sgqlc.types.Field("OrganizationInvitation", graphql_name="organizationInvitation") + """Organization invitation for this SCIM-provisioned external + identity + """ + + saml_identity = sgqlc.types.Field(ExternalIdentitySamlAttributes, graphql_name="samlIdentity") + """SAML Identity attributes""" + + scim_identity = sgqlc.types.Field(ExternalIdentityScimAttributes, graphql_name="scimIdentity") + """SCIM Identity attributes""" + + user = sgqlc.types.Field("User", graphql_name="user") + """User linked to this external identity. Will be NULL if this + identity has not been claimed by an organization member. 
+ """ + + +class GenericHovercardContext(sgqlc.types.Type, HovercardContext): + """A generic hovercard context with a message and icon""" + + __schema__ = github_schema + __field_names__ = () + + +class Gist(sgqlc.types.Type, Node, Starrable, UniformResourceLocatable): + """A Gist.""" + + __schema__ = github_schema + __field_names__ = ( + "comments", + "created_at", + "description", + "files", + "forks", + "is_fork", + "is_public", + "name", + "owner", + "pushed_at", + "updated_at", + ) + comments = sgqlc.types.Field( + sgqlc.types.non_null(GistCommentConnection), + graphql_name="comments", + args=sgqlc.types.ArgDict( + ( + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ) + ), + ) + """A list of comments associated with the gist + + Arguments: + + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. + """ + + created_at = sgqlc.types.Field(sgqlc.types.non_null(DateTime), graphql_name="createdAt") + """Identifies the date and time when the object was created.""" + + description = sgqlc.types.Field(String, graphql_name="description") + """The gist description.""" + + files = sgqlc.types.Field( + sgqlc.types.list_of(GistFile), + graphql_name="files", + args=sgqlc.types.ArgDict( + ( + ("limit", sgqlc.types.Arg(Int, graphql_name="limit", default=10)), + ("oid", sgqlc.types.Arg(GitObjectID, graphql_name="oid", default=None)), + ) + ), + ) + """The files in this gist. + + Arguments: + + * `limit` (`Int`): The maximum number of files to return. 
+ (default: `10`) + * `oid` (`GitObjectID`): The oid of the files to return + """ + + forks = sgqlc.types.Field( + sgqlc.types.non_null(GistConnection), + graphql_name="forks", + args=sgqlc.types.ArgDict( + ( + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ("order_by", sgqlc.types.Arg(GistOrder, graphql_name="orderBy", default=None)), + ) + ), + ) + """A list of forks associated with the gist + + Arguments: + + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. + * `order_by` (`GistOrder`): Ordering options for gists returned + from the connection + """ + + is_fork = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="isFork") + """Identifies if the gist is a fork.""" + + is_public = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="isPublic") + """Whether the gist is public or not.""" + + name = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="name") + """The gist name.""" + + owner = sgqlc.types.Field(RepositoryOwner, graphql_name="owner") + """The gist owner.""" + + pushed_at = sgqlc.types.Field(DateTime, graphql_name="pushedAt") + """Identifies when the gist was last pushed to.""" + + updated_at = sgqlc.types.Field(sgqlc.types.non_null(DateTime), graphql_name="updatedAt") + """Identifies the date and time when the object was last updated.""" + + +class GistComment(sgqlc.types.Type, Node, Comment, Deletable, Minimizable, Updatable, UpdatableComment): + """Represents a comment on an Gist.""" + + __schema__ = 
github_schema + __field_names__ = ("database_id", "gist") + database_id = sgqlc.types.Field(Int, graphql_name="databaseId") + """Identifies the primary key from the database.""" + + gist = sgqlc.types.Field(sgqlc.types.non_null(Gist), graphql_name="gist") + """The associated gist.""" + + +class GpgSignature(sgqlc.types.Type, GitSignature): + """Represents a GPG signature on a Commit or Tag.""" + + __schema__ = github_schema + __field_names__ = ("key_id",) + key_id = sgqlc.types.Field(String, graphql_name="keyId") + """Hex-encoded ID of the key that signed this object.""" + + +class HeadRefDeletedEvent(sgqlc.types.Type, Node): + """Represents a 'head_ref_deleted' event on a given pull request.""" + + __schema__ = github_schema + __field_names__ = ("actor", "created_at", "head_ref", "head_ref_name", "pull_request") + actor = sgqlc.types.Field(Actor, graphql_name="actor") + """Identifies the actor who performed the event.""" + + created_at = sgqlc.types.Field(sgqlc.types.non_null(DateTime), graphql_name="createdAt") + """Identifies the date and time when the object was created.""" + + head_ref = sgqlc.types.Field("Ref", graphql_name="headRef") + """Identifies the Ref associated with the `head_ref_deleted` event.""" + + head_ref_name = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="headRefName") + """Identifies the name of the Ref associated with the + `head_ref_deleted` event. + """ + + pull_request = sgqlc.types.Field(sgqlc.types.non_null("PullRequest"), graphql_name="pullRequest") + """PullRequest referenced by event.""" + + +class HeadRefForcePushedEvent(sgqlc.types.Type, Node): + """Represents a 'head_ref_force_pushed' event on a given pull + request. 
+ """ + + __schema__ = github_schema + __field_names__ = ("actor", "after_commit", "before_commit", "created_at", "pull_request", "ref") + actor = sgqlc.types.Field(Actor, graphql_name="actor") + """Identifies the actor who performed the event.""" + + after_commit = sgqlc.types.Field(Commit, graphql_name="afterCommit") + """Identifies the after commit SHA for the 'head_ref_force_pushed' + event. + """ + + before_commit = sgqlc.types.Field(Commit, graphql_name="beforeCommit") + """Identifies the before commit SHA for the 'head_ref_force_pushed' + event. + """ + + created_at = sgqlc.types.Field(sgqlc.types.non_null(DateTime), graphql_name="createdAt") + """Identifies the date and time when the object was created.""" + + pull_request = sgqlc.types.Field(sgqlc.types.non_null("PullRequest"), graphql_name="pullRequest") + """PullRequest referenced by event.""" + + ref = sgqlc.types.Field("Ref", graphql_name="ref") + """Identifies the fully qualified ref name for the + 'head_ref_force_pushed' event. + """ + + +class HeadRefRestoredEvent(sgqlc.types.Type, Node): + """Represents a 'head_ref_restored' event on a given pull request.""" + + __schema__ = github_schema + __field_names__ = ("actor", "created_at", "pull_request") + actor = sgqlc.types.Field(Actor, graphql_name="actor") + """Identifies the actor who performed the event.""" + + created_at = sgqlc.types.Field(sgqlc.types.non_null(DateTime), graphql_name="createdAt") + """Identifies the date and time when the object was created.""" + + pull_request = sgqlc.types.Field(sgqlc.types.non_null("PullRequest"), graphql_name="pullRequest") + """PullRequest referenced by event.""" + + +class IpAllowListEntry(sgqlc.types.Type, Node): + """An IP address or range of addresses that is allowed to access an + owner's resources. 
+ """ + + __schema__ = github_schema + __field_names__ = ("allow_list_value", "created_at", "is_active", "name", "owner", "updated_at") + allow_list_value = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="allowListValue") + """A single IP address or range of IP addresses in CIDR notation.""" + + created_at = sgqlc.types.Field(sgqlc.types.non_null(DateTime), graphql_name="createdAt") + """Identifies the date and time when the object was created.""" + + is_active = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="isActive") + """Whether the entry is currently active.""" + + name = sgqlc.types.Field(String, graphql_name="name") + """The name of the IP allow list entry.""" + + owner = sgqlc.types.Field(sgqlc.types.non_null("IpAllowListOwner"), graphql_name="owner") + """The owner of the IP allow list entry.""" + + updated_at = sgqlc.types.Field(sgqlc.types.non_null(DateTime), graphql_name="updatedAt") + """Identifies the date and time when the object was last updated.""" + + +class Issue( + sgqlc.types.Type, + Node, + Assignable, + Closable, + Comment, + Updatable, + UpdatableComment, + Labelable, + Lockable, + Reactable, + RepositoryNode, + Subscribable, + UniformResourceLocatable, + ProjectNextOwner, +): + """An Issue is a place to discuss ideas, enhancements, tasks, and + bugs for a project. 
+ """ + + __schema__ = github_schema + __field_names__ = ( + "body_resource_path", + "body_url", + "comments", + "hovercard", + "is_pinned", + "is_read_by_viewer", + "milestone", + "number", + "participants", + "project_cards", + "project_next_items", + "state", + "state_reason", + "timeline_items", + "title", + "title_html", + "tracked_in_issues", + "tracked_issues", + "tracked_issues_count", + ) + body_resource_path = sgqlc.types.Field(sgqlc.types.non_null(URI), graphql_name="bodyResourcePath") + """The http path for this issue body""" + + body_url = sgqlc.types.Field(sgqlc.types.non_null(URI), graphql_name="bodyUrl") + """The http URL for this issue body""" + + comments = sgqlc.types.Field( + sgqlc.types.non_null(IssueCommentConnection), + graphql_name="comments", + args=sgqlc.types.ArgDict( + ( + ("order_by", sgqlc.types.Arg(IssueCommentOrder, graphql_name="orderBy", default=None)), + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ) + ), + ) + """A list of comments associated with the Issue. + + Arguments: + + * `order_by` (`IssueCommentOrder`): Ordering options for issue + comments returned from the connection. + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. 
+ """ + + hovercard = sgqlc.types.Field( + sgqlc.types.non_null(Hovercard), + graphql_name="hovercard", + args=sgqlc.types.ArgDict( + (("include_notification_contexts", sgqlc.types.Arg(Boolean, graphql_name="includeNotificationContexts", default=True)),) + ), + ) + """The hovercard information for this issue + + Arguments: + + * `include_notification_contexts` (`Boolean`): Whether or not to + include notification contexts (default: `true`) + """ + + is_pinned = sgqlc.types.Field(Boolean, graphql_name="isPinned") + """Indicates whether or not this issue is currently pinned to the + repository issues list + """ + + is_read_by_viewer = sgqlc.types.Field(Boolean, graphql_name="isReadByViewer") + """Is this issue read by the viewer""" + + milestone = sgqlc.types.Field("Milestone", graphql_name="milestone") + """Identifies the milestone associated with the issue.""" + + number = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="number") + """Identifies the issue number.""" + + participants = sgqlc.types.Field( + sgqlc.types.non_null(UserConnection), + graphql_name="participants", + args=sgqlc.types.ArgDict( + ( + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ) + ), + ) + """A list of Users that are participating in the Issue conversation. + + Arguments: + + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. 
+ """ + + project_cards = sgqlc.types.Field( + sgqlc.types.non_null(ProjectCardConnection), + graphql_name="projectCards", + args=sgqlc.types.ArgDict( + ( + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ( + "archived_states", + sgqlc.types.Arg( + sgqlc.types.list_of(ProjectCardArchivedState), graphql_name="archivedStates", default=("ARCHIVED", "NOT_ARCHIVED") + ), + ), + ) + ), + ) + """List of project cards associated with this issue. + + Arguments: + + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. + * `archived_states` (`[ProjectCardArchivedState]`): A list of + archived states to filter the cards by (default: `[ARCHIVED, + NOT_ARCHIVED]`) + """ + + project_next_items = sgqlc.types.Field( + sgqlc.types.non_null(ProjectNextItemConnection), + graphql_name="projectNextItems", + args=sgqlc.types.ArgDict( + ( + ("include_archived", sgqlc.types.Arg(Boolean, graphql_name="includeArchived", default=True)), + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ) + ), + ) + """List of project (beta) items associated with this issue. + + Arguments: + + * `include_archived` (`Boolean`): Include archived items. + (default: `true`) + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. 
+ * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. + """ + + state = sgqlc.types.Field(sgqlc.types.non_null(IssueState), graphql_name="state") + """Identifies the state of the issue.""" + + state_reason = sgqlc.types.Field(IssueStateReason, graphql_name="stateReason") + """Identifies the reason for the issue state.""" + + timeline_items = sgqlc.types.Field( + sgqlc.types.non_null(IssueTimelineItemsConnection), + graphql_name="timelineItems", + args=sgqlc.types.ArgDict( + ( + ("since", sgqlc.types.Arg(DateTime, graphql_name="since", default=None)), + ("skip", sgqlc.types.Arg(Int, graphql_name="skip", default=None)), + ( + "item_types", + sgqlc.types.Arg( + sgqlc.types.list_of(sgqlc.types.non_null(IssueTimelineItemsItemType)), graphql_name="itemTypes", default=None + ), + ), + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ) + ), + ) + """A list of events, comments, commits, etc. associated with the + issue. + + Arguments: + + * `since` (`DateTime`): Filter timeline items by a `since` + timestamp. + * `skip` (`Int`): Skips the first _n_ elements in the list. + * `item_types` (`[IssueTimelineItemsItemType!]`): Filter timeline + items by type. + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. 
+ """ + + title = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="title") + """Identifies the issue title.""" + + title_html = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="titleHTML") + """Identifies the issue title rendered to HTML.""" + + tracked_in_issues = sgqlc.types.Field( + sgqlc.types.non_null(IssueConnection), + graphql_name="trackedInIssues", + args=sgqlc.types.ArgDict( + ( + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ) + ), + ) + """A list of issues that track this issue + + Arguments: + + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. + """ + + tracked_issues = sgqlc.types.Field( + sgqlc.types.non_null(IssueConnection), + graphql_name="trackedIssues", + args=sgqlc.types.ArgDict( + ( + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ) + ), + ) + """A list of issues tracked inside the current issue + + Arguments: + + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. 
+ """ + + tracked_issues_count = sgqlc.types.Field( + sgqlc.types.non_null(Int), + graphql_name="trackedIssuesCount", + args=sgqlc.types.ArgDict( + (("states", sgqlc.types.Arg(sgqlc.types.list_of(TrackedIssueStates), graphql_name="states", default=None)),) + ), + ) + """The number of tracked issues for this issue + + Arguments: + + * `states` (`[TrackedIssueStates]`): Limit the count to tracked + issues with the specified states. + """ + + +class IssueComment(sgqlc.types.Type, Node, Comment, Deletable, Minimizable, Updatable, UpdatableComment, Reactable, RepositoryNode): + """Represents a comment on an Issue.""" + + __schema__ = github_schema + __field_names__ = ("issue", "pull_request", "resource_path", "url") + issue = sgqlc.types.Field(sgqlc.types.non_null(Issue), graphql_name="issue") + """Identifies the issue associated with the comment.""" + + pull_request = sgqlc.types.Field("PullRequest", graphql_name="pullRequest") + """Returns the pull request associated with the comment, if this + comment was made on a pull request. + """ + + resource_path = sgqlc.types.Field(sgqlc.types.non_null(URI), graphql_name="resourcePath") + """The HTTP path for this issue comment""" + + url = sgqlc.types.Field(sgqlc.types.non_null(URI), graphql_name="url") + """The HTTP URL for this issue comment""" + + +class JoinedGitHubContribution(sgqlc.types.Type, Contribution): + """Represents a user signing up for a GitHub account.""" + + __schema__ = github_schema + __field_names__ = () + + +class Label(sgqlc.types.Type, Node): + """A label for categorizing Issues, Pull Requests, Milestones, or + Discussions with a given Repository. 
+ """ + + __schema__ = github_schema + __field_names__ = ( + "color", + "created_at", + "description", + "is_default", + "issues", + "name", + "pull_requests", + "repository", + "resource_path", + "updated_at", + "url", + ) + color = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="color") + """Identifies the label color.""" + + created_at = sgqlc.types.Field(DateTime, graphql_name="createdAt") + """Identifies the date and time when the label was created.""" + + description = sgqlc.types.Field(String, graphql_name="description") + """A brief description of this label.""" + + is_default = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="isDefault") + """Indicates whether or not this is a default label.""" + + issues = sgqlc.types.Field( + sgqlc.types.non_null(IssueConnection), + graphql_name="issues", + args=sgqlc.types.ArgDict( + ( + ("order_by", sgqlc.types.Arg(IssueOrder, graphql_name="orderBy", default=None)), + ("labels", sgqlc.types.Arg(sgqlc.types.list_of(sgqlc.types.non_null(String)), graphql_name="labels", default=None)), + ("states", sgqlc.types.Arg(sgqlc.types.list_of(sgqlc.types.non_null(IssueState)), graphql_name="states", default=None)), + ("filter_by", sgqlc.types.Arg(IssueFilters, graphql_name="filterBy", default=None)), + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ) + ), + ) + """A list of issues associated with this label. + + Arguments: + + * `order_by` (`IssueOrder`): Ordering options for issues returned + from the connection. + * `labels` (`[String!]`): A list of label names to filter the pull + requests by. + * `states` (`[IssueState!]`): A list of states to filter the + issues by. + * `filter_by` (`IssueFilters`): Filtering options for issues + returned from the connection. 
+ * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. + """ + + name = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="name") + """Identifies the label name.""" + + pull_requests = sgqlc.types.Field( + sgqlc.types.non_null(PullRequestConnection), + graphql_name="pullRequests", + args=sgqlc.types.ArgDict( + ( + ( + "states", + sgqlc.types.Arg(sgqlc.types.list_of(sgqlc.types.non_null(PullRequestState)), graphql_name="states", default=None), + ), + ("labels", sgqlc.types.Arg(sgqlc.types.list_of(sgqlc.types.non_null(String)), graphql_name="labels", default=None)), + ("head_ref_name", sgqlc.types.Arg(String, graphql_name="headRefName", default=None)), + ("base_ref_name", sgqlc.types.Arg(String, graphql_name="baseRefName", default=None)), + ("order_by", sgqlc.types.Arg(IssueOrder, graphql_name="orderBy", default=None)), + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ) + ), + ) + """A list of pull requests associated with this label. + + Arguments: + + * `states` (`[PullRequestState!]`): A list of states to filter the + pull requests by. + * `labels` (`[String!]`): A list of label names to filter the pull + requests by. + * `head_ref_name` (`String`): The head ref name to filter the pull + requests by. + * `base_ref_name` (`String`): The base ref name to filter the pull + requests by. + * `order_by` (`IssueOrder`): Ordering options for pull requests + returned from the connection. 
+ * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. + """ + + repository = sgqlc.types.Field(sgqlc.types.non_null("Repository"), graphql_name="repository") + """The repository associated with this label.""" + + resource_path = sgqlc.types.Field(sgqlc.types.non_null(URI), graphql_name="resourcePath") + """The HTTP path for this label.""" + + updated_at = sgqlc.types.Field(DateTime, graphql_name="updatedAt") + """Identifies the date and time when the label was last updated.""" + + url = sgqlc.types.Field(sgqlc.types.non_null(URI), graphql_name="url") + """The HTTP URL for this label.""" + + +class LabeledEvent(sgqlc.types.Type, Node): + """Represents a 'labeled' event on a given issue or pull request.""" + + __schema__ = github_schema + __field_names__ = ("actor", "created_at", "label", "labelable") + actor = sgqlc.types.Field(Actor, graphql_name="actor") + """Identifies the actor who performed the event.""" + + created_at = sgqlc.types.Field(sgqlc.types.non_null(DateTime), graphql_name="createdAt") + """Identifies the date and time when the object was created.""" + + label = sgqlc.types.Field(sgqlc.types.non_null(Label), graphql_name="label") + """Identifies the label associated with the 'labeled' event.""" + + labelable = sgqlc.types.Field(sgqlc.types.non_null(Labelable), graphql_name="labelable") + """Identifies the `Labelable` associated with the event.""" + + +class Language(sgqlc.types.Type, Node): + """Represents a given language found in repositories.""" + + __schema__ = github_schema + __field_names__ = ("color", "name") + color = sgqlc.types.Field(String, graphql_name="color") + """The color defined for the current language.""" + + name = sgqlc.types.Field(sgqlc.types.non_null(String), 
graphql_name="name") + """The name of the current language.""" + + +class License(sgqlc.types.Type, Node): + """A repository's open source license""" + + __schema__ = github_schema + __field_names__ = ( + "body", + "conditions", + "description", + "featured", + "hidden", + "implementation", + "key", + "limitations", + "name", + "nickname", + "permissions", + "pseudo_license", + "spdx_id", + "url", + ) + body = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="body") + """The full text of the license""" + + conditions = sgqlc.types.Field(sgqlc.types.non_null(sgqlc.types.list_of(LicenseRule)), graphql_name="conditions") + """The conditions set by the license""" + + description = sgqlc.types.Field(String, graphql_name="description") + """A human-readable description of the license""" + + featured = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="featured") + """Whether the license should be featured""" + + hidden = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="hidden") + """Whether the license should be displayed in license pickers""" + + implementation = sgqlc.types.Field(String, graphql_name="implementation") + """Instructions on how to implement the license""" + + key = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="key") + """The lowercased SPDX ID of the license""" + + limitations = sgqlc.types.Field(sgqlc.types.non_null(sgqlc.types.list_of(LicenseRule)), graphql_name="limitations") + """The limitations set by the license""" + + name = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="name") + """The license full name specified by """ + + nickname = sgqlc.types.Field(String, graphql_name="nickname") + """Customary short name if applicable (e.g, GPLv3)""" + + permissions = sgqlc.types.Field(sgqlc.types.non_null(sgqlc.types.list_of(LicenseRule)), graphql_name="permissions") + """The permissions set by the license""" + + pseudo_license = sgqlc.types.Field(sgqlc.types.non_null(Boolean), 
graphql_name="pseudoLicense") + """Whether the license is a pseudo-license placeholder (e.g., other, + no-license) + """ + + spdx_id = sgqlc.types.Field(String, graphql_name="spdxId") + """Short identifier specified by """ + + url = sgqlc.types.Field(URI, graphql_name="url") + """URL to the license on """ + + +class LockedEvent(sgqlc.types.Type, Node): + """Represents a 'locked' event on a given issue or pull request.""" + + __schema__ = github_schema + __field_names__ = ("actor", "created_at", "lock_reason", "lockable") + actor = sgqlc.types.Field(Actor, graphql_name="actor") + """Identifies the actor who performed the event.""" + + created_at = sgqlc.types.Field(sgqlc.types.non_null(DateTime), graphql_name="createdAt") + """Identifies the date and time when the object was created.""" + + lock_reason = sgqlc.types.Field(LockReason, graphql_name="lockReason") + """Reason that the conversation was locked (optional).""" + + lockable = sgqlc.types.Field(sgqlc.types.non_null(Lockable), graphql_name="lockable") + """Object that was locked.""" + + +class Mannequin(sgqlc.types.Type, Node, Actor, UniformResourceLocatable): + """A placeholder user for attribution of imported data on GitHub.""" + + __schema__ = github_schema + __field_names__ = ("claimant", "created_at", "database_id", "email", "updated_at") + claimant = sgqlc.types.Field("User", graphql_name="claimant") + """The user that has claimed the data attributed to this mannequin.""" + + created_at = sgqlc.types.Field(sgqlc.types.non_null(DateTime), graphql_name="createdAt") + """Identifies the date and time when the object was created.""" + + database_id = sgqlc.types.Field(Int, graphql_name="databaseId") + """Identifies the primary key from the database.""" + + email = sgqlc.types.Field(String, graphql_name="email") + """The mannequin's email on the source instance.""" + + updated_at = sgqlc.types.Field(sgqlc.types.non_null(DateTime), graphql_name="updatedAt") + """Identifies the date and time when the object was 
last updated.""" + + +class MarkedAsDuplicateEvent(sgqlc.types.Type, Node): + """Represents a 'marked_as_duplicate' event on a given issue or pull + request. + """ + + __schema__ = github_schema + __field_names__ = ("actor", "canonical", "created_at", "duplicate", "is_cross_repository") + actor = sgqlc.types.Field(Actor, graphql_name="actor") + """Identifies the actor who performed the event.""" + + canonical = sgqlc.types.Field("IssueOrPullRequest", graphql_name="canonical") + """The authoritative issue or pull request which has been duplicated + by another. + """ + + created_at = sgqlc.types.Field(sgqlc.types.non_null(DateTime), graphql_name="createdAt") + """Identifies the date and time when the object was created.""" + + duplicate = sgqlc.types.Field("IssueOrPullRequest", graphql_name="duplicate") + """The issue or pull request which has been marked as a duplicate of + another. + """ + + is_cross_repository = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="isCrossRepository") + """Canonical and duplicate belong to different repositories.""" + + +class MarketplaceCategory(sgqlc.types.Type, Node): + """A public description of a Marketplace category.""" + + __schema__ = github_schema + __field_names__ = ( + "description", + "how_it_works", + "name", + "primary_listing_count", + "resource_path", + "secondary_listing_count", + "slug", + "url", + ) + description = sgqlc.types.Field(String, graphql_name="description") + """The category's description.""" + + how_it_works = sgqlc.types.Field(String, graphql_name="howItWorks") + """The technical description of how apps listed in this category work + with GitHub. 
+ """ + + name = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="name") + """The category's name.""" + + primary_listing_count = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="primaryListingCount") + """How many Marketplace listings have this as their primary category.""" + + resource_path = sgqlc.types.Field(sgqlc.types.non_null(URI), graphql_name="resourcePath") + """The HTTP path for this Marketplace category.""" + + secondary_listing_count = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="secondaryListingCount") + """How many Marketplace listings have this as their secondary + category. + """ + + slug = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="slug") + """The short name of the category used in its URL.""" + + url = sgqlc.types.Field(sgqlc.types.non_null(URI), graphql_name="url") + """The HTTP URL for this Marketplace category.""" + + +class MarketplaceListing(sgqlc.types.Type, Node): + """A listing in the GitHub integration marketplace.""" + + __schema__ = github_schema + __field_names__ = ( + "app", + "company_url", + "configuration_resource_path", + "configuration_url", + "documentation_url", + "extended_description", + "extended_description_html", + "full_description", + "full_description_html", + "has_published_free_trial_plans", + "has_terms_of_service", + "has_verified_owner", + "how_it_works", + "how_it_works_html", + "installation_url", + "installed_for_viewer", + "is_archived", + "is_draft", + "is_paid", + "is_public", + "is_rejected", + "is_unverified", + "is_unverified_pending", + "is_verification_pending_from_draft", + "is_verification_pending_from_unverified", + "is_verified", + "logo_background_color", + "logo_url", + "name", + "normalized_short_description", + "pricing_url", + "primary_category", + "privacy_policy_url", + "resource_path", + "screenshot_urls", + "secondary_category", + "short_description", + "slug", + "status_url", + "support_email", + "support_url", + 
"terms_of_service_url", + "url", + "viewer_can_add_plans", + "viewer_can_approve", + "viewer_can_delist", + "viewer_can_edit", + "viewer_can_edit_categories", + "viewer_can_edit_plans", + "viewer_can_redraft", + "viewer_can_reject", + "viewer_can_request_approval", + "viewer_has_purchased", + "viewer_has_purchased_for_all_organizations", + "viewer_is_listing_admin", + ) + app = sgqlc.types.Field(App, graphql_name="app") + """The GitHub App this listing represents.""" + + company_url = sgqlc.types.Field(URI, graphql_name="companyUrl") + """URL to the listing owner's company site.""" + + configuration_resource_path = sgqlc.types.Field(sgqlc.types.non_null(URI), graphql_name="configurationResourcePath") + """The HTTP path for configuring access to the listing's integration + or OAuth app + """ + + configuration_url = sgqlc.types.Field(sgqlc.types.non_null(URI), graphql_name="configurationUrl") + """The HTTP URL for configuring access to the listing's integration + or OAuth app + """ + + documentation_url = sgqlc.types.Field(URI, graphql_name="documentationUrl") + """URL to the listing's documentation.""" + + extended_description = sgqlc.types.Field(String, graphql_name="extendedDescription") + """The listing's detailed description.""" + + extended_description_html = sgqlc.types.Field(sgqlc.types.non_null(HTML), graphql_name="extendedDescriptionHTML") + """The listing's detailed description rendered to HTML.""" + + full_description = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="fullDescription") + """The listing's introductory description.""" + + full_description_html = sgqlc.types.Field(sgqlc.types.non_null(HTML), graphql_name="fullDescriptionHTML") + """The listing's introductory description rendered to HTML.""" + + has_published_free_trial_plans = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="hasPublishedFreeTrialPlans") + """Does this listing have any plans with a free trial?""" + + has_terms_of_service = 
sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="hasTermsOfService") + """Does this listing have a terms of service link?""" + + has_verified_owner = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="hasVerifiedOwner") + """Whether the creator of the app is a verified org""" + + how_it_works = sgqlc.types.Field(String, graphql_name="howItWorks") + """A technical description of how this app works with GitHub.""" + + how_it_works_html = sgqlc.types.Field(sgqlc.types.non_null(HTML), graphql_name="howItWorksHTML") + """The listing's technical description rendered to HTML.""" + + installation_url = sgqlc.types.Field(URI, graphql_name="installationUrl") + """URL to install the product to the viewer's account or + organization. + """ + + installed_for_viewer = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="installedForViewer") + """Whether this listing's app has been installed for the current + viewer + """ + + is_archived = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="isArchived") + """Whether this listing has been removed from the Marketplace.""" + + is_draft = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="isDraft") + """Whether this listing is still an editable draft that has not been + submitted for review and is not publicly visible in the + Marketplace. + """ + + is_paid = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="isPaid") + """Whether the product this listing represents is available as part + of a paid plan. + """ + + is_public = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="isPublic") + """Whether this listing has been approved for display in the + Marketplace. + """ + + is_rejected = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="isRejected") + """Whether this listing has been rejected by GitHub for display in + the Marketplace. 
+ """ + + is_unverified = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="isUnverified") + """Whether this listing has been approved for unverified display in + the Marketplace. + """ + + is_unverified_pending = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="isUnverifiedPending") + """Whether this draft listing has been submitted for review for + approval to be unverified in the Marketplace. + """ + + is_verification_pending_from_draft = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="isVerificationPendingFromDraft") + """Whether this draft listing has been submitted for review from + GitHub for approval to be verified in the Marketplace. + """ + + is_verification_pending_from_unverified = sgqlc.types.Field( + sgqlc.types.non_null(Boolean), graphql_name="isVerificationPendingFromUnverified" + ) + """Whether this unverified listing has been submitted for review from + GitHub for approval to be verified in the Marketplace. + """ + + is_verified = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="isVerified") + """Whether this listing has been approved for verified display in the + Marketplace. + """ + + logo_background_color = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="logoBackgroundColor") + """The hex color code, without the leading '#', for the logo + background. + """ + + logo_url = sgqlc.types.Field( + URI, graphql_name="logoUrl", args=sgqlc.types.ArgDict((("size", sgqlc.types.Arg(Int, graphql_name="size", default=400)),)) + ) + """URL for the listing's logo image. + + Arguments: + + * `size` (`Int`): The size in pixels of the resulting square + image. 
(default: `400`) + """ + + name = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="name") + """The listing's full name.""" + + normalized_short_description = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="normalizedShortDescription") + """The listing's very short description without a trailing period or + ampersands. + """ + + pricing_url = sgqlc.types.Field(URI, graphql_name="pricingUrl") + """URL to the listing's detailed pricing.""" + + primary_category = sgqlc.types.Field(sgqlc.types.non_null(MarketplaceCategory), graphql_name="primaryCategory") + """The category that best describes the listing.""" + + privacy_policy_url = sgqlc.types.Field(sgqlc.types.non_null(URI), graphql_name="privacyPolicyUrl") + """URL to the listing's privacy policy, may return an empty string + for listings that do not require a privacy policy URL. + """ + + resource_path = sgqlc.types.Field(sgqlc.types.non_null(URI), graphql_name="resourcePath") + """The HTTP path for the Marketplace listing.""" + + screenshot_urls = sgqlc.types.Field(sgqlc.types.non_null(sgqlc.types.list_of(String)), graphql_name="screenshotUrls") + """The URLs for the listing's screenshots.""" + + secondary_category = sgqlc.types.Field(MarketplaceCategory, graphql_name="secondaryCategory") + """An alternate category that describes the listing.""" + + short_description = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="shortDescription") + """The listing's very short description.""" + + slug = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="slug") + """The short name of the listing used in its URL.""" + + status_url = sgqlc.types.Field(URI, graphql_name="statusUrl") + """URL to the listing's status page.""" + + support_email = sgqlc.types.Field(String, graphql_name="supportEmail") + """An email address for support for this listing's app.""" + + support_url = sgqlc.types.Field(sgqlc.types.non_null(URI), graphql_name="supportUrl") + """Either a URL or an email 
address for support for this listing's + app, may return an empty string for listings that do not require a + support URL. + """ + + terms_of_service_url = sgqlc.types.Field(URI, graphql_name="termsOfServiceUrl") + """URL to the listing's terms of service.""" + + url = sgqlc.types.Field(sgqlc.types.non_null(URI), graphql_name="url") + """The HTTP URL for the Marketplace listing.""" + + viewer_can_add_plans = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="viewerCanAddPlans") + """Can the current viewer add plans for this Marketplace listing.""" + + viewer_can_approve = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="viewerCanApprove") + """Can the current viewer approve this Marketplace listing.""" + + viewer_can_delist = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="viewerCanDelist") + """Can the current viewer delist this Marketplace listing.""" + + viewer_can_edit = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="viewerCanEdit") + """Can the current viewer edit this Marketplace listing.""" + + viewer_can_edit_categories = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="viewerCanEditCategories") + """Can the current viewer edit the primary and secondary category of + this Marketplace listing. + """ + + viewer_can_edit_plans = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="viewerCanEditPlans") + """Can the current viewer edit the plans for this Marketplace + listing. + """ + + viewer_can_redraft = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="viewerCanRedraft") + """Can the current viewer return this Marketplace listing to draft + state so it becomes editable again. + """ + + viewer_can_reject = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="viewerCanReject") + """Can the current viewer reject this Marketplace listing by + returning it to an editable draft state or rejecting it entirely. 
+ """ + + viewer_can_request_approval = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="viewerCanRequestApproval") + """Can the current viewer request this listing be reviewed for + display in the Marketplace as verified. + """ + + viewer_has_purchased = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="viewerHasPurchased") + """Indicates whether the current user has an active subscription to + this Marketplace listing. + """ + + viewer_has_purchased_for_all_organizations = sgqlc.types.Field( + sgqlc.types.non_null(Boolean), graphql_name="viewerHasPurchasedForAllOrganizations" + ) + """Indicates if the current user has purchased a subscription to this + Marketplace listing for all of the organizations the user owns. + """ + + viewer_is_listing_admin = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="viewerIsListingAdmin") + """Does the current viewer role allow them to administer this + Marketplace listing. + """ + + +class MembersCanDeleteReposClearAuditEntry(sgqlc.types.Type, Node, AuditEntry, EnterpriseAuditEntryData, OrganizationAuditEntryData): + """Audit log entry for a members_can_delete_repos.clear event.""" + + __schema__ = github_schema + __field_names__ = () + + +class MembersCanDeleteReposDisableAuditEntry(sgqlc.types.Type, Node, AuditEntry, EnterpriseAuditEntryData, OrganizationAuditEntryData): + """Audit log entry for a members_can_delete_repos.disable event.""" + + __schema__ = github_schema + __field_names__ = () + + +class MembersCanDeleteReposEnableAuditEntry(sgqlc.types.Type, Node, AuditEntry, EnterpriseAuditEntryData, OrganizationAuditEntryData): + """Audit log entry for a members_can_delete_repos.enable event.""" + + __schema__ = github_schema + __field_names__ = () + + +class MentionedEvent(sgqlc.types.Type, Node): + """Represents a 'mentioned' event on a given issue or pull request.""" + + __schema__ = github_schema + __field_names__ = ("actor", "created_at", "database_id") + actor = 
sgqlc.types.Field(Actor, graphql_name="actor") + """Identifies the actor who performed the event.""" + + created_at = sgqlc.types.Field(sgqlc.types.non_null(DateTime), graphql_name="createdAt") + """Identifies the date and time when the object was created.""" + + database_id = sgqlc.types.Field(Int, graphql_name="databaseId") + """Identifies the primary key from the database.""" + + +class MergedEvent(sgqlc.types.Type, Node, UniformResourceLocatable): + """Represents a 'merged' event on a given pull request.""" + + __schema__ = github_schema + __field_names__ = ("actor", "commit", "created_at", "merge_ref", "merge_ref_name", "pull_request") + actor = sgqlc.types.Field(Actor, graphql_name="actor") + """Identifies the actor who performed the event.""" + + commit = sgqlc.types.Field(Commit, graphql_name="commit") + """Identifies the commit associated with the `merge` event.""" + + created_at = sgqlc.types.Field(sgqlc.types.non_null(DateTime), graphql_name="createdAt") + """Identifies the date and time when the object was created.""" + + merge_ref = sgqlc.types.Field("Ref", graphql_name="mergeRef") + """Identifies the Ref associated with the `merge` event.""" + + merge_ref_name = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="mergeRefName") + """Identifies the name of the Ref associated with the `merge` event.""" + + pull_request = sgqlc.types.Field(sgqlc.types.non_null("PullRequest"), graphql_name="pullRequest") + """PullRequest referenced by event.""" + + +class MigrationSource(sgqlc.types.Type, Node): + """An Octoshift migration source.""" + + __schema__ = github_schema + __field_names__ = ("name", "type", "url") + name = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="name") + """The Octoshift migration source name.""" + + type = sgqlc.types.Field(sgqlc.types.non_null(MigrationSourceType), graphql_name="type") + """The Octoshift migration source type.""" + + url = sgqlc.types.Field(sgqlc.types.non_null(URI), graphql_name="url") + """The 
Octoshift migration source URL.""" + + +class Milestone(sgqlc.types.Type, Node, Closable, UniformResourceLocatable): + """Represents a Milestone object on a given repository.""" + + __schema__ = github_schema + __field_names__ = ( + "created_at", + "creator", + "description", + "due_on", + "issues", + "number", + "progress_percentage", + "pull_requests", + "repository", + "state", + "title", + "updated_at", + ) + created_at = sgqlc.types.Field(sgqlc.types.non_null(DateTime), graphql_name="createdAt") + """Identifies the date and time when the object was created.""" + + creator = sgqlc.types.Field(Actor, graphql_name="creator") + """Identifies the actor who created the milestone.""" + + description = sgqlc.types.Field(String, graphql_name="description") + """Identifies the description of the milestone.""" + + due_on = sgqlc.types.Field(DateTime, graphql_name="dueOn") + """Identifies the due date of the milestone.""" + + issues = sgqlc.types.Field( + sgqlc.types.non_null(IssueConnection), + graphql_name="issues", + args=sgqlc.types.ArgDict( + ( + ("order_by", sgqlc.types.Arg(IssueOrder, graphql_name="orderBy", default=None)), + ("labels", sgqlc.types.Arg(sgqlc.types.list_of(sgqlc.types.non_null(String)), graphql_name="labels", default=None)), + ("states", sgqlc.types.Arg(sgqlc.types.list_of(sgqlc.types.non_null(IssueState)), graphql_name="states", default=None)), + ("filter_by", sgqlc.types.Arg(IssueFilters, graphql_name="filterBy", default=None)), + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ) + ), + ) + """A list of issues associated with the milestone. + + Arguments: + + * `order_by` (`IssueOrder`): Ordering options for issues returned + from the connection. 
+ * `labels` (`[String!]`): A list of label names to filter the pull + requests by. + * `states` (`[IssueState!]`): A list of states to filter the + issues by. + * `filter_by` (`IssueFilters`): Filtering options for issues + returned from the connection. + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. + """ + + number = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="number") + """Identifies the number of the milestone.""" + + progress_percentage = sgqlc.types.Field(sgqlc.types.non_null(Float), graphql_name="progressPercentage") + """Identifies the percentage complete for the milestone""" + + pull_requests = sgqlc.types.Field( + sgqlc.types.non_null(PullRequestConnection), + graphql_name="pullRequests", + args=sgqlc.types.ArgDict( + ( + ( + "states", + sgqlc.types.Arg(sgqlc.types.list_of(sgqlc.types.non_null(PullRequestState)), graphql_name="states", default=None), + ), + ("labels", sgqlc.types.Arg(sgqlc.types.list_of(sgqlc.types.non_null(String)), graphql_name="labels", default=None)), + ("head_ref_name", sgqlc.types.Arg(String, graphql_name="headRefName", default=None)), + ("base_ref_name", sgqlc.types.Arg(String, graphql_name="baseRefName", default=None)), + ("order_by", sgqlc.types.Arg(IssueOrder, graphql_name="orderBy", default=None)), + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ) + ), + ) + """A list of pull requests associated with the milestone. 
+ + Arguments: + + * `states` (`[PullRequestState!]`): A list of states to filter the + pull requests by. + * `labels` (`[String!]`): A list of label names to filter the pull + requests by. + * `head_ref_name` (`String`): The head ref name to filter the pull + requests by. + * `base_ref_name` (`String`): The base ref name to filter the pull + requests by. + * `order_by` (`IssueOrder`): Ordering options for pull requests + returned from the connection. + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. + """ + + repository = sgqlc.types.Field(sgqlc.types.non_null("Repository"), graphql_name="repository") + """The repository associated with this milestone.""" + + state = sgqlc.types.Field(sgqlc.types.non_null(MilestoneState), graphql_name="state") + """Identifies the state of the milestone.""" + + title = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="title") + """Identifies the title of the milestone.""" + + updated_at = sgqlc.types.Field(sgqlc.types.non_null(DateTime), graphql_name="updatedAt") + """Identifies the date and time when the object was last updated.""" + + +class MilestonedEvent(sgqlc.types.Type, Node): + """Represents a 'milestoned' event on a given issue or pull request.""" + + __schema__ = github_schema + __field_names__ = ("actor", "created_at", "milestone_title", "subject") + actor = sgqlc.types.Field(Actor, graphql_name="actor") + """Identifies the actor who performed the event.""" + + created_at = sgqlc.types.Field(sgqlc.types.non_null(DateTime), graphql_name="createdAt") + """Identifies the date and time when the object was created.""" + + milestone_title = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="milestoneTitle") + """Identifies the 
milestone title associated with the 'milestoned' + event. + """ + + subject = sgqlc.types.Field(sgqlc.types.non_null("MilestoneItem"), graphql_name="subject") + """Object referenced by event.""" + + +class MovedColumnsInProjectEvent(sgqlc.types.Type, Node): + """Represents a 'moved_columns_in_project' event on a given issue or + pull request. + """ + + __schema__ = github_schema + __field_names__ = ("actor", "created_at", "database_id") + actor = sgqlc.types.Field(Actor, graphql_name="actor") + """Identifies the actor who performed the event.""" + + created_at = sgqlc.types.Field(sgqlc.types.non_null(DateTime), graphql_name="createdAt") + """Identifies the date and time when the object was created.""" + + database_id = sgqlc.types.Field(Int, graphql_name="databaseId") + """Identifies the primary key from the database.""" + + +class OIDCProvider(sgqlc.types.Type, Node): + """An OIDC identity provider configured to provision identities for + an enterprise. + """ + + __schema__ = github_schema + __field_names__ = ("enterprise", "external_identities", "provider_type", "tenant_id") + enterprise = sgqlc.types.Field(Enterprise, graphql_name="enterprise") + """The enterprise this identity provider belongs to.""" + + external_identities = sgqlc.types.Field( + sgqlc.types.non_null(ExternalIdentityConnection), + graphql_name="externalIdentities", + args=sgqlc.types.ArgDict( + ( + ("members_only", sgqlc.types.Arg(Boolean, graphql_name="membersOnly", default=None)), + ("login", sgqlc.types.Arg(String, graphql_name="login", default=None)), + ("user_name", sgqlc.types.Arg(String, graphql_name="userName", default=None)), + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ) + ), + ) + """ExternalIdentities provisioned by this identity provider. 
+ + Arguments: + + * `members_only` (`Boolean`): Filter to external identities with + valid org membership only + * `login` (`String`): Filter to external identities with the users + login + * `user_name` (`String`): Filter to external identities with the + users userName/NameID attribute + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. + """ + + provider_type = sgqlc.types.Field(sgqlc.types.non_null(OIDCProviderType), graphql_name="providerType") + """The OIDC identity provider type""" + + tenant_id = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="tenantId") + """The id of the tenant this provider is attached to""" + + +class OauthApplicationCreateAuditEntry(sgqlc.types.Type, Node, AuditEntry, OauthApplicationAuditEntryData, OrganizationAuditEntryData): + """Audit log entry for a oauth_application.create event.""" + + __schema__ = github_schema + __field_names__ = ("application_url", "callback_url", "rate_limit", "state") + application_url = sgqlc.types.Field(URI, graphql_name="applicationUrl") + """The application URL of the OAuth Application.""" + + callback_url = sgqlc.types.Field(URI, graphql_name="callbackUrl") + """The callback URL of the OAuth Application.""" + + rate_limit = sgqlc.types.Field(Int, graphql_name="rateLimit") + """The rate limit of the OAuth Application.""" + + state = sgqlc.types.Field(OauthApplicationCreateAuditEntryState, graphql_name="state") + """The state of the OAuth Application.""" + + +class OrgAddBillingManagerAuditEntry(sgqlc.types.Type, Node, AuditEntry, OrganizationAuditEntryData): + """Audit log entry for a org.add_billing_manager""" + + __schema__ = github_schema + __field_names__ = ("invitation_email",) + invitation_email = 
sgqlc.types.Field(String, graphql_name="invitationEmail") + """The email address used to invite a billing manager for the + organization. + """ + + +class OrgAddMemberAuditEntry(sgqlc.types.Type, Node, AuditEntry, OrganizationAuditEntryData): + """Audit log entry for a org.add_member""" + + __schema__ = github_schema + __field_names__ = ("permission",) + permission = sgqlc.types.Field(OrgAddMemberAuditEntryPermission, graphql_name="permission") + """The permission level of the member added to the organization.""" + + +class OrgBlockUserAuditEntry(sgqlc.types.Type, Node, AuditEntry, OrganizationAuditEntryData): + """Audit log entry for a org.block_user""" + + __schema__ = github_schema + __field_names__ = ("blocked_user", "blocked_user_name", "blocked_user_resource_path", "blocked_user_url") + blocked_user = sgqlc.types.Field("User", graphql_name="blockedUser") + """The blocked user.""" + + blocked_user_name = sgqlc.types.Field(String, graphql_name="blockedUserName") + """The username of the blocked user.""" + + blocked_user_resource_path = sgqlc.types.Field(URI, graphql_name="blockedUserResourcePath") + """The HTTP path for the blocked user.""" + + blocked_user_url = sgqlc.types.Field(URI, graphql_name="blockedUserUrl") + """The HTTP URL for the blocked user.""" + + +class OrgConfigDisableCollaboratorsOnlyAuditEntry(sgqlc.types.Type, Node, AuditEntry, OrganizationAuditEntryData): + """Audit log entry for a org.config.disable_collaborators_only event.""" + + __schema__ = github_schema + __field_names__ = () + + +class OrgConfigEnableCollaboratorsOnlyAuditEntry(sgqlc.types.Type, Node, AuditEntry, OrganizationAuditEntryData): + """Audit log entry for a org.config.enable_collaborators_only event.""" + + __schema__ = github_schema + __field_names__ = () + + +class OrgCreateAuditEntry(sgqlc.types.Type, Node, AuditEntry, OrganizationAuditEntryData): + """Audit log entry for a org.create event.""" + + __schema__ = github_schema + __field_names__ = ("billing_plan",) + 
billing_plan = sgqlc.types.Field(OrgCreateAuditEntryBillingPlan, graphql_name="billingPlan") + """The billing plan for the Organization.""" + + +class OrgDisableOauthAppRestrictionsAuditEntry(sgqlc.types.Type, Node, AuditEntry, OrganizationAuditEntryData): + """Audit log entry for a org.disable_oauth_app_restrictions event.""" + + __schema__ = github_schema + __field_names__ = () + + +class OrgDisableSamlAuditEntry(sgqlc.types.Type, Node, AuditEntry, OrganizationAuditEntryData): + """Audit log entry for a org.disable_saml event.""" + + __schema__ = github_schema + __field_names__ = ("digest_method_url", "issuer_url", "signature_method_url", "single_sign_on_url") + digest_method_url = sgqlc.types.Field(URI, graphql_name="digestMethodUrl") + """The SAML provider's digest algorithm URL.""" + + issuer_url = sgqlc.types.Field(URI, graphql_name="issuerUrl") + """The SAML provider's issuer URL.""" + + signature_method_url = sgqlc.types.Field(URI, graphql_name="signatureMethodUrl") + """The SAML provider's signature algorithm URL.""" + + single_sign_on_url = sgqlc.types.Field(URI, graphql_name="singleSignOnUrl") + """The SAML provider's single sign-on URL.""" + + +class OrgDisableTwoFactorRequirementAuditEntry(sgqlc.types.Type, Node, AuditEntry, OrganizationAuditEntryData): + """Audit log entry for a org.disable_two_factor_requirement event.""" + + __schema__ = github_schema + __field_names__ = () + + +class OrgEnableOauthAppRestrictionsAuditEntry(sgqlc.types.Type, Node, AuditEntry, OrganizationAuditEntryData): + """Audit log entry for a org.enable_oauth_app_restrictions event.""" + + __schema__ = github_schema + __field_names__ = () + + +class OrgEnableSamlAuditEntry(sgqlc.types.Type, Node, AuditEntry, OrganizationAuditEntryData): + """Audit log entry for a org.enable_saml event.""" + + __schema__ = github_schema + __field_names__ = ("digest_method_url", "issuer_url", "signature_method_url", "single_sign_on_url") + digest_method_url = sgqlc.types.Field(URI, 
graphql_name="digestMethodUrl") + """The SAML provider's digest algorithm URL.""" + + issuer_url = sgqlc.types.Field(URI, graphql_name="issuerUrl") + """The SAML provider's issuer URL.""" + + signature_method_url = sgqlc.types.Field(URI, graphql_name="signatureMethodUrl") + """The SAML provider's signature algorithm URL.""" + + single_sign_on_url = sgqlc.types.Field(URI, graphql_name="singleSignOnUrl") + """The SAML provider's single sign-on URL.""" + + +class OrgEnableTwoFactorRequirementAuditEntry(sgqlc.types.Type, Node, AuditEntry, OrganizationAuditEntryData): + """Audit log entry for a org.enable_two_factor_requirement event.""" + + __schema__ = github_schema + __field_names__ = () + + +class OrgInviteMemberAuditEntry(sgqlc.types.Type, Node, AuditEntry, OrganizationAuditEntryData): + """Audit log entry for a org.invite_member event.""" + + __schema__ = github_schema + __field_names__ = ("email", "organization_invitation") + email = sgqlc.types.Field(String, graphql_name="email") + """The email address of the organization invitation.""" + + organization_invitation = sgqlc.types.Field("OrganizationInvitation", graphql_name="organizationInvitation") + """The organization invitation.""" + + +class OrgInviteToBusinessAuditEntry(sgqlc.types.Type, Node, AuditEntry, EnterpriseAuditEntryData, OrganizationAuditEntryData): + """Audit log entry for a org.invite_to_business event.""" + + __schema__ = github_schema + __field_names__ = () + + +class OrgOauthAppAccessApprovedAuditEntry(sgqlc.types.Type, Node, AuditEntry, OauthApplicationAuditEntryData, OrganizationAuditEntryData): + """Audit log entry for a org.oauth_app_access_approved event.""" + + __schema__ = github_schema + __field_names__ = () + + +class OrgOauthAppAccessDeniedAuditEntry(sgqlc.types.Type, Node, AuditEntry, OauthApplicationAuditEntryData, OrganizationAuditEntryData): + """Audit log entry for a org.oauth_app_access_denied event.""" + + __schema__ = github_schema + __field_names__ = () + + +class 
OrgOauthAppAccessRequestedAuditEntry(sgqlc.types.Type, Node, AuditEntry, OauthApplicationAuditEntryData, OrganizationAuditEntryData): + """Audit log entry for a org.oauth_app_access_requested event.""" + + __schema__ = github_schema + __field_names__ = () + + +class OrgRemoveBillingManagerAuditEntry(sgqlc.types.Type, Node, AuditEntry, OrganizationAuditEntryData): + """Audit log entry for a org.remove_billing_manager event.""" + + __schema__ = github_schema + __field_names__ = ("reason",) + reason = sgqlc.types.Field(OrgRemoveBillingManagerAuditEntryReason, graphql_name="reason") + """The reason for the billing manager being removed.""" + + +class OrgRemoveMemberAuditEntry(sgqlc.types.Type, Node, AuditEntry, OrganizationAuditEntryData): + """Audit log entry for a org.remove_member event.""" + + __schema__ = github_schema + __field_names__ = ("membership_types", "reason") + membership_types = sgqlc.types.Field( + sgqlc.types.list_of(sgqlc.types.non_null(OrgRemoveMemberAuditEntryMembershipType)), graphql_name="membershipTypes" + ) + """The types of membership the member has with the organization.""" + + reason = sgqlc.types.Field(OrgRemoveMemberAuditEntryReason, graphql_name="reason") + """The reason for the member being removed.""" + + +class OrgRemoveOutsideCollaboratorAuditEntry(sgqlc.types.Type, Node, AuditEntry, OrganizationAuditEntryData): + """Audit log entry for a org.remove_outside_collaborator event.""" + + __schema__ = github_schema + __field_names__ = ("membership_types", "reason") + membership_types = sgqlc.types.Field( + sgqlc.types.list_of(sgqlc.types.non_null(OrgRemoveOutsideCollaboratorAuditEntryMembershipType)), graphql_name="membershipTypes" + ) + """The types of membership the outside collaborator has with the + organization. + """ + + reason = sgqlc.types.Field(OrgRemoveOutsideCollaboratorAuditEntryReason, graphql_name="reason") + """The reason for the outside collaborator being removed from the + Organization. 
+ """ + + +class OrgRestoreMemberAuditEntry(sgqlc.types.Type, Node, AuditEntry, OrganizationAuditEntryData): + """Audit log entry for a org.restore_member event.""" + + __schema__ = github_schema + __field_names__ = ( + "restored_custom_email_routings_count", + "restored_issue_assignments_count", + "restored_memberships", + "restored_memberships_count", + "restored_repositories_count", + "restored_repository_stars_count", + "restored_repository_watches_count", + ) + restored_custom_email_routings_count = sgqlc.types.Field(Int, graphql_name="restoredCustomEmailRoutingsCount") + """The number of custom email routings for the restored member.""" + + restored_issue_assignments_count = sgqlc.types.Field(Int, graphql_name="restoredIssueAssignmentsCount") + """The number of issue assignments for the restored member.""" + + restored_memberships = sgqlc.types.Field( + sgqlc.types.list_of(sgqlc.types.non_null("OrgRestoreMemberAuditEntryMembership")), graphql_name="restoredMemberships" + ) + """Restored organization membership objects.""" + + restored_memberships_count = sgqlc.types.Field(Int, graphql_name="restoredMembershipsCount") + """The number of restored memberships.""" + + restored_repositories_count = sgqlc.types.Field(Int, graphql_name="restoredRepositoriesCount") + """The number of repositories of the restored member.""" + + restored_repository_stars_count = sgqlc.types.Field(Int, graphql_name="restoredRepositoryStarsCount") + """The number of starred repositories for the restored member.""" + + restored_repository_watches_count = sgqlc.types.Field(Int, graphql_name="restoredRepositoryWatchesCount") + """The number of watched repositories for the restored member.""" + + +class OrgRestoreMemberMembershipOrganizationAuditEntryData(sgqlc.types.Type, OrganizationAuditEntryData): + """Metadata for an organization membership for org.restore_member + actions + """ + + __schema__ = github_schema + __field_names__ = () + + +class 
OrgRestoreMemberMembershipRepositoryAuditEntryData(sgqlc.types.Type, RepositoryAuditEntryData): + """Metadata for a repository membership for org.restore_member + actions + """ + + __schema__ = github_schema + __field_names__ = () + + +class OrgRestoreMemberMembershipTeamAuditEntryData(sgqlc.types.Type, TeamAuditEntryData): + """Metadata for a team membership for org.restore_member actions""" + + __schema__ = github_schema + __field_names__ = () + + +class OrgUnblockUserAuditEntry(sgqlc.types.Type, Node, AuditEntry, OrganizationAuditEntryData): + """Audit log entry for a org.unblock_user""" + + __schema__ = github_schema + __field_names__ = ("blocked_user", "blocked_user_name", "blocked_user_resource_path", "blocked_user_url") + blocked_user = sgqlc.types.Field("User", graphql_name="blockedUser") + """The user being unblocked by the organization.""" + + blocked_user_name = sgqlc.types.Field(String, graphql_name="blockedUserName") + """The username of the blocked user.""" + + blocked_user_resource_path = sgqlc.types.Field(URI, graphql_name="blockedUserResourcePath") + """The HTTP path for the blocked user.""" + + blocked_user_url = sgqlc.types.Field(URI, graphql_name="blockedUserUrl") + """The HTTP URL for the blocked user.""" + + +class OrgUpdateDefaultRepositoryPermissionAuditEntry(sgqlc.types.Type, Node, AuditEntry, OrganizationAuditEntryData): + """Audit log entry for a org.update_default_repository_permission""" + + __schema__ = github_schema + __field_names__ = ("permission", "permission_was") + permission = sgqlc.types.Field(OrgUpdateDefaultRepositoryPermissionAuditEntryPermission, graphql_name="permission") + """The new base repository permission level for the organization.""" + + permission_was = sgqlc.types.Field(OrgUpdateDefaultRepositoryPermissionAuditEntryPermission, graphql_name="permissionWas") + """The former base repository permission level for the organization.""" + + +class OrgUpdateMemberAuditEntry(sgqlc.types.Type, Node, AuditEntry, 
OrganizationAuditEntryData): + """Audit log entry for a org.update_member event.""" + + __schema__ = github_schema + __field_names__ = ("permission", "permission_was") + permission = sgqlc.types.Field(OrgUpdateMemberAuditEntryPermission, graphql_name="permission") + """The new member permission level for the organization.""" + + permission_was = sgqlc.types.Field(OrgUpdateMemberAuditEntryPermission, graphql_name="permissionWas") + """The former member permission level for the organization.""" + + +class OrgUpdateMemberRepositoryCreationPermissionAuditEntry(sgqlc.types.Type, Node, AuditEntry, OrganizationAuditEntryData): + """Audit log entry for a + org.update_member_repository_creation_permission event. + """ + + __schema__ = github_schema + __field_names__ = ("can_create_repositories", "visibility") + can_create_repositories = sgqlc.types.Field(Boolean, graphql_name="canCreateRepositories") + """Can members create repositories in the organization.""" + + visibility = sgqlc.types.Field(OrgUpdateMemberRepositoryCreationPermissionAuditEntryVisibility, graphql_name="visibility") + """The permission for visibility level of repositories for this + organization. + """ + + +class OrgUpdateMemberRepositoryInvitationPermissionAuditEntry(sgqlc.types.Type, Node, AuditEntry, OrganizationAuditEntryData): + """Audit log entry for a + org.update_member_repository_invitation_permission event. + """ + + __schema__ = github_schema + __field_names__ = ("can_invite_outside_collaborators_to_repositories",) + can_invite_outside_collaborators_to_repositories = sgqlc.types.Field( + Boolean, graphql_name="canInviteOutsideCollaboratorsToRepositories" + ) + """Can outside collaborators be invited to repositories in the + organization. 
+ """ + + +class Organization( + sgqlc.types.Type, + Node, + Actor, + PackageOwner, + ProjectOwner, + ProjectNextOwner, + RepositoryDiscussionAuthor, + RepositoryDiscussionCommentAuthor, + RepositoryOwner, + UniformResourceLocatable, + MemberStatusable, + ProfileOwner, + Sponsorable, +): + """An account on GitHub, with one or more owners, that has + repositories, members and teams. + """ + + __schema__ = github_schema + __field_names__ = ( + "audit_log", + "created_at", + "database_id", + "description", + "description_html", + "domains", + "enterprise_owners", + "interaction_ability", + "ip_allow_list_enabled_setting", + "ip_allow_list_entries", + "ip_allow_list_for_installed_apps_enabled_setting", + "is_verified", + "members_can_fork_private_repositories", + "members_with_role", + "new_team_resource_path", + "new_team_url", + "notification_delivery_restriction_enabled_setting", + "organization_billing_email", + "pending_members", + "repository_migrations", + "requires_two_factor_authentication", + "saml_identity_provider", + "team", + "teams", + "teams_resource_path", + "teams_url", + "twitter_username", + "updated_at", + "viewer_can_administer", + "viewer_can_create_repositories", + "viewer_can_create_teams", + "viewer_is_amember", + "viewer_is_following", + ) + audit_log = sgqlc.types.Field( + sgqlc.types.non_null(OrganizationAuditEntryConnection), + graphql_name="auditLog", + args=sgqlc.types.ArgDict( + ( + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ("query", sgqlc.types.Arg(String, graphql_name="query", default=None)), + ("order_by", sgqlc.types.Arg(AuditLogOrder, graphql_name="orderBy", default={"field": "CREATED_AT", "direction": "DESC"})), + ) + ), + ) + """Audit log entries of the organization + + Arguments: + + * 
`after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. + * `query` (`String`): The query string to filter audit entries + * `order_by` (`AuditLogOrder`): Ordering options for the returned + audit log entries. (default: `{field: CREATED_AT, direction: + DESC}`) + """ + + created_at = sgqlc.types.Field(sgqlc.types.non_null(DateTime), graphql_name="createdAt") + """Identifies the date and time when the object was created.""" + + database_id = sgqlc.types.Field(Int, graphql_name="databaseId") + """Identifies the primary key from the database.""" + + description = sgqlc.types.Field(String, graphql_name="description") + """The organization's public profile description.""" + + description_html = sgqlc.types.Field(String, graphql_name="descriptionHTML") + """The organization's public profile description rendered to HTML.""" + + domains = sgqlc.types.Field( + VerifiableDomainConnection, + graphql_name="domains", + args=sgqlc.types.ArgDict( + ( + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ("is_verified", sgqlc.types.Arg(Boolean, graphql_name="isVerified", default=None)), + ("is_approved", sgqlc.types.Arg(Boolean, graphql_name="isApproved", default=None)), + ( + "order_by", + sgqlc.types.Arg(VerifiableDomainOrder, graphql_name="orderBy", default={"field": "DOMAIN", "direction": "ASC"}), + ), + ) + ), + ) + """A list of domains owned by the organization. + + Arguments: + + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. 
+ * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. + * `is_verified` (`Boolean`): Filter by if the domain is verified. + (default: `null`) + * `is_approved` (`Boolean`): Filter by if the domain is approved. + (default: `null`) + * `order_by` (`VerifiableDomainOrder`): Ordering options for + verifiable domains returned. (default: `{field: DOMAIN, + direction: ASC}`) + """ + + enterprise_owners = sgqlc.types.Field( + sgqlc.types.non_null(OrganizationEnterpriseOwnerConnection), + graphql_name="enterpriseOwners", + args=sgqlc.types.ArgDict( + ( + ("query", sgqlc.types.Arg(String, graphql_name="query", default=None)), + ("organization_role", sgqlc.types.Arg(RoleInOrganization, graphql_name="organizationRole", default=None)), + ( + "order_by", + sgqlc.types.Arg(OrgEnterpriseOwnerOrder, graphql_name="orderBy", default={"field": "LOGIN", "direction": "ASC"}), + ), + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ) + ), + ) + """A list of owners of the organization's enterprise account. + + Arguments: + + * `query` (`String`): The search string to look for. + * `organization_role` (`RoleInOrganization`): The organization + role to filter by. + * `order_by` (`OrgEnterpriseOwnerOrder`): Ordering options for + enterprise owners returned from the connection. (default: + `{field: LOGIN, direction: ASC}`) + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. 
+ * `last` (`Int`): Returns the last _n_ elements from the list. + """ + + interaction_ability = sgqlc.types.Field(RepositoryInteractionAbility, graphql_name="interactionAbility") + """The interaction ability settings for this organization.""" + + ip_allow_list_enabled_setting = sgqlc.types.Field( + sgqlc.types.non_null(IpAllowListEnabledSettingValue), graphql_name="ipAllowListEnabledSetting" + ) + """The setting value for whether the organization has an IP allow + list enabled. + """ + + ip_allow_list_entries = sgqlc.types.Field( + sgqlc.types.non_null(IpAllowListEntryConnection), + graphql_name="ipAllowListEntries", + args=sgqlc.types.ArgDict( + ( + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ( + "order_by", + sgqlc.types.Arg( + IpAllowListEntryOrder, graphql_name="orderBy", default={"field": "ALLOW_LIST_VALUE", "direction": "ASC"} + ), + ), + ) + ), + ) + """The IP addresses that are allowed to access resources owned by the + organization. + + Arguments: + + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. + * `order_by` (`IpAllowListEntryOrder`): Ordering options for IP + allow list entries returned. 
(default: `{field: + ALLOW_LIST_VALUE, direction: ASC}`) + """ + + ip_allow_list_for_installed_apps_enabled_setting = sgqlc.types.Field( + sgqlc.types.non_null(IpAllowListForInstalledAppsEnabledSettingValue), graphql_name="ipAllowListForInstalledAppsEnabledSetting" + ) + """The setting value for whether the organization has IP allow list + configuration for installed GitHub Apps enabled. + """ + + is_verified = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="isVerified") + """Whether the organization has verified its profile email and + website. + """ + + members_can_fork_private_repositories = sgqlc.types.Field( + sgqlc.types.non_null(Boolean), graphql_name="membersCanForkPrivateRepositories" + ) + """Members can fork private repositories in this organization""" + + members_with_role = sgqlc.types.Field( + sgqlc.types.non_null(OrganizationMemberConnection), + graphql_name="membersWithRole", + args=sgqlc.types.ArgDict( + ( + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ) + ), + ) + """A list of users who are members of this organization. + + Arguments: + + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. 
+ """ + + new_team_resource_path = sgqlc.types.Field(sgqlc.types.non_null(URI), graphql_name="newTeamResourcePath") + """The HTTP path creating a new team""" + + new_team_url = sgqlc.types.Field(sgqlc.types.non_null(URI), graphql_name="newTeamUrl") + """The HTTP URL creating a new team""" + + notification_delivery_restriction_enabled_setting = sgqlc.types.Field( + sgqlc.types.non_null(NotificationRestrictionSettingValue), graphql_name="notificationDeliveryRestrictionEnabledSetting" + ) + """Indicates if email notification delivery for this organization is + restricted to verified or approved domains. + """ + + organization_billing_email = sgqlc.types.Field(String, graphql_name="organizationBillingEmail") + """The billing email for the organization.""" + + pending_members = sgqlc.types.Field( + sgqlc.types.non_null(UserConnection), + graphql_name="pendingMembers", + args=sgqlc.types.ArgDict( + ( + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ) + ), + ) + """A list of users who have been invited to join this organization. + + Arguments: + + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. 
+ """ + + repository_migrations = sgqlc.types.Field( + sgqlc.types.non_null(RepositoryMigrationConnection), + graphql_name="repositoryMigrations", + args=sgqlc.types.ArgDict( + ( + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ("state", sgqlc.types.Arg(MigrationState, graphql_name="state", default=None)), + ("repository_name", sgqlc.types.Arg(String, graphql_name="repositoryName", default=None)), + ( + "order_by", + sgqlc.types.Arg(RepositoryMigrationOrder, graphql_name="orderBy", default={"field": "CREATED_AT", "direction": "ASC"}), + ), + ) + ), + ) + """A list of all repository migrations for this organization. + + Arguments: + + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. + * `state` (`MigrationState`): Filter repository migrations by + state. + * `repository_name` (`String`): Filter repository migrations by + repository name. + * `order_by` (`RepositoryMigrationOrder`): Ordering options for + repository migrations returned. (default: `{field: CREATED_AT, + direction: ASC}`) + """ + + requires_two_factor_authentication = sgqlc.types.Field(Boolean, graphql_name="requiresTwoFactorAuthentication") + """When true the organization requires all members, billing managers, + and outside collaborators to enable two-factor authentication. 
+ """ + + saml_identity_provider = sgqlc.types.Field("OrganizationIdentityProvider", graphql_name="samlIdentityProvider") + """The Organization's SAML identity providers""" + + team = sgqlc.types.Field( + "Team", + graphql_name="team", + args=sgqlc.types.ArgDict((("slug", sgqlc.types.Arg(sgqlc.types.non_null(String), graphql_name="slug", default=None)),)), + ) + """Find an organization's team by its slug. + + Arguments: + + * `slug` (`String!`): The name or slug of the team to find. + """ + + teams = sgqlc.types.Field( + sgqlc.types.non_null(TeamConnection), + graphql_name="teams", + args=sgqlc.types.ArgDict( + ( + ("privacy", sgqlc.types.Arg(TeamPrivacy, graphql_name="privacy", default=None)), + ("role", sgqlc.types.Arg(TeamRole, graphql_name="role", default=None)), + ("query", sgqlc.types.Arg(String, graphql_name="query", default=None)), + ( + "user_logins", + sgqlc.types.Arg(sgqlc.types.list_of(sgqlc.types.non_null(String)), graphql_name="userLogins", default=None), + ), + ("order_by", sgqlc.types.Arg(TeamOrder, graphql_name="orderBy", default=None)), + ("ldap_mapped", sgqlc.types.Arg(Boolean, graphql_name="ldapMapped", default=None)), + ("root_teams_only", sgqlc.types.Arg(Boolean, graphql_name="rootTeamsOnly", default=False)), + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ) + ), + ) + """A list of teams in this organization. 
+ + Arguments: + + * `privacy` (`TeamPrivacy`): If non-null, filters teams according + to privacy + * `role` (`TeamRole`): If non-null, filters teams according to + whether the viewer is an admin or member on team + * `query` (`String`): If non-null, filters teams with query on + team name and team slug + * `user_logins` (`[String!]`): User logins to filter by + * `order_by` (`TeamOrder`): Ordering options for teams returned + from the connection + * `ldap_mapped` (`Boolean`): If true, filters teams that are + mapped to an LDAP Group (Enterprise only) + * `root_teams_only` (`Boolean`): If true, restrict to only root + teams (default: `false`) + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. + """ + + teams_resource_path = sgqlc.types.Field(sgqlc.types.non_null(URI), graphql_name="teamsResourcePath") + """The HTTP path listing organization's teams""" + + teams_url = sgqlc.types.Field(sgqlc.types.non_null(URI), graphql_name="teamsUrl") + """The HTTP URL listing organization's teams""" + + twitter_username = sgqlc.types.Field(String, graphql_name="twitterUsername") + """The organization's Twitter username.""" + + updated_at = sgqlc.types.Field(sgqlc.types.non_null(DateTime), graphql_name="updatedAt") + """Identifies the date and time when the object was last updated.""" + + viewer_can_administer = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="viewerCanAdminister") + """Organization is adminable by the viewer.""" + + viewer_can_create_repositories = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="viewerCanCreateRepositories") + """Viewer can create repositories on this organization""" + + viewer_can_create_teams = sgqlc.types.Field(sgqlc.types.non_null(Boolean), 
graphql_name="viewerCanCreateTeams") + """Viewer can create teams on this organization.""" + + viewer_is_amember = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="viewerIsAMember") + """Viewer is an active member of this organization.""" + + viewer_is_following = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="viewerIsFollowing") + """Whether or not this Organization is followed by the viewer.""" + + +class OrganizationIdentityProvider(sgqlc.types.Type, Node): + """An Identity Provider configured to provision SAML and SCIM + identities for Organizations + """ + + __schema__ = github_schema + __field_names__ = ("digest_method", "external_identities", "idp_certificate", "issuer", "organization", "signature_method", "sso_url") + digest_method = sgqlc.types.Field(URI, graphql_name="digestMethod") + """The digest algorithm used to sign SAML requests for the Identity + Provider. + """ + + external_identities = sgqlc.types.Field( + sgqlc.types.non_null(ExternalIdentityConnection), + graphql_name="externalIdentities", + args=sgqlc.types.ArgDict( + ( + ("members_only", sgqlc.types.Arg(Boolean, graphql_name="membersOnly", default=None)), + ("login", sgqlc.types.Arg(String, graphql_name="login", default=None)), + ("user_name", sgqlc.types.Arg(String, graphql_name="userName", default=None)), + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ) + ), + ) + """External Identities provisioned by this Identity Provider + + Arguments: + + * `members_only` (`Boolean`): Filter to external identities with + valid org membership only + * `login` (`String`): Filter to external identities with the users + login + * `user_name` (`String`): Filter to external identities with the + users userName/NameID attribute + * `after` 
(`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. + """ + + idp_certificate = sgqlc.types.Field(X509Certificate, graphql_name="idpCertificate") + """The x509 certificate used by the Identity Provider to sign + assertions and responses. + """ + + issuer = sgqlc.types.Field(String, graphql_name="issuer") + """The Issuer Entity ID for the SAML Identity Provider""" + + organization = sgqlc.types.Field(Organization, graphql_name="organization") + """Organization this Identity Provider belongs to""" + + signature_method = sgqlc.types.Field(URI, graphql_name="signatureMethod") + """The signature algorithm used to sign SAML requests for the + Identity Provider. + """ + + sso_url = sgqlc.types.Field(URI, graphql_name="ssoUrl") + """The URL endpoint for the Identity Provider's SAML SSO.""" + + +class OrganizationInvitation(sgqlc.types.Type, Node): + """An Invitation for a user to an organization.""" + + __schema__ = github_schema + __field_names__ = ("created_at", "email", "invitation_type", "invitee", "inviter", "organization", "role") + created_at = sgqlc.types.Field(sgqlc.types.non_null(DateTime), graphql_name="createdAt") + """Identifies the date and time when the object was created.""" + + email = sgqlc.types.Field(String, graphql_name="email") + """The email address of the user invited to the organization.""" + + invitation_type = sgqlc.types.Field(sgqlc.types.non_null(OrganizationInvitationType), graphql_name="invitationType") + """The type of invitation that was sent (e.g. 
email, user).""" + + invitee = sgqlc.types.Field("User", graphql_name="invitee") + """The user who was invited to the organization.""" + + inviter = sgqlc.types.Field(sgqlc.types.non_null("User"), graphql_name="inviter") + """The user who created the invitation.""" + + organization = sgqlc.types.Field(sgqlc.types.non_null(Organization), graphql_name="organization") + """The organization the invite is for""" + + role = sgqlc.types.Field(sgqlc.types.non_null(OrganizationInvitationRole), graphql_name="role") + """The user's pending role in the organization (e.g. member, owner).""" + + +class OrganizationTeamsHovercardContext(sgqlc.types.Type, HovercardContext): + """An organization teams hovercard context""" + + __schema__ = github_schema + __field_names__ = ("relevant_teams", "teams_resource_path", "teams_url", "total_team_count") + relevant_teams = sgqlc.types.Field( + sgqlc.types.non_null(TeamConnection), + graphql_name="relevantTeams", + args=sgqlc.types.ArgDict( + ( + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ) + ), + ) + """Teams in this organization the user is a member of that are + relevant + + Arguments: + + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. 
+ """ + + teams_resource_path = sgqlc.types.Field(sgqlc.types.non_null(URI), graphql_name="teamsResourcePath") + """The path for the full team list for this user""" + + teams_url = sgqlc.types.Field(sgqlc.types.non_null(URI), graphql_name="teamsUrl") + """The URL for the full team list for this user""" + + total_team_count = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="totalTeamCount") + """The total number of teams the user is on in the organization""" + + +class OrganizationsHovercardContext(sgqlc.types.Type, HovercardContext): + """An organization list hovercard context""" + + __schema__ = github_schema + __field_names__ = ("relevant_organizations", "total_organization_count") + relevant_organizations = sgqlc.types.Field( + sgqlc.types.non_null(OrganizationConnection), + graphql_name="relevantOrganizations", + args=sgqlc.types.ArgDict( + ( + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ) + ), + ) + """Organizations this user is a member of that are relevant + + Arguments: + + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. 
+ """ + + total_organization_count = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="totalOrganizationCount") + """The total number of organizations this user is in""" + + +class Package(sgqlc.types.Type, Node): + """Information for an uploaded package.""" + + __schema__ = github_schema + __field_names__ = ("latest_version", "name", "package_type", "repository", "statistics", "version", "versions") + latest_version = sgqlc.types.Field("PackageVersion", graphql_name="latestVersion") + """Find the latest version for the package.""" + + name = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="name") + """Identifies the name of the package.""" + + package_type = sgqlc.types.Field(sgqlc.types.non_null(PackageType), graphql_name="packageType") + """Identifies the type of the package.""" + + repository = sgqlc.types.Field("Repository", graphql_name="repository") + """The repository this package belongs to.""" + + statistics = sgqlc.types.Field(PackageStatistics, graphql_name="statistics") + """Statistics about package activity.""" + + version = sgqlc.types.Field( + "PackageVersion", + graphql_name="version", + args=sgqlc.types.ArgDict((("version", sgqlc.types.Arg(sgqlc.types.non_null(String), graphql_name="version", default=None)),)), + ) + """Find package version by version string. + + Arguments: + + * `version` (`String!`): The package version. 
+ """ + + versions = sgqlc.types.Field( + sgqlc.types.non_null(PackageVersionConnection), + graphql_name="versions", + args=sgqlc.types.ArgDict( + ( + ( + "order_by", + sgqlc.types.Arg(PackageVersionOrder, graphql_name="orderBy", default={"field": "CREATED_AT", "direction": "DESC"}), + ), + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ) + ), + ) + """list of versions for this package + + Arguments: + + * `order_by` (`PackageVersionOrder`): Ordering of the returned + packages. (default: `{field: CREATED_AT, direction: DESC}`) + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. 
+ """ + + +class PackageFile(sgqlc.types.Type, Node): + """A file in a package version.""" + + __schema__ = github_schema + __field_names__ = ("md5", "name", "package_version", "sha1", "sha256", "size", "updated_at", "url") + md5 = sgqlc.types.Field(String, graphql_name="md5") + """MD5 hash of the file.""" + + name = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="name") + """Name of the file.""" + + package_version = sgqlc.types.Field("PackageVersion", graphql_name="packageVersion") + """The package version this file belongs to.""" + + sha1 = sgqlc.types.Field(String, graphql_name="sha1") + """SHA1 hash of the file.""" + + sha256 = sgqlc.types.Field(String, graphql_name="sha256") + """SHA256 hash of the file.""" + + size = sgqlc.types.Field(Int, graphql_name="size") + """Size of the file in bytes.""" + + updated_at = sgqlc.types.Field(sgqlc.types.non_null(DateTime), graphql_name="updatedAt") + """Identifies the date and time when the object was last updated.""" + + url = sgqlc.types.Field(URI, graphql_name="url") + """URL to download the asset.""" + + +class PackageTag(sgqlc.types.Type, Node): + """A version tag contains the mapping between a tag name and a + version. 
+ """ + + __schema__ = github_schema + __field_names__ = ("name", "version") + name = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="name") + """Identifies the tag name of the version.""" + + version = sgqlc.types.Field("PackageVersion", graphql_name="version") + """Version that the tag is associated with.""" + + +class PackageVersion(sgqlc.types.Type, Node): + """Information about a specific package version.""" + + __schema__ = github_schema + __field_names__ = ("files", "package", "platform", "pre_release", "readme", "release", "statistics", "summary", "version") + files = sgqlc.types.Field( + sgqlc.types.non_null(PackageFileConnection), + graphql_name="files", + args=sgqlc.types.ArgDict( + ( + ( + "order_by", + sgqlc.types.Arg(PackageFileOrder, graphql_name="orderBy", default={"field": "CREATED_AT", "direction": "ASC"}), + ), + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ) + ), + ) + """List of files associated with this package version + + Arguments: + + * `order_by` (`PackageFileOrder`): Ordering of the returned + package files. (default: `{field: CREATED_AT, direction: ASC}`) + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. 
+ """ + + package = sgqlc.types.Field(Package, graphql_name="package") + """The package associated with this version.""" + + platform = sgqlc.types.Field(String, graphql_name="platform") + """The platform this version was built for.""" + + pre_release = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="preRelease") + """Whether or not this version is a pre-release.""" + + readme = sgqlc.types.Field(String, graphql_name="readme") + """The README of this package version.""" + + release = sgqlc.types.Field("Release", graphql_name="release") + """The release associated with this package version.""" + + statistics = sgqlc.types.Field(PackageVersionStatistics, graphql_name="statistics") + """Statistics about package activity.""" + + summary = sgqlc.types.Field(String, graphql_name="summary") + """The package version summary.""" + + version = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="version") + """The version string.""" + + +class PinnedDiscussion(sgqlc.types.Type, Node, RepositoryNode): + """A Pinned Discussion is a discussion pinned to a repository's index + page. 
+ """ + + __schema__ = github_schema + __field_names__ = ( + "created_at", + "database_id", + "discussion", + "gradient_stop_colors", + "pattern", + "pinned_by", + "preconfigured_gradient", + "updated_at", + ) + created_at = sgqlc.types.Field(sgqlc.types.non_null(DateTime), graphql_name="createdAt") + """Identifies the date and time when the object was created.""" + + database_id = sgqlc.types.Field(Int, graphql_name="databaseId") + """Identifies the primary key from the database.""" + + discussion = sgqlc.types.Field(sgqlc.types.non_null(Discussion), graphql_name="discussion") + """The discussion that was pinned.""" + + gradient_stop_colors = sgqlc.types.Field( + sgqlc.types.non_null(sgqlc.types.list_of(sgqlc.types.non_null(String))), graphql_name="gradientStopColors" + ) + """Color stops of the chosen gradient""" + + pattern = sgqlc.types.Field(sgqlc.types.non_null(PinnedDiscussionPattern), graphql_name="pattern") + """Background texture pattern""" + + pinned_by = sgqlc.types.Field(sgqlc.types.non_null(Actor), graphql_name="pinnedBy") + """The actor that pinned this discussion.""" + + preconfigured_gradient = sgqlc.types.Field(PinnedDiscussionGradient, graphql_name="preconfiguredGradient") + """Preconfigured background gradient option""" + + updated_at = sgqlc.types.Field(sgqlc.types.non_null(DateTime), graphql_name="updatedAt") + """Identifies the date and time when the object was last updated.""" + + +class PinnedEvent(sgqlc.types.Type, Node): + """Represents a 'pinned' event on a given issue or pull request.""" + + __schema__ = github_schema + __field_names__ = ("actor", "created_at", "issue") + actor = sgqlc.types.Field(Actor, graphql_name="actor") + """Identifies the actor who performed the event.""" + + created_at = sgqlc.types.Field(sgqlc.types.non_null(DateTime), graphql_name="createdAt") + """Identifies the date and time when the object was created.""" + + issue = sgqlc.types.Field(sgqlc.types.non_null(Issue), graphql_name="issue") + """Identifies the 
issue associated with the event.""" + + +class PinnedIssue(sgqlc.types.Type, Node): + """A Pinned Issue is a issue pinned to a repository's index page.""" + + __schema__ = github_schema + __field_names__ = ("database_id", "issue", "pinned_by", "repository") + database_id = sgqlc.types.Field(Int, graphql_name="databaseId") + """Identifies the primary key from the database.""" + + issue = sgqlc.types.Field(sgqlc.types.non_null(Issue), graphql_name="issue") + """The issue that was pinned.""" + + pinned_by = sgqlc.types.Field(sgqlc.types.non_null(Actor), graphql_name="pinnedBy") + """The actor that pinned this issue.""" + + repository = sgqlc.types.Field(sgqlc.types.non_null("Repository"), graphql_name="repository") + """The repository that this issue was pinned to.""" + + +class PrivateRepositoryForkingDisableAuditEntry( + sgqlc.types.Type, Node, AuditEntry, EnterpriseAuditEntryData, OrganizationAuditEntryData, RepositoryAuditEntryData +): + """Audit log entry for a private_repository_forking.disable event.""" + + __schema__ = github_schema + __field_names__ = () + + +class PrivateRepositoryForkingEnableAuditEntry( + sgqlc.types.Type, Node, AuditEntry, EnterpriseAuditEntryData, OrganizationAuditEntryData, RepositoryAuditEntryData +): + """Audit log entry for a private_repository_forking.enable event.""" + + __schema__ = github_schema + __field_names__ = () + + +class Project(sgqlc.types.Type, Node, Closable, Updatable): + """Projects manage issues, pull requests and notes within a project + owner. 
+ """ + + __schema__ = github_schema + __field_names__ = ( + "body", + "body_html", + "columns", + "created_at", + "creator", + "database_id", + "name", + "number", + "owner", + "pending_cards", + "progress", + "resource_path", + "state", + "updated_at", + "url", + ) + body = sgqlc.types.Field(String, graphql_name="body") + """The project's description body.""" + + body_html = sgqlc.types.Field(sgqlc.types.non_null(HTML), graphql_name="bodyHTML") + """The projects description body rendered to HTML.""" + + columns = sgqlc.types.Field( + sgqlc.types.non_null(ProjectColumnConnection), + graphql_name="columns", + args=sgqlc.types.ArgDict( + ( + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ) + ), + ) + """List of columns in the project + + Arguments: + + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. 
+ """ + + created_at = sgqlc.types.Field(sgqlc.types.non_null(DateTime), graphql_name="createdAt") + """Identifies the date and time when the object was created.""" + + creator = sgqlc.types.Field(Actor, graphql_name="creator") + """The actor who originally created the project.""" + + database_id = sgqlc.types.Field(Int, graphql_name="databaseId") + """Identifies the primary key from the database.""" + + name = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="name") + """The project's name.""" + + number = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="number") + """The project's number.""" + + owner = sgqlc.types.Field(sgqlc.types.non_null(ProjectOwner), graphql_name="owner") + """The project's owner. Currently limited to repositories, + organizations, and users. + """ + + pending_cards = sgqlc.types.Field( + sgqlc.types.non_null(ProjectCardConnection), + graphql_name="pendingCards", + args=sgqlc.types.ArgDict( + ( + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ( + "archived_states", + sgqlc.types.Arg( + sgqlc.types.list_of(ProjectCardArchivedState), graphql_name="archivedStates", default=("ARCHIVED", "NOT_ARCHIVED") + ), + ), + ) + ), + ) + """List of pending cards in this project + + Arguments: + + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. 
+ * `archived_states` (`[ProjectCardArchivedState]`): A list of + archived states to filter the cards by (default: `[ARCHIVED, + NOT_ARCHIVED]`) + """ + + progress = sgqlc.types.Field(sgqlc.types.non_null(ProjectProgress), graphql_name="progress") + """Project progress details.""" + + resource_path = sgqlc.types.Field(sgqlc.types.non_null(URI), graphql_name="resourcePath") + """The HTTP path for this project""" + + state = sgqlc.types.Field(sgqlc.types.non_null(ProjectState), graphql_name="state") + """Whether the project is open or closed.""" + + updated_at = sgqlc.types.Field(sgqlc.types.non_null(DateTime), graphql_name="updatedAt") + """Identifies the date and time when the object was last updated.""" + + url = sgqlc.types.Field(sgqlc.types.non_null(URI), graphql_name="url") + """The HTTP URL for this project""" + + +class ProjectCard(sgqlc.types.Type, Node): + """A card in a project.""" + + __schema__ = github_schema + __field_names__ = ( + "column", + "content", + "created_at", + "creator", + "database_id", + "is_archived", + "note", + "project", + "resource_path", + "state", + "updated_at", + "url", + ) + column = sgqlc.types.Field("ProjectColumn", graphql_name="column") + """The project column this card is associated under. A card may only + belong to one project column at a time. The column field will be + null if the card is created in a pending state and has yet to be + associated with a column. Once cards are associated with a column, + they will not become pending in the future. 
+ """ + + content = sgqlc.types.Field("ProjectCardItem", graphql_name="content") + """The card content item""" + + created_at = sgqlc.types.Field(sgqlc.types.non_null(DateTime), graphql_name="createdAt") + """Identifies the date and time when the object was created.""" + + creator = sgqlc.types.Field(Actor, graphql_name="creator") + """The actor who created this card""" + + database_id = sgqlc.types.Field(Int, graphql_name="databaseId") + """Identifies the primary key from the database.""" + + is_archived = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="isArchived") + """Whether the card is archived""" + + note = sgqlc.types.Field(String, graphql_name="note") + """The card note""" + + project = sgqlc.types.Field(sgqlc.types.non_null(Project), graphql_name="project") + """The project that contains this card.""" + + resource_path = sgqlc.types.Field(sgqlc.types.non_null(URI), graphql_name="resourcePath") + """The HTTP path for this card""" + + state = sgqlc.types.Field(ProjectCardState, graphql_name="state") + """The state of ProjectCard""" + + updated_at = sgqlc.types.Field(sgqlc.types.non_null(DateTime), graphql_name="updatedAt") + """Identifies the date and time when the object was last updated.""" + + url = sgqlc.types.Field(sgqlc.types.non_null(URI), graphql_name="url") + """The HTTP URL for this card""" + + +class ProjectColumn(sgqlc.types.Type, Node): + """A column inside a project.""" + + __schema__ = github_schema + __field_names__ = ("cards", "created_at", "database_id", "name", "project", "purpose", "resource_path", "updated_at", "url") + cards = sgqlc.types.Field( + sgqlc.types.non_null(ProjectCardConnection), + graphql_name="cards", + args=sgqlc.types.ArgDict( + ( + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", 
default=None)), + ( + "archived_states", + sgqlc.types.Arg( + sgqlc.types.list_of(ProjectCardArchivedState), graphql_name="archivedStates", default=("ARCHIVED", "NOT_ARCHIVED") + ), + ), + ) + ), + ) + """List of cards in the column + + Arguments: + + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. + * `archived_states` (`[ProjectCardArchivedState]`): A list of + archived states to filter the cards by (default: `[ARCHIVED, + NOT_ARCHIVED]`) + """ + + created_at = sgqlc.types.Field(sgqlc.types.non_null(DateTime), graphql_name="createdAt") + """Identifies the date and time when the object was created.""" + + database_id = sgqlc.types.Field(Int, graphql_name="databaseId") + """Identifies the primary key from the database.""" + + name = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="name") + """The project column's name.""" + + project = sgqlc.types.Field(sgqlc.types.non_null(Project), graphql_name="project") + """The project that contains this column.""" + + purpose = sgqlc.types.Field(ProjectColumnPurpose, graphql_name="purpose") + """The semantic purpose of the column""" + + resource_path = sgqlc.types.Field(sgqlc.types.non_null(URI), graphql_name="resourcePath") + """The HTTP path for this project column""" + + updated_at = sgqlc.types.Field(sgqlc.types.non_null(DateTime), graphql_name="updatedAt") + """Identifies the date and time when the object was last updated.""" + + url = sgqlc.types.Field(sgqlc.types.non_null(URI), graphql_name="url") + """The HTTP URL for this project column""" + + +class ProjectNext(sgqlc.types.Type, Node, Closable, Updatable): + """New projects that manage issues, pull requests and drafts using + tables and boards. 
+ """ + + __schema__ = github_schema + __field_names__ = ( + "created_at", + "creator", + "database_id", + "description", + "fields", + "items", + "number", + "owner", + "public", + "repositories", + "resource_path", + "short_description", + "title", + "updated_at", + "url", + "views", + ) + created_at = sgqlc.types.Field(sgqlc.types.non_null(DateTime), graphql_name="createdAt") + """Identifies the date and time when the object was created.""" + + creator = sgqlc.types.Field(Actor, graphql_name="creator") + """The actor who originally created the project.""" + + database_id = sgqlc.types.Field(Int, graphql_name="databaseId") + """Identifies the primary key from the database.""" + + description = sgqlc.types.Field(String, graphql_name="description") + """The project's description.""" + + fields = sgqlc.types.Field( + sgqlc.types.non_null(ProjectNextFieldConnection), + graphql_name="fields", + args=sgqlc.types.ArgDict( + ( + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ) + ), + ) + """List of fields in the project + + Arguments: + + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. 
+ """ + + items = sgqlc.types.Field( + sgqlc.types.non_null(ProjectNextItemConnection), + graphql_name="items", + args=sgqlc.types.ArgDict( + ( + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ) + ), + ) + """List of items in the project + + Arguments: + + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. + """ + + number = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="number") + """The project's number.""" + + owner = sgqlc.types.Field(sgqlc.types.non_null(ProjectNextOwner), graphql_name="owner") + """The project's owner. Currently limited to organizations and users.""" + + public = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="public") + """Returns true if the project is public.""" + + repositories = sgqlc.types.Field( + sgqlc.types.non_null(RepositoryConnection), + graphql_name="repositories", + args=sgqlc.types.ArgDict( + ( + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ) + ), + ) + """The repositories the project is linked to. + + Arguments: + + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. 
+ * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. + """ + + resource_path = sgqlc.types.Field(sgqlc.types.non_null(URI), graphql_name="resourcePath") + """The HTTP path for this project""" + + short_description = sgqlc.types.Field(String, graphql_name="shortDescription") + """The project's short description.""" + + title = sgqlc.types.Field(String, graphql_name="title") + """The project's name.""" + + updated_at = sgqlc.types.Field(sgqlc.types.non_null(DateTime), graphql_name="updatedAt") + """Identifies the date and time when the object was last updated.""" + + url = sgqlc.types.Field(sgqlc.types.non_null(URI), graphql_name="url") + """The HTTP URL for this project""" + + views = sgqlc.types.Field( + sgqlc.types.non_null(ProjectViewConnection), + graphql_name="views", + args=sgqlc.types.ArgDict( + ( + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ) + ), + ) + """List of views in the project + + Arguments: + + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. 
+ """ + + +class ProjectNextField(sgqlc.types.Type, ProjectNextFieldCommon, Node): + """A field inside a project.""" + + __schema__ = github_schema + __field_names__ = () + + +class ProjectNextItem(sgqlc.types.Type, Node): + """An item within a new Project.""" + + __schema__ = github_schema + __field_names__ = ( + "content", + "created_at", + "creator", + "database_id", + "field_values", + "is_archived", + "project", + "title", + "type", + "updated_at", + ) + content = sgqlc.types.Field("ProjectNextItemContent", graphql_name="content") + """The content of the referenced draft issue, issue, or pull request""" + + created_at = sgqlc.types.Field(sgqlc.types.non_null(DateTime), graphql_name="createdAt") + """Identifies the date and time when the object was created.""" + + creator = sgqlc.types.Field(Actor, graphql_name="creator") + """The actor who created the item.""" + + database_id = sgqlc.types.Field(Int, graphql_name="databaseId") + """Identifies the primary key from the database.""" + + field_values = sgqlc.types.Field( + sgqlc.types.non_null(ProjectNextItemFieldValueConnection), + graphql_name="fieldValues", + args=sgqlc.types.ArgDict( + ( + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ) + ), + ) + """List of field values + + Arguments: + + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. 
+ """ + + is_archived = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="isArchived") + """Whether the item is archived.""" + + project = sgqlc.types.Field(sgqlc.types.non_null(ProjectNext), graphql_name="project") + """The project that contains this item.""" + + title = sgqlc.types.Field(String, graphql_name="title") + """The title of the item""" + + type = sgqlc.types.Field(sgqlc.types.non_null(ProjectItemType), graphql_name="type") + """The type of the item.""" + + updated_at = sgqlc.types.Field(sgqlc.types.non_null(DateTime), graphql_name="updatedAt") + """Identifies the date and time when the object was last updated.""" + + +class ProjectNextItemFieldValue(sgqlc.types.Type, Node): + """An value of a field in an item of a new Project.""" + + __schema__ = github_schema + __field_names__ = ("created_at", "creator", "database_id", "project_field", "project_item", "updated_at", "value") + created_at = sgqlc.types.Field(sgqlc.types.non_null(DateTime), graphql_name="createdAt") + """Identifies the date and time when the object was created.""" + + creator = sgqlc.types.Field(Actor, graphql_name="creator") + """The actor who created the item.""" + + database_id = sgqlc.types.Field(Int, graphql_name="databaseId") + """Identifies the primary key from the database.""" + + project_field = sgqlc.types.Field(sgqlc.types.non_null(ProjectNextField), graphql_name="projectField") + """The project field that contains this value.""" + + project_item = sgqlc.types.Field(sgqlc.types.non_null(ProjectNextItem), graphql_name="projectItem") + """The project item that contains this value.""" + + updated_at = sgqlc.types.Field(sgqlc.types.non_null(DateTime), graphql_name="updatedAt") + """Identifies the date and time when the object was last updated.""" + + value = sgqlc.types.Field(String, graphql_name="value") + """The value of a field""" + + +class ProjectView(sgqlc.types.Type, Node): + """A view within a Project.""" + + __schema__ = github_schema + __field_names__ = ( + 
"created_at", + "database_id", + "filter", + "group_by", + "items", + "layout", + "name", + "number", + "project", + "sort_by", + "updated_at", + "vertical_group_by", + "visible_fields", + ) + created_at = sgqlc.types.Field(sgqlc.types.non_null(DateTime), graphql_name="createdAt") + """Identifies the date and time when the object was created.""" + + database_id = sgqlc.types.Field(Int, graphql_name="databaseId") + """Identifies the primary key from the database.""" + + filter = sgqlc.types.Field(String, graphql_name="filter") + """The project view's filter.""" + + group_by = sgqlc.types.Field(sgqlc.types.list_of(sgqlc.types.non_null(Int)), graphql_name="groupBy") + """The view's group-by field.""" + + items = sgqlc.types.Field( + sgqlc.types.non_null(ProjectNextItemConnection), + graphql_name="items", + args=sgqlc.types.ArgDict( + ( + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ) + ), + ) + """The view's filtered items. + + Arguments: + + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. 
+ """ + + layout = sgqlc.types.Field(sgqlc.types.non_null(ProjectViewLayout), graphql_name="layout") + """The project view's layout.""" + + name = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="name") + """The project view's name.""" + + number = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="number") + """The project view's number.""" + + project = sgqlc.types.Field(sgqlc.types.non_null(ProjectNext), graphql_name="project") + """The project that contains this view.""" + + sort_by = sgqlc.types.Field(sgqlc.types.list_of(sgqlc.types.non_null(SortBy)), graphql_name="sortBy") + """The view's sort-by config.""" + + updated_at = sgqlc.types.Field(sgqlc.types.non_null(DateTime), graphql_name="updatedAt") + """Identifies the date and time when the object was last updated.""" + + vertical_group_by = sgqlc.types.Field(sgqlc.types.list_of(sgqlc.types.non_null(Int)), graphql_name="verticalGroupBy") + """The view's vertical-group-by field.""" + + visible_fields = sgqlc.types.Field(sgqlc.types.list_of(sgqlc.types.non_null(Int)), graphql_name="visibleFields") + """The view's visible fields.""" + + +class PublicKey(sgqlc.types.Type, Node): + """A user's public key.""" + + __schema__ = github_schema + __field_names__ = ("accessed_at", "created_at", "fingerprint", "is_read_only", "key", "updated_at") + accessed_at = sgqlc.types.Field(DateTime, graphql_name="accessedAt") + """The last time this authorization was used to perform an action. + Values will be null for keys not owned by the user. + """ + + created_at = sgqlc.types.Field(DateTime, graphql_name="createdAt") + """Identifies the date and time when the key was created. Keys + created before March 5th, 2014 have inaccurate values. Values will + be null for keys not owned by the user. 
+ """ + + fingerprint = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="fingerprint") + """The fingerprint for this PublicKey.""" + + is_read_only = sgqlc.types.Field(Boolean, graphql_name="isReadOnly") + """Whether this PublicKey is read-only or not. Values will be null + for keys not owned by the user. + """ + + key = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="key") + """The public key string.""" + + updated_at = sgqlc.types.Field(DateTime, graphql_name="updatedAt") + """Identifies the date and time when the key was updated. Keys + created before March 5th, 2014 may have inaccurate values. Values + will be null for keys not owned by the user. + """ + + +class PullRequest( + sgqlc.types.Type, + Node, + Assignable, + Closable, + Comment, + Updatable, + UpdatableComment, + Labelable, + Lockable, + Reactable, + RepositoryNode, + Subscribable, + UniformResourceLocatable, + ProjectNextOwner, +): + """A repository pull request.""" + + __schema__ = github_schema + __field_names__ = ( + "additions", + "auto_merge_request", + "base_ref", + "base_ref_name", + "base_ref_oid", + "base_repository", + "can_be_rebased", + "changed_files", + "checks_resource_path", + "checks_url", + "closing_issues_references", + "comments", + "commits", + "deletions", + "files", + "head_ref", + "head_ref_name", + "head_ref_oid", + "head_repository", + "head_repository_owner", + "hovercard", + "is_cross_repository", + "is_draft", + "is_read_by_viewer", + "latest_opinionated_reviews", + "latest_reviews", + "maintainer_can_modify", + "merge_commit", + "merge_state_status", + "mergeable", + "merged", + "merged_at", + "merged_by", + "milestone", + "number", + "participants", + "permalink", + "potential_merge_commit", + "project_cards", + "project_next_items", + "revert_resource_path", + "revert_url", + "review_decision", + "review_requests", + "review_threads", + "reviews", + "state", + "suggested_reviewers", + "timeline_items", + "title", + "title_html", + 
"viewer_can_apply_suggestion", + "viewer_can_delete_head_ref", + "viewer_can_disable_auto_merge", + "viewer_can_enable_auto_merge", + "viewer_can_merge_as_admin", + "viewer_latest_review", + "viewer_latest_review_request", + "viewer_merge_body_text", + "viewer_merge_headline_text", + ) + additions = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="additions") + """The number of additions in this pull request.""" + + auto_merge_request = sgqlc.types.Field(AutoMergeRequest, graphql_name="autoMergeRequest") + """Returns the auto-merge request object if one exists for this pull + request. + """ + + base_ref = sgqlc.types.Field("Ref", graphql_name="baseRef") + """Identifies the base Ref associated with the pull request.""" + + base_ref_name = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="baseRefName") + """Identifies the name of the base Ref associated with the pull + request, even if the ref has been deleted. + """ + + base_ref_oid = sgqlc.types.Field(sgqlc.types.non_null(GitObjectID), graphql_name="baseRefOid") + """Identifies the oid of the base ref associated with the pull + request, even if the ref has been deleted. 
+ """ + + base_repository = sgqlc.types.Field("Repository", graphql_name="baseRepository") + """The repository associated with this pull request's base Ref.""" + + can_be_rebased = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="canBeRebased") + """Whether or not the pull request is rebaseable.""" + + changed_files = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="changedFiles") + """The number of changed files in this pull request.""" + + checks_resource_path = sgqlc.types.Field(sgqlc.types.non_null(URI), graphql_name="checksResourcePath") + """The HTTP path for the checks of this pull request.""" + + checks_url = sgqlc.types.Field(sgqlc.types.non_null(URI), graphql_name="checksUrl") + """The HTTP URL for the checks of this pull request.""" + + closing_issues_references = sgqlc.types.Field( + IssueConnection, + graphql_name="closingIssuesReferences", + args=sgqlc.types.ArgDict( + ( + ("user_linked_only", sgqlc.types.Arg(Boolean, graphql_name="userLinkedOnly", default=False)), + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ("order_by", sgqlc.types.Arg(IssueOrder, graphql_name="orderBy", default=None)), + ) + ), + ) + """List of issues that were may be closed by this pull request + + Arguments: + + * `user_linked_only` (`Boolean`): Return only manually linked + Issues (default: `false`) + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. 
+ * `order_by` (`IssueOrder`): Ordering options for issues returned + from the connection + """ + + comments = sgqlc.types.Field( + sgqlc.types.non_null(IssueCommentConnection), + graphql_name="comments", + args=sgqlc.types.ArgDict( + ( + ("order_by", sgqlc.types.Arg(IssueCommentOrder, graphql_name="orderBy", default=None)), + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ) + ), + ) + """A list of comments associated with the pull request. + + Arguments: + + * `order_by` (`IssueCommentOrder`): Ordering options for issue + comments returned from the connection. + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. + """ + + commits = sgqlc.types.Field( + sgqlc.types.non_null(PullRequestCommitConnection), + graphql_name="commits", + args=sgqlc.types.ArgDict( + ( + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ) + ), + ) + """A list of commits present in this pull request's head branch not + present in the base branch. + + Arguments: + + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. 
+ * `last` (`Int`): Returns the last _n_ elements from the list. + """ + + deletions = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="deletions") + """The number of deletions in this pull request.""" + + files = sgqlc.types.Field( + PullRequestChangedFileConnection, + graphql_name="files", + args=sgqlc.types.ArgDict( + ( + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ) + ), + ) + """Lists the files changed within this pull request. + + Arguments: + + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. + """ + + head_ref = sgqlc.types.Field("Ref", graphql_name="headRef") + """Identifies the head Ref associated with the pull request.""" + + head_ref_name = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="headRefName") + """Identifies the name of the head Ref associated with the pull + request, even if the ref has been deleted. + """ + + head_ref_oid = sgqlc.types.Field(sgqlc.types.non_null(GitObjectID), graphql_name="headRefOid") + """Identifies the oid of the head ref associated with the pull + request, even if the ref has been deleted. + """ + + head_repository = sgqlc.types.Field("Repository", graphql_name="headRepository") + """The repository associated with this pull request's head Ref.""" + + head_repository_owner = sgqlc.types.Field(RepositoryOwner, graphql_name="headRepositoryOwner") + """The owner of the repository associated with this pull request's + head Ref. 
+ """ + + hovercard = sgqlc.types.Field( + sgqlc.types.non_null(Hovercard), + graphql_name="hovercard", + args=sgqlc.types.ArgDict( + (("include_notification_contexts", sgqlc.types.Arg(Boolean, graphql_name="includeNotificationContexts", default=True)),) + ), + ) + """The hovercard information for this issue + + Arguments: + + * `include_notification_contexts` (`Boolean`): Whether or not to + include notification contexts (default: `true`) + """ + + is_cross_repository = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="isCrossRepository") + """The head and base repositories are different.""" + + is_draft = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="isDraft") + """Identifies if the pull request is a draft.""" + + is_read_by_viewer = sgqlc.types.Field(Boolean, graphql_name="isReadByViewer") + """Is this pull request read by the viewer""" + + latest_opinionated_reviews = sgqlc.types.Field( + PullRequestReviewConnection, + graphql_name="latestOpinionatedReviews", + args=sgqlc.types.ArgDict( + ( + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ("writers_only", sgqlc.types.Arg(Boolean, graphql_name="writersOnly", default=False)), + ) + ), + ) + """A list of latest reviews per user associated with the pull + request. + + Arguments: + + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. 
+ * `writers_only` (`Boolean`): Only return reviews from user who + have write access to the repository (default: `false`) + """ + + latest_reviews = sgqlc.types.Field( + PullRequestReviewConnection, + graphql_name="latestReviews", + args=sgqlc.types.ArgDict( + ( + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ) + ), + ) + """A list of latest reviews per user associated with the pull request + that are not also pending review. + + Arguments: + + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. + """ + + maintainer_can_modify = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="maintainerCanModify") + """Indicates whether maintainers can modify the pull request.""" + + merge_commit = sgqlc.types.Field(Commit, graphql_name="mergeCommit") + """The commit that was created when this pull request was merged.""" + + merge_state_status = sgqlc.types.Field(sgqlc.types.non_null(MergeStateStatus), graphql_name="mergeStateStatus") + """Detailed information about the current pull request merge state + status. + """ + + mergeable = sgqlc.types.Field(sgqlc.types.non_null(MergeableState), graphql_name="mergeable") + """Whether or not the pull request can be merged based on the + existence of merge conflicts. 
+ """ + + merged = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="merged") + """Whether or not the pull request was merged.""" + + merged_at = sgqlc.types.Field(DateTime, graphql_name="mergedAt") + """The date and time that the pull request was merged.""" + + merged_by = sgqlc.types.Field(Actor, graphql_name="mergedBy") + """The actor who merged the pull request.""" + + milestone = sgqlc.types.Field(Milestone, graphql_name="milestone") + """Identifies the milestone associated with the pull request.""" + + number = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="number") + """Identifies the pull request number.""" + + participants = sgqlc.types.Field( + sgqlc.types.non_null(UserConnection), + graphql_name="participants", + args=sgqlc.types.ArgDict( + ( + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ) + ), + ) + """A list of Users that are participating in the Pull Request + conversation. + + Arguments: + + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. + """ + + permalink = sgqlc.types.Field(sgqlc.types.non_null(URI), graphql_name="permalink") + """The permalink to the pull request.""" + + potential_merge_commit = sgqlc.types.Field(Commit, graphql_name="potentialMergeCommit") + """The commit that GitHub automatically generated to test if this + pull request could be merged. This field will not return a value + if the pull request is merged, or if the test merge commit is + still being generated. 
See the `mergeable` field for more details + on the mergeability of the pull request. + """ + + project_cards = sgqlc.types.Field( + sgqlc.types.non_null(ProjectCardConnection), + graphql_name="projectCards", + args=sgqlc.types.ArgDict( + ( + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ( + "archived_states", + sgqlc.types.Arg( + sgqlc.types.list_of(ProjectCardArchivedState), graphql_name="archivedStates", default=("ARCHIVED", "NOT_ARCHIVED") + ), + ), + ) + ), + ) + """List of project cards associated with this pull request. + + Arguments: + + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. + * `archived_states` (`[ProjectCardArchivedState]`): A list of + archived states to filter the cards by (default: `[ARCHIVED, + NOT_ARCHIVED]`) + """ + + project_next_items = sgqlc.types.Field( + sgqlc.types.non_null(ProjectNextItemConnection), + graphql_name="projectNextItems", + args=sgqlc.types.ArgDict( + ( + ("include_archived", sgqlc.types.Arg(Boolean, graphql_name="includeArchived", default=True)), + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ) + ), + ) + """List of project (beta) items associated with this pull request. + + Arguments: + + * `include_archived` (`Boolean`): Include archived items. 
+ (default: `true`) + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. + """ + + revert_resource_path = sgqlc.types.Field(sgqlc.types.non_null(URI), graphql_name="revertResourcePath") + """The HTTP path for reverting this pull request.""" + + revert_url = sgqlc.types.Field(sgqlc.types.non_null(URI), graphql_name="revertUrl") + """The HTTP URL for reverting this pull request.""" + + review_decision = sgqlc.types.Field(PullRequestReviewDecision, graphql_name="reviewDecision") + """The current status of this pull request with respect to code + review. + """ + + review_requests = sgqlc.types.Field( + ReviewRequestConnection, + graphql_name="reviewRequests", + args=sgqlc.types.ArgDict( + ( + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ) + ), + ) + """A list of review requests associated with the pull request. + + Arguments: + + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. 
+ """ + + review_threads = sgqlc.types.Field( + sgqlc.types.non_null(PullRequestReviewThreadConnection), + graphql_name="reviewThreads", + args=sgqlc.types.ArgDict( + ( + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ) + ), + ) + """The list of all review threads for this pull request. + + Arguments: + + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. + """ + + reviews = sgqlc.types.Field( + PullRequestReviewConnection, + graphql_name="reviews", + args=sgqlc.types.ArgDict( + ( + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ( + "states", + sgqlc.types.Arg(sgqlc.types.list_of(sgqlc.types.non_null(PullRequestReviewState)), graphql_name="states", default=None), + ), + ("author", sgqlc.types.Arg(String, graphql_name="author", default=None)), + ) + ), + ) + """A list of reviews associated with the pull request. + + Arguments: + + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. 
+ * `states` (`[PullRequestReviewState!]`): A list of states to + filter the reviews. + * `author` (`String`): Filter by author of the review. + """ + + state = sgqlc.types.Field(sgqlc.types.non_null(PullRequestState), graphql_name="state") + """Identifies the state of the pull request.""" + + suggested_reviewers = sgqlc.types.Field(sgqlc.types.non_null(sgqlc.types.list_of(SuggestedReviewer)), graphql_name="suggestedReviewers") + """A list of reviewer suggestions based on commit history and past + review comments. + """ + + timeline_items = sgqlc.types.Field( + sgqlc.types.non_null(PullRequestTimelineItemsConnection), + graphql_name="timelineItems", + args=sgqlc.types.ArgDict( + ( + ("since", sgqlc.types.Arg(DateTime, graphql_name="since", default=None)), + ("skip", sgqlc.types.Arg(Int, graphql_name="skip", default=None)), + ( + "item_types", + sgqlc.types.Arg( + sgqlc.types.list_of(sgqlc.types.non_null(PullRequestTimelineItemsItemType)), graphql_name="itemTypes", default=None + ), + ), + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ) + ), + ) + """A list of events, comments, commits, etc. associated with the pull + request. + + Arguments: + + * `since` (`DateTime`): Filter timeline items by a `since` + timestamp. + * `skip` (`Int`): Skips the first _n_ elements in the list. + * `item_types` (`[PullRequestTimelineItemsItemType!]`): Filter + timeline items by type. + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. 
+ """ + + title = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="title") + """Identifies the pull request title.""" + + title_html = sgqlc.types.Field(sgqlc.types.non_null(HTML), graphql_name="titleHTML") + """Identifies the pull request title rendered to HTML.""" + + viewer_can_apply_suggestion = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="viewerCanApplySuggestion") + """Whether or not the viewer can apply suggestion.""" + + viewer_can_delete_head_ref = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="viewerCanDeleteHeadRef") + """Check if the viewer can restore the deleted head ref.""" + + viewer_can_disable_auto_merge = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="viewerCanDisableAutoMerge") + """Whether or not the viewer can disable auto-merge""" + + viewer_can_enable_auto_merge = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="viewerCanEnableAutoMerge") + """Whether or not the viewer can enable auto-merge""" + + viewer_can_merge_as_admin = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="viewerCanMergeAsAdmin") + """Indicates whether the viewer can bypass branch protections and + merge the pull request immediately + """ + + viewer_latest_review = sgqlc.types.Field("PullRequestReview", graphql_name="viewerLatestReview") + """The latest review given from the viewer.""" + + viewer_latest_review_request = sgqlc.types.Field("ReviewRequest", graphql_name="viewerLatestReviewRequest") + """The person who has requested the viewer for review on this pull + request. + """ + + viewer_merge_body_text = sgqlc.types.Field( + sgqlc.types.non_null(String), + graphql_name="viewerMergeBodyText", + args=sgqlc.types.ArgDict((("merge_type", sgqlc.types.Arg(PullRequestMergeMethod, graphql_name="mergeType", default=None)),)), + ) + """The merge body text for the viewer and method. + + Arguments: + + * `merge_type` (`PullRequestMergeMethod`): The merge method for + the message. 
+ """ + + viewer_merge_headline_text = sgqlc.types.Field( + sgqlc.types.non_null(String), + graphql_name="viewerMergeHeadlineText", + args=sgqlc.types.ArgDict((("merge_type", sgqlc.types.Arg(PullRequestMergeMethod, graphql_name="mergeType", default=None)),)), + ) + """The merge headline text for the viewer and method. + + Arguments: + + * `merge_type` (`PullRequestMergeMethod`): The merge method for + the message. + """ + + +class PullRequestCommit(sgqlc.types.Type, Node, UniformResourceLocatable): + """Represents a Git commit part of a pull request.""" + + __schema__ = github_schema + __field_names__ = ("commit", "pull_request") + commit = sgqlc.types.Field(sgqlc.types.non_null(Commit), graphql_name="commit") + """The Git commit object""" + + pull_request = sgqlc.types.Field(sgqlc.types.non_null(PullRequest), graphql_name="pullRequest") + """The pull request this commit belongs to""" + + +class PullRequestCommitCommentThread(sgqlc.types.Type, RepositoryNode, Node): + """Represents a commit comment thread part of a pull request.""" + + __schema__ = github_schema + __field_names__ = ("comments", "commit", "path", "position", "pull_request") + comments = sgqlc.types.Field( + sgqlc.types.non_null(CommitCommentConnection), + graphql_name="comments", + args=sgqlc.types.ArgDict( + ( + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ) + ), + ) + """The comments that exist in this thread. + + Arguments: + + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. 
+ """ + + commit = sgqlc.types.Field(sgqlc.types.non_null(Commit), graphql_name="commit") + """The commit the comments were made on.""" + + path = sgqlc.types.Field(String, graphql_name="path") + """The file the comments were made on.""" + + position = sgqlc.types.Field(Int, graphql_name="position") + """The position in the diff for the commit that the comment was made + on. + """ + + pull_request = sgqlc.types.Field(sgqlc.types.non_null(PullRequest), graphql_name="pullRequest") + """The pull request this commit comment thread belongs to""" + + +class PullRequestReview(sgqlc.types.Type, Node, Comment, Deletable, Updatable, UpdatableComment, Reactable, RepositoryNode): + """A review object for a given pull request.""" + + __schema__ = github_schema + __field_names__ = ( + "author_can_push_to_repository", + "comments", + "commit", + "on_behalf_of", + "pull_request", + "resource_path", + "state", + "submitted_at", + "url", + ) + author_can_push_to_repository = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="authorCanPushToRepository") + """Indicates whether the author of this review has push access to the + repository. + """ + + comments = sgqlc.types.Field( + sgqlc.types.non_null(PullRequestReviewCommentConnection), + graphql_name="comments", + args=sgqlc.types.ArgDict( + ( + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ) + ), + ) + """A list of review comments for the current pull request review. + + Arguments: + + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. 
+ * `last` (`Int`): Returns the last _n_ elements from the list. + """ + + commit = sgqlc.types.Field(Commit, graphql_name="commit") + """Identifies the commit associated with this pull request review.""" + + on_behalf_of = sgqlc.types.Field( + sgqlc.types.non_null(TeamConnection), + graphql_name="onBehalfOf", + args=sgqlc.types.ArgDict( + ( + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ) + ), + ) + """A list of teams that this review was made on behalf of. + + Arguments: + + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. + """ + + pull_request = sgqlc.types.Field(sgqlc.types.non_null(PullRequest), graphql_name="pullRequest") + """Identifies the pull request associated with this pull request + review. 
+ """ + + resource_path = sgqlc.types.Field(sgqlc.types.non_null(URI), graphql_name="resourcePath") + """The HTTP path permalink for this PullRequestReview.""" + + state = sgqlc.types.Field(sgqlc.types.non_null(PullRequestReviewState), graphql_name="state") + """Identifies the current state of the pull request review.""" + + submitted_at = sgqlc.types.Field(DateTime, graphql_name="submittedAt") + """Identifies when the Pull Request Review was submitted""" + + url = sgqlc.types.Field(sgqlc.types.non_null(URI), graphql_name="url") + """The HTTP URL permalink for this PullRequestReview.""" + + +class PullRequestReviewComment( + sgqlc.types.Type, Node, Comment, Deletable, Minimizable, Updatable, UpdatableComment, Reactable, RepositoryNode +): + """A review comment associated with a given repository pull request.""" + + __schema__ = github_schema + __field_names__ = ( + "commit", + "diff_hunk", + "drafted_at", + "original_commit", + "original_position", + "outdated", + "path", + "position", + "pull_request", + "pull_request_review", + "reply_to", + "resource_path", + "state", + "url", + ) + commit = sgqlc.types.Field(Commit, graphql_name="commit") + """Identifies the commit associated with the comment.""" + + diff_hunk = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="diffHunk") + """The diff hunk to which the comment applies.""" + + drafted_at = sgqlc.types.Field(sgqlc.types.non_null(DateTime), graphql_name="draftedAt") + """Identifies when the comment was created in a draft state.""" + + original_commit = sgqlc.types.Field(Commit, graphql_name="originalCommit") + """Identifies the original commit associated with the comment.""" + + original_position = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="originalPosition") + """The original line index in the diff to which the comment applies.""" + + outdated = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="outdated") + """Identifies when the comment body is outdated""" + + path = 
sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="path") + """The path to which the comment applies.""" + + position = sgqlc.types.Field(Int, graphql_name="position") + """The line index in the diff to which the comment applies.""" + + pull_request = sgqlc.types.Field(sgqlc.types.non_null(PullRequest), graphql_name="pullRequest") + """The pull request associated with this review comment.""" + + pull_request_review = sgqlc.types.Field(PullRequestReview, graphql_name="pullRequestReview") + """The pull request review associated with this review comment.""" + + reply_to = sgqlc.types.Field("PullRequestReviewComment", graphql_name="replyTo") + """The comment this is a reply to.""" + + resource_path = sgqlc.types.Field(sgqlc.types.non_null(URI), graphql_name="resourcePath") + """The HTTP path permalink for this review comment.""" + + state = sgqlc.types.Field(sgqlc.types.non_null(PullRequestReviewCommentState), graphql_name="state") + """Identifies the state of the comment.""" + + url = sgqlc.types.Field(sgqlc.types.non_null(URI), graphql_name="url") + """The HTTP URL permalink for this review comment.""" + + +class PullRequestReviewThread(sgqlc.types.Type, Node): + """A threaded list of comments for a given pull request.""" + + __schema__ = github_schema + __field_names__ = ( + "comments", + "diff_side", + "is_collapsed", + "is_outdated", + "is_resolved", + "line", + "original_line", + "original_start_line", + "path", + "pull_request", + "repository", + "resolved_by", + "start_diff_side", + "start_line", + "viewer_can_reply", + "viewer_can_resolve", + "viewer_can_unresolve", + ) + comments = sgqlc.types.Field( + sgqlc.types.non_null(PullRequestReviewCommentConnection), + graphql_name="comments", + args=sgqlc.types.ArgDict( + ( + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", 
sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ("skip", sgqlc.types.Arg(Int, graphql_name="skip", default=None)), + ) + ), + ) + """A list of pull request comments associated with the thread. + + Arguments: + + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. + * `skip` (`Int`): Skips the first _n_ elements in the list. + """ + + diff_side = sgqlc.types.Field(sgqlc.types.non_null(DiffSide), graphql_name="diffSide") + """The side of the diff on which this thread was placed.""" + + is_collapsed = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="isCollapsed") + """Whether or not the thread has been collapsed (resolved)""" + + is_outdated = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="isOutdated") + """Indicates whether this thread was outdated by newer changes.""" + + is_resolved = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="isResolved") + """Whether this thread has been resolved""" + + line = sgqlc.types.Field(Int, graphql_name="line") + """The line in the file to which this thread refers""" + + original_line = sgqlc.types.Field(Int, graphql_name="originalLine") + """The original line in the file to which this thread refers.""" + + original_start_line = sgqlc.types.Field(Int, graphql_name="originalStartLine") + """The original start line in the file to which this thread refers + (multi-line only). 
+ """ + + path = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="path") + """Identifies the file path of this thread.""" + + pull_request = sgqlc.types.Field(sgqlc.types.non_null(PullRequest), graphql_name="pullRequest") + """Identifies the pull request associated with this thread.""" + + repository = sgqlc.types.Field(sgqlc.types.non_null("Repository"), graphql_name="repository") + """Identifies the repository associated with this thread.""" + + resolved_by = sgqlc.types.Field("User", graphql_name="resolvedBy") + """The user who resolved this thread""" + + start_diff_side = sgqlc.types.Field(DiffSide, graphql_name="startDiffSide") + """The side of the diff that the first line of the thread starts on + (multi-line only) + """ + + start_line = sgqlc.types.Field(Int, graphql_name="startLine") + """The start line in the file to which this thread refers (multi-line + only) + """ + + viewer_can_reply = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="viewerCanReply") + """Indicates whether the current viewer can reply to this thread.""" + + viewer_can_resolve = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="viewerCanResolve") + """Whether or not the viewer can resolve this thread""" + + viewer_can_unresolve = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="viewerCanUnresolve") + """Whether or not the viewer can unresolve this thread""" + + +class Push(sgqlc.types.Type, Node): + """A Git push.""" + + __schema__ = github_schema + __field_names__ = ("next_sha", "permalink", "previous_sha", "pusher", "repository") + next_sha = sgqlc.types.Field(GitObjectID, graphql_name="nextSha") + """The SHA after the push""" + + permalink = sgqlc.types.Field(sgqlc.types.non_null(URI), graphql_name="permalink") + """The permalink for this push.""" + + previous_sha = sgqlc.types.Field(GitObjectID, graphql_name="previousSha") + """The SHA before the push""" + + pusher = sgqlc.types.Field(sgqlc.types.non_null(Actor), 
graphql_name="pusher") + """The actor who pushed""" + + repository = sgqlc.types.Field(sgqlc.types.non_null("Repository"), graphql_name="repository") + """The repository that was pushed to""" + + +class PushAllowance(sgqlc.types.Type, Node): + """A team, user, or app who has the ability to push to a protected + branch. + """ + + __schema__ = github_schema + __field_names__ = ("actor", "branch_protection_rule") + actor = sgqlc.types.Field("PushAllowanceActor", graphql_name="actor") + """The actor that can push.""" + + branch_protection_rule = sgqlc.types.Field(BranchProtectionRule, graphql_name="branchProtectionRule") + """Identifies the branch protection rule associated with the allowed + user, team, or app. + """ + + +class Reaction(sgqlc.types.Type, Node): + """An emoji reaction to a particular piece of content.""" + + __schema__ = github_schema + __field_names__ = ("content", "created_at", "database_id", "reactable", "user") + content = sgqlc.types.Field(sgqlc.types.non_null(ReactionContent), graphql_name="content") + """Identifies the emoji reaction.""" + + created_at = sgqlc.types.Field(sgqlc.types.non_null(DateTime), graphql_name="createdAt") + """Identifies the date and time when the object was created.""" + + database_id = sgqlc.types.Field(Int, graphql_name="databaseId") + """Identifies the primary key from the database.""" + + reactable = sgqlc.types.Field(sgqlc.types.non_null(Reactable), graphql_name="reactable") + """The reactable piece of content""" + + user = sgqlc.types.Field("User", graphql_name="user") + """Identifies the user who created this reaction.""" + + +class ReadyForReviewEvent(sgqlc.types.Type, Node, UniformResourceLocatable): + """Represents a 'ready_for_review' event on a given pull request.""" + + __schema__ = github_schema + __field_names__ = ("actor", "created_at", "pull_request") + actor = sgqlc.types.Field(Actor, graphql_name="actor") + """Identifies the actor who performed the event.""" + + created_at = 
sgqlc.types.Field(sgqlc.types.non_null(DateTime), graphql_name="createdAt") + """Identifies the date and time when the object was created.""" + + pull_request = sgqlc.types.Field(sgqlc.types.non_null(PullRequest), graphql_name="pullRequest") + """PullRequest referenced by event.""" + + +class Ref(sgqlc.types.Type, Node): + """Represents a Git reference.""" + + __schema__ = github_schema + __field_names__ = ("associated_pull_requests", "branch_protection_rule", "name", "prefix", "ref_update_rule", "repository", "target") + associated_pull_requests = sgqlc.types.Field( + sgqlc.types.non_null(PullRequestConnection), + graphql_name="associatedPullRequests", + args=sgqlc.types.ArgDict( + ( + ( + "states", + sgqlc.types.Arg(sgqlc.types.list_of(sgqlc.types.non_null(PullRequestState)), graphql_name="states", default=None), + ), + ("labels", sgqlc.types.Arg(sgqlc.types.list_of(sgqlc.types.non_null(String)), graphql_name="labels", default=None)), + ("head_ref_name", sgqlc.types.Arg(String, graphql_name="headRefName", default=None)), + ("base_ref_name", sgqlc.types.Arg(String, graphql_name="baseRefName", default=None)), + ("order_by", sgqlc.types.Arg(IssueOrder, graphql_name="orderBy", default=None)), + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ) + ), + ) + """A list of pull requests with this ref as the head ref. + + Arguments: + + * `states` (`[PullRequestState!]`): A list of states to filter the + pull requests by. + * `labels` (`[String!]`): A list of label names to filter the pull + requests by. + * `head_ref_name` (`String`): The head ref name to filter the pull + requests by. + * `base_ref_name` (`String`): The base ref name to filter the pull + requests by. 
+ * `order_by` (`IssueOrder`): Ordering options for pull requests + returned from the connection. + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. + """ + + branch_protection_rule = sgqlc.types.Field(BranchProtectionRule, graphql_name="branchProtectionRule") + """Branch protection rules for this ref""" + + name = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="name") + """The ref name.""" + + prefix = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="prefix") + """The ref's prefix, such as `refs/heads/` or `refs/tags/`.""" + + ref_update_rule = sgqlc.types.Field(RefUpdateRule, graphql_name="refUpdateRule") + """Branch protection rules that are viewable by non-admins""" + + repository = sgqlc.types.Field(sgqlc.types.non_null("Repository"), graphql_name="repository") + """The repository the ref belongs to.""" + + target = sgqlc.types.Field(GitObject, graphql_name="target") + """The object the ref points to. Returns null when object does not + exist. 
+ """ + + +class ReferencedEvent(sgqlc.types.Type, Node): + """Represents a 'referenced' event on a given `ReferencedSubject`.""" + + __schema__ = github_schema + __field_names__ = ("actor", "commit", "commit_repository", "created_at", "is_cross_repository", "is_direct_reference", "subject") + actor = sgqlc.types.Field(Actor, graphql_name="actor") + """Identifies the actor who performed the event.""" + + commit = sgqlc.types.Field(Commit, graphql_name="commit") + """Identifies the commit associated with the 'referenced' event.""" + + commit_repository = sgqlc.types.Field(sgqlc.types.non_null("Repository"), graphql_name="commitRepository") + """Identifies the repository associated with the 'referenced' event.""" + + created_at = sgqlc.types.Field(sgqlc.types.non_null(DateTime), graphql_name="createdAt") + """Identifies the date and time when the object was created.""" + + is_cross_repository = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="isCrossRepository") + """Reference originated in a different repository.""" + + is_direct_reference = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="isDirectReference") + """Checks if the commit message itself references the subject. Can be + false in the case of a commit comment reference. 
+ """ + + subject = sgqlc.types.Field(sgqlc.types.non_null("ReferencedSubject"), graphql_name="subject") + """Object referenced by event.""" + + +class Release(sgqlc.types.Type, Node, UniformResourceLocatable, Reactable): + """A release contains the content for a release.""" + + __schema__ = github_schema + __field_names__ = ( + "author", + "created_at", + "description", + "description_html", + "is_draft", + "is_latest", + "is_prerelease", + "mentions", + "name", + "published_at", + "release_assets", + "repository", + "short_description_html", + "tag", + "tag_commit", + "tag_name", + "updated_at", + ) + author = sgqlc.types.Field("User", graphql_name="author") + """The author of the release""" + + created_at = sgqlc.types.Field(sgqlc.types.non_null(DateTime), graphql_name="createdAt") + """Identifies the date and time when the object was created.""" + + description = sgqlc.types.Field(String, graphql_name="description") + """The description of the release.""" + + description_html = sgqlc.types.Field(HTML, graphql_name="descriptionHTML") + """The description of this release rendered to HTML.""" + + is_draft = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="isDraft") + """Whether or not the release is a draft""" + + is_latest = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="isLatest") + """Whether or not the release is the latest releast""" + + is_prerelease = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="isPrerelease") + """Whether or not the release is a prerelease""" + + mentions = sgqlc.types.Field( + UserConnection, + graphql_name="mentions", + args=sgqlc.types.ArgDict( + ( + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ) + ), + ) + """A list of users mentioned in the 
release description + + Arguments: + + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. + """ + + name = sgqlc.types.Field(String, graphql_name="name") + """The title of the release.""" + + published_at = sgqlc.types.Field(DateTime, graphql_name="publishedAt") + """Identifies the date and time when the release was created.""" + + release_assets = sgqlc.types.Field( + sgqlc.types.non_null(ReleaseAssetConnection), + graphql_name="releaseAssets", + args=sgqlc.types.ArgDict( + ( + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ("name", sgqlc.types.Arg(String, graphql_name="name", default=None)), + ) + ), + ) + """List of releases assets which are dependent on this release. + + Arguments: + + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. + * `name` (`String`): A list of names to filter the assets by. 
+ """ + + repository = sgqlc.types.Field(sgqlc.types.non_null("Repository"), graphql_name="repository") + """The repository that the release belongs to.""" + + short_description_html = sgqlc.types.Field( + HTML, + graphql_name="shortDescriptionHTML", + args=sgqlc.types.ArgDict((("limit", sgqlc.types.Arg(Int, graphql_name="limit", default=200)),)), + ) + """A description of the release, rendered to HTML without any links + in it. + + Arguments: + + * `limit` (`Int`): How many characters to return. (default: `200`) + """ + + tag = sgqlc.types.Field(Ref, graphql_name="tag") + """The Git tag the release points to""" + + tag_commit = sgqlc.types.Field(Commit, graphql_name="tagCommit") + """The tag commit for this release.""" + + tag_name = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="tagName") + """The name of the release's Git tag""" + + updated_at = sgqlc.types.Field(sgqlc.types.non_null(DateTime), graphql_name="updatedAt") + """Identifies the date and time when the object was last updated.""" + + +class ReleaseAsset(sgqlc.types.Type, Node): + """A release asset contains the content for a release asset.""" + + __schema__ = github_schema + __field_names__ = ( + "content_type", + "created_at", + "download_count", + "download_url", + "name", + "release", + "size", + "updated_at", + "uploaded_by", + "url", + ) + content_type = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="contentType") + """The asset's content-type""" + + created_at = sgqlc.types.Field(sgqlc.types.non_null(DateTime), graphql_name="createdAt") + """Identifies the date and time when the object was created.""" + + download_count = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="downloadCount") + """The number of times this asset was downloaded""" + + download_url = sgqlc.types.Field(sgqlc.types.non_null(URI), graphql_name="downloadUrl") + """Identifies the URL where you can download the release asset via + the browser. 
+ """ + + name = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="name") + """Identifies the title of the release asset.""" + + release = sgqlc.types.Field(Release, graphql_name="release") + """Release that the asset is associated with""" + + size = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="size") + """The size (in bytes) of the asset""" + + updated_at = sgqlc.types.Field(sgqlc.types.non_null(DateTime), graphql_name="updatedAt") + """Identifies the date and time when the object was last updated.""" + + uploaded_by = sgqlc.types.Field(sgqlc.types.non_null("User"), graphql_name="uploadedBy") + """The user that performed the upload""" + + url = sgqlc.types.Field(sgqlc.types.non_null(URI), graphql_name="url") + """Identifies the URL of the release asset.""" + + +class RemovedFromProjectEvent(sgqlc.types.Type, Node): + """Represents a 'removed_from_project' event on a given issue or pull + request. + """ + + __schema__ = github_schema + __field_names__ = ("actor", "created_at", "database_id") + actor = sgqlc.types.Field(Actor, graphql_name="actor") + """Identifies the actor who performed the event.""" + + created_at = sgqlc.types.Field(sgqlc.types.non_null(DateTime), graphql_name="createdAt") + """Identifies the date and time when the object was created.""" + + database_id = sgqlc.types.Field(Int, graphql_name="databaseId") + """Identifies the primary key from the database.""" + + +class RenamedTitleEvent(sgqlc.types.Type, Node): + """Represents a 'renamed' event on a given issue or pull request""" + + __schema__ = github_schema + __field_names__ = ("actor", "created_at", "current_title", "previous_title", "subject") + actor = sgqlc.types.Field(Actor, graphql_name="actor") + """Identifies the actor who performed the event.""" + + created_at = sgqlc.types.Field(sgqlc.types.non_null(DateTime), graphql_name="createdAt") + """Identifies the date and time when the object was created.""" + + current_title = 
sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="currentTitle") + """Identifies the current title of the issue or pull request.""" + + previous_title = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="previousTitle") + """Identifies the previous title of the issue or pull request.""" + + subject = sgqlc.types.Field(sgqlc.types.non_null("RenamedTitleSubject"), graphql_name="subject") + """Subject that was renamed.""" + + +class ReopenedEvent(sgqlc.types.Type, Node): + """Represents a 'reopened' event on any `Closable`.""" + + __schema__ = github_schema + __field_names__ = ("actor", "closable", "created_at", "state_reason") + actor = sgqlc.types.Field(Actor, graphql_name="actor") + """Identifies the actor who performed the event.""" + + closable = sgqlc.types.Field(sgqlc.types.non_null(Closable), graphql_name="closable") + """Object that was reopened.""" + + created_at = sgqlc.types.Field(sgqlc.types.non_null(DateTime), graphql_name="createdAt") + """Identifies the date and time when the object was created.""" + + state_reason = sgqlc.types.Field(IssueStateReason, graphql_name="stateReason") + """The reason the issue state was changed to open.""" + + +class RepoAccessAuditEntry(sgqlc.types.Type, Node, AuditEntry, OrganizationAuditEntryData, RepositoryAuditEntryData): + """Audit log entry for a repo.access event.""" + + __schema__ = github_schema + __field_names__ = ("visibility",) + visibility = sgqlc.types.Field(RepoAccessAuditEntryVisibility, graphql_name="visibility") + """The visibility of the repository""" + + +class RepoAddMemberAuditEntry(sgqlc.types.Type, Node, AuditEntry, OrganizationAuditEntryData, RepositoryAuditEntryData): + """Audit log entry for a repo.add_member event.""" + + __schema__ = github_schema + __field_names__ = ("visibility",) + visibility = sgqlc.types.Field(RepoAddMemberAuditEntryVisibility, graphql_name="visibility") + """The visibility of the repository""" + + +class RepoAddTopicAuditEntry(sgqlc.types.Type, 
Node, AuditEntry, RepositoryAuditEntryData, OrganizationAuditEntryData, TopicAuditEntryData): + """Audit log entry for a repo.add_topic event.""" + + __schema__ = github_schema + __field_names__ = () + + +class RepoArchivedAuditEntry(sgqlc.types.Type, Node, AuditEntry, RepositoryAuditEntryData, OrganizationAuditEntryData): + """Audit log entry for a repo.archived event.""" + + __schema__ = github_schema + __field_names__ = ("visibility",) + visibility = sgqlc.types.Field(RepoArchivedAuditEntryVisibility, graphql_name="visibility") + """The visibility of the repository""" + + +class RepoChangeMergeSettingAuditEntry(sgqlc.types.Type, Node, AuditEntry, RepositoryAuditEntryData, OrganizationAuditEntryData): + """Audit log entry for a repo.change_merge_setting event.""" + + __schema__ = github_schema + __field_names__ = ("is_enabled", "merge_type") + is_enabled = sgqlc.types.Field(Boolean, graphql_name="isEnabled") + """Whether the change was to enable (true) or disable (false) the + merge type + """ + + merge_type = sgqlc.types.Field(RepoChangeMergeSettingAuditEntryMergeType, graphql_name="mergeType") + """The merge method affected by the change""" + + +class RepoConfigDisableAnonymousGitAccessAuditEntry( + sgqlc.types.Type, Node, AuditEntry, OrganizationAuditEntryData, RepositoryAuditEntryData +): + """Audit log entry for a repo.config.disable_anonymous_git_access + event. + """ + + __schema__ = github_schema + __field_names__ = () + + +class RepoConfigDisableCollaboratorsOnlyAuditEntry( + sgqlc.types.Type, Node, AuditEntry, OrganizationAuditEntryData, RepositoryAuditEntryData +): + """Audit log entry for a repo.config.disable_collaborators_only + event. 
+ """ + + __schema__ = github_schema + __field_names__ = () + + +class RepoConfigDisableContributorsOnlyAuditEntry(sgqlc.types.Type, Node, AuditEntry, OrganizationAuditEntryData, RepositoryAuditEntryData): + """Audit log entry for a repo.config.disable_contributors_only event.""" + + __schema__ = github_schema + __field_names__ = () + + +class RepoConfigDisableSockpuppetDisallowedAuditEntry( + sgqlc.types.Type, Node, AuditEntry, OrganizationAuditEntryData, RepositoryAuditEntryData +): + """Audit log entry for a repo.config.disable_sockpuppet_disallowed + event. + """ + + __schema__ = github_schema + __field_names__ = () + + +class RepoConfigEnableAnonymousGitAccessAuditEntry( + sgqlc.types.Type, Node, AuditEntry, OrganizationAuditEntryData, RepositoryAuditEntryData +): + """Audit log entry for a repo.config.enable_anonymous_git_access + event. + """ + + __schema__ = github_schema + __field_names__ = () + + +class RepoConfigEnableCollaboratorsOnlyAuditEntry(sgqlc.types.Type, Node, AuditEntry, OrganizationAuditEntryData, RepositoryAuditEntryData): + """Audit log entry for a repo.config.enable_collaborators_only event.""" + + __schema__ = github_schema + __field_names__ = () + + +class RepoConfigEnableContributorsOnlyAuditEntry(sgqlc.types.Type, Node, AuditEntry, OrganizationAuditEntryData, RepositoryAuditEntryData): + """Audit log entry for a repo.config.enable_contributors_only event.""" + + __schema__ = github_schema + __field_names__ = () + + +class RepoConfigEnableSockpuppetDisallowedAuditEntry( + sgqlc.types.Type, Node, AuditEntry, OrganizationAuditEntryData, RepositoryAuditEntryData +): + """Audit log entry for a repo.config.enable_sockpuppet_disallowed + event. 
+ """ + + __schema__ = github_schema + __field_names__ = () + + +class RepoConfigLockAnonymousGitAccessAuditEntry(sgqlc.types.Type, Node, AuditEntry, OrganizationAuditEntryData, RepositoryAuditEntryData): + """Audit log entry for a repo.config.lock_anonymous_git_access event.""" + + __schema__ = github_schema + __field_names__ = () + + +class RepoConfigUnlockAnonymousGitAccessAuditEntry( + sgqlc.types.Type, Node, AuditEntry, OrganizationAuditEntryData, RepositoryAuditEntryData +): + """Audit log entry for a repo.config.unlock_anonymous_git_access + event. + """ + + __schema__ = github_schema + __field_names__ = () + + +class RepoCreateAuditEntry(sgqlc.types.Type, Node, AuditEntry, RepositoryAuditEntryData, OrganizationAuditEntryData): + """Audit log entry for a repo.create event.""" + + __schema__ = github_schema + __field_names__ = ("fork_parent_name", "fork_source_name", "visibility") + fork_parent_name = sgqlc.types.Field(String, graphql_name="forkParentName") + """The name of the parent repository for this forked repository.""" + + fork_source_name = sgqlc.types.Field(String, graphql_name="forkSourceName") + """The name of the root repository for this network.""" + + visibility = sgqlc.types.Field(RepoCreateAuditEntryVisibility, graphql_name="visibility") + """The visibility of the repository""" + + +class RepoDestroyAuditEntry(sgqlc.types.Type, Node, AuditEntry, RepositoryAuditEntryData, OrganizationAuditEntryData): + """Audit log entry for a repo.destroy event.""" + + __schema__ = github_schema + __field_names__ = ("visibility",) + visibility = sgqlc.types.Field(RepoDestroyAuditEntryVisibility, graphql_name="visibility") + """The visibility of the repository""" + + +class RepoRemoveMemberAuditEntry(sgqlc.types.Type, Node, AuditEntry, OrganizationAuditEntryData, RepositoryAuditEntryData): + """Audit log entry for a repo.remove_member event.""" + + __schema__ = github_schema + __field_names__ = ("visibility",) + visibility = 
sgqlc.types.Field(RepoRemoveMemberAuditEntryVisibility, graphql_name="visibility") + """The visibility of the repository""" + + +class RepoRemoveTopicAuditEntry( + sgqlc.types.Type, Node, AuditEntry, RepositoryAuditEntryData, OrganizationAuditEntryData, TopicAuditEntryData +): + """Audit log entry for a repo.remove_topic event.""" + + __schema__ = github_schema + __field_names__ = () + + +class Repository(sgqlc.types.Type, Node, ProjectOwner, PackageOwner, Subscribable, Starrable, UniformResourceLocatable, RepositoryInfo): + """A repository contains the content for a project.""" + + __schema__ = github_schema + __field_names__ = ( + "allow_update_branch", + "assignable_users", + "auto_merge_allowed", + "branch_protection_rules", + "code_of_conduct", + "codeowners", + "collaborators", + "commit_comments", + "contact_links", + "database_id", + "default_branch_ref", + "delete_branch_on_merge", + "deploy_keys", + "deployments", + "discussion", + "discussion_categories", + "discussions", + "disk_usage", + "environment", + "environments", + "forking_allowed", + "forks", + "funding_links", + "interaction_ability", + "is_blank_issues_enabled", + "is_disabled", + "is_empty", + "is_security_policy_enabled", + "is_user_configuration_repository", + "issue", + "issue_or_pull_request", + "issue_templates", + "issues", + "label", + "labels", + "languages", + "latest_release", + "mentionable_users", + "merge_commit_allowed", + "milestone", + "milestones", + "object", + "parent", + "pinned_discussions", + "pinned_issues", + "primary_language", + "project_next", + "projects_next", + "pull_request", + "pull_request_templates", + "pull_requests", + "rebase_merge_allowed", + "ref", + "refs", + "release", + "releases", + "repository_topics", + "security_policy_url", + "squash_merge_allowed", + "squash_pr_title_used_as_default", + "ssh_url", + "submodules", + "temp_clone_token", + "template_repository", + "viewer_can_administer", + "viewer_can_update_topics", + 
"viewer_default_commit_email", + "viewer_default_merge_method", + "viewer_permission", + "viewer_possible_commit_emails", + "vulnerability_alerts", + "watchers", + ) + allow_update_branch = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="allowUpdateBranch") + """Whether or not a pull request head branch that is behind its base + branch can always be updated even if it is not required to be up + to date before merging. + """ + + assignable_users = sgqlc.types.Field( + sgqlc.types.non_null(UserConnection), + graphql_name="assignableUsers", + args=sgqlc.types.ArgDict( + ( + ("query", sgqlc.types.Arg(String, graphql_name="query", default=None)), + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ) + ), + ) + """A list of users that can be assigned to issues in this repository. + + Arguments: + + * `query` (`String`): Filters users with query on user name and + login + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. + """ + + auto_merge_allowed = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="autoMergeAllowed") + """Whether or not Auto-merge can be enabled on pull requests in this + repository. 
+ """ + + branch_protection_rules = sgqlc.types.Field( + sgqlc.types.non_null(BranchProtectionRuleConnection), + graphql_name="branchProtectionRules", + args=sgqlc.types.ArgDict( + ( + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ) + ), + ) + """A list of branch protection rules for this repository. + + Arguments: + + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. + """ + + code_of_conduct = sgqlc.types.Field(CodeOfConduct, graphql_name="codeOfConduct") + """Returns the code of conduct for this repository""" + + codeowners = sgqlc.types.Field( + RepositoryCodeowners, + graphql_name="codeowners", + args=sgqlc.types.ArgDict((("ref_name", sgqlc.types.Arg(String, graphql_name="refName", default=None)),)), + ) + """Information extracted from the repository's `CODEOWNERS` file. + + Arguments: + + * `ref_name` (`String`): The ref name used to return the + associated `CODEOWNERS` file. 
+ """ + + collaborators = sgqlc.types.Field( + RepositoryCollaboratorConnection, + graphql_name="collaborators", + args=sgqlc.types.ArgDict( + ( + ("affiliation", sgqlc.types.Arg(CollaboratorAffiliation, graphql_name="affiliation", default=None)), + ("query", sgqlc.types.Arg(String, graphql_name="query", default=None)), + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ) + ), + ) + """A list of collaborators associated with the repository. + + Arguments: + + * `affiliation` (`CollaboratorAffiliation`): Collaborators + affiliation level with a repository. + * `query` (`String`): Filters users with query on user name and + login + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. + """ + + commit_comments = sgqlc.types.Field( + sgqlc.types.non_null(CommitCommentConnection), + graphql_name="commitComments", + args=sgqlc.types.ArgDict( + ( + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ) + ), + ) + """A list of commit comments associated with the repository. + + Arguments: + + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. 
+ * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. + """ + + contact_links = sgqlc.types.Field(sgqlc.types.list_of(sgqlc.types.non_null(RepositoryContactLink)), graphql_name="contactLinks") + """Returns a list of contact links associated to the repository""" + + database_id = sgqlc.types.Field(Int, graphql_name="databaseId") + """Identifies the primary key from the database.""" + + default_branch_ref = sgqlc.types.Field(Ref, graphql_name="defaultBranchRef") + """The Ref associated with the repository's default branch.""" + + delete_branch_on_merge = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="deleteBranchOnMerge") + """Whether or not branches are automatically deleted when merged in + this repository. + """ + + deploy_keys = sgqlc.types.Field( + sgqlc.types.non_null(DeployKeyConnection), + graphql_name="deployKeys", + args=sgqlc.types.ArgDict( + ( + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ) + ), + ) + """A list of deploy keys that are on this repository. + + Arguments: + + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. 
+ """ + + deployments = sgqlc.types.Field( + sgqlc.types.non_null(DeploymentConnection), + graphql_name="deployments", + args=sgqlc.types.ArgDict( + ( + ( + "environments", + sgqlc.types.Arg(sgqlc.types.list_of(sgqlc.types.non_null(String)), graphql_name="environments", default=None), + ), + ("order_by", sgqlc.types.Arg(DeploymentOrder, graphql_name="orderBy", default={"field": "CREATED_AT", "direction": "ASC"})), + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ) + ), + ) + """Deployments associated with the repository + + Arguments: + + * `environments` (`[String!]`): Environments to list deployments + for + * `order_by` (`DeploymentOrder`): Ordering options for deployments + returned from the connection. (default: `{field: CREATED_AT, + direction: ASC}`) + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. + """ + + discussion = sgqlc.types.Field( + Discussion, + graphql_name="discussion", + args=sgqlc.types.ArgDict((("number", sgqlc.types.Arg(sgqlc.types.non_null(Int), graphql_name="number", default=None)),)), + ) + """Returns a single discussion from the current repository by number. + + Arguments: + + * `number` (`Int!`): The number for the discussion to be returned. 
+ """ + + discussion_categories = sgqlc.types.Field( + sgqlc.types.non_null(DiscussionCategoryConnection), + graphql_name="discussionCategories", + args=sgqlc.types.ArgDict( + ( + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ("filter_by_assignable", sgqlc.types.Arg(Boolean, graphql_name="filterByAssignable", default=False)), + ) + ), + ) + """A list of discussion categories that are available in the + repository. + + Arguments: + + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. + * `filter_by_assignable` (`Boolean`): Filter by categories that + are assignable by the viewer. (default: `false`) + """ + + discussions = sgqlc.types.Field( + sgqlc.types.non_null(DiscussionConnection), + graphql_name="discussions", + args=sgqlc.types.ArgDict( + ( + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ("category_id", sgqlc.types.Arg(ID, graphql_name="categoryId", default=None)), + ( + "order_by", + sgqlc.types.Arg(DiscussionOrder, graphql_name="orderBy", default={"field": "UPDATED_AT", "direction": "DESC"}), + ), + ) + ), + ) + """A list of discussions that have been opened in the repository. + + Arguments: + + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. 
+ * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. + * `category_id` (`ID`): Only include discussions that belong to + the category with this ID. (default: `null`) + * `order_by` (`DiscussionOrder`): Ordering options for discussions + returned from the connection. (default: `{field: UPDATED_AT, + direction: DESC}`) + """ + + disk_usage = sgqlc.types.Field(Int, graphql_name="diskUsage") + """The number of kilobytes this repository occupies on disk.""" + + environment = sgqlc.types.Field( + Environment, + graphql_name="environment", + args=sgqlc.types.ArgDict((("name", sgqlc.types.Arg(sgqlc.types.non_null(String), graphql_name="name", default=None)),)), + ) + """Returns a single active environment from the current repository by + name. + + Arguments: + + * `name` (`String!`): The name of the environment to be returned. + """ + + environments = sgqlc.types.Field( + sgqlc.types.non_null(EnvironmentConnection), + graphql_name="environments", + args=sgqlc.types.ArgDict( + ( + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ) + ), + ) + """A list of environments that are in this repository. + + Arguments: + + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. 
+ """ + + forking_allowed = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="forkingAllowed") + """Whether this repository allows forks.""" + + forks = sgqlc.types.Field( + sgqlc.types.non_null(RepositoryConnection), + graphql_name="forks", + args=sgqlc.types.ArgDict( + ( + ("privacy", sgqlc.types.Arg(RepositoryPrivacy, graphql_name="privacy", default=None)), + ("order_by", sgqlc.types.Arg(RepositoryOrder, graphql_name="orderBy", default=None)), + ("affiliations", sgqlc.types.Arg(sgqlc.types.list_of(RepositoryAffiliation), graphql_name="affiliations", default=None)), + ( + "owner_affiliations", + sgqlc.types.Arg( + sgqlc.types.list_of(RepositoryAffiliation), graphql_name="ownerAffiliations", default=("OWNER", "COLLABORATOR") + ), + ), + ("is_locked", sgqlc.types.Arg(Boolean, graphql_name="isLocked", default=None)), + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ) + ), + ) + """A list of direct forked repositories. + + Arguments: + + * `privacy` (`RepositoryPrivacy`): If non-null, filters + repositories according to privacy + * `order_by` (`RepositoryOrder`): Ordering options for + repositories returned from the connection + * `affiliations` (`[RepositoryAffiliation]`): Array of viewer's + affiliation options for repositories returned from the + connection. For example, OWNER will include only repositories + that the current viewer owns. + * `owner_affiliations` (`[RepositoryAffiliation]`): Array of + owner's affiliation options for repositories returned from the + connection. For example, OWNER will include only repositories + that the organization or user being viewed owns. 
(default: + `[OWNER, COLLABORATOR]`) + * `is_locked` (`Boolean`): If non-null, filters repositories + according to whether they have been locked + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. + """ + + funding_links = sgqlc.types.Field( + sgqlc.types.non_null(sgqlc.types.list_of(sgqlc.types.non_null(FundingLink))), graphql_name="fundingLinks" + ) + """The funding links for this repository""" + + interaction_ability = sgqlc.types.Field(RepositoryInteractionAbility, graphql_name="interactionAbility") + """The interaction ability settings for this repository.""" + + is_blank_issues_enabled = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="isBlankIssuesEnabled") + """Returns true if blank issue creation is allowed""" + + is_disabled = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="isDisabled") + """Returns whether or not this repository disabled.""" + + is_empty = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="isEmpty") + """Returns whether or not this repository is empty.""" + + is_security_policy_enabled = sgqlc.types.Field(Boolean, graphql_name="isSecurityPolicyEnabled") + """Returns true if this repository has a security policy""" + + is_user_configuration_repository = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="isUserConfigurationRepository") + """Is this repository a user configuration repository?""" + + issue = sgqlc.types.Field( + Issue, + graphql_name="issue", + args=sgqlc.types.ArgDict((("number", sgqlc.types.Arg(sgqlc.types.non_null(Int), graphql_name="number", default=None)),)), + ) + """Returns a single issue from the current repository by number. 
+ + Arguments: + + * `number` (`Int!`): The number for the issue to be returned. + """ + + issue_or_pull_request = sgqlc.types.Field( + "IssueOrPullRequest", + graphql_name="issueOrPullRequest", + args=sgqlc.types.ArgDict((("number", sgqlc.types.Arg(sgqlc.types.non_null(Int), graphql_name="number", default=None)),)), + ) + """Returns a single issue-like object from the current repository by + number. + + Arguments: + + * `number` (`Int!`): The number for the issue to be returned. + """ + + issue_templates = sgqlc.types.Field(sgqlc.types.list_of(sgqlc.types.non_null(IssueTemplate)), graphql_name="issueTemplates") + """Returns a list of issue templates associated to the repository""" + + issues = sgqlc.types.Field( + sgqlc.types.non_null(IssueConnection), + graphql_name="issues", + args=sgqlc.types.ArgDict( + ( + ("order_by", sgqlc.types.Arg(IssueOrder, graphql_name="orderBy", default=None)), + ("labels", sgqlc.types.Arg(sgqlc.types.list_of(sgqlc.types.non_null(String)), graphql_name="labels", default=None)), + ("states", sgqlc.types.Arg(sgqlc.types.list_of(sgqlc.types.non_null(IssueState)), graphql_name="states", default=None)), + ("filter_by", sgqlc.types.Arg(IssueFilters, graphql_name="filterBy", default=None)), + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ) + ), + ) + """A list of issues that have been opened in the repository. + + Arguments: + + * `order_by` (`IssueOrder`): Ordering options for issues returned + from the connection. + * `labels` (`[String!]`): A list of label names to filter the pull + requests by. + * `states` (`[IssueState!]`): A list of states to filter the + issues by. + * `filter_by` (`IssueFilters`): Filtering options for issues + returned from the connection. 
+ * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. + """ + + label = sgqlc.types.Field( + Label, + graphql_name="label", + args=sgqlc.types.ArgDict((("name", sgqlc.types.Arg(sgqlc.types.non_null(String), graphql_name="name", default=None)),)), + ) + """Returns a single label by name + + Arguments: + + * `name` (`String!`): Label name + """ + + labels = sgqlc.types.Field( + LabelConnection, + graphql_name="labels", + args=sgqlc.types.ArgDict( + ( + ("order_by", sgqlc.types.Arg(LabelOrder, graphql_name="orderBy", default={"field": "CREATED_AT", "direction": "ASC"})), + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ("query", sgqlc.types.Arg(String, graphql_name="query", default=None)), + ) + ), + ) + """A list of labels associated with the repository. + + Arguments: + + * `order_by` (`LabelOrder`): Ordering options for labels returned + from the connection. (default: `{field: CREATED_AT, direction: + ASC}`) + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. + * `query` (`String`): If provided, searches labels by name and + description. 
+ """ + + languages = sgqlc.types.Field( + LanguageConnection, + graphql_name="languages", + args=sgqlc.types.ArgDict( + ( + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ("order_by", sgqlc.types.Arg(LanguageOrder, graphql_name="orderBy", default=None)), + ) + ), + ) + """A list containing a breakdown of the language composition of the + repository. + + Arguments: + + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. + * `order_by` (`LanguageOrder`): Order for connection + """ + + latest_release = sgqlc.types.Field(Release, graphql_name="latestRelease") + """Get the latest release for the repository if one exists.""" + + mentionable_users = sgqlc.types.Field( + sgqlc.types.non_null(UserConnection), + graphql_name="mentionableUsers", + args=sgqlc.types.ArgDict( + ( + ("query", sgqlc.types.Arg(String, graphql_name="query", default=None)), + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ) + ), + ) + """A list of Users that can be mentioned in the context of the + repository. + + Arguments: + + * `query` (`String`): Filters users with query on user name and + login + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. 
+ * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. + """ + + merge_commit_allowed = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="mergeCommitAllowed") + """Whether or not PRs are merged with a merge commit on this + repository. + """ + + milestone = sgqlc.types.Field( + Milestone, + graphql_name="milestone", + args=sgqlc.types.ArgDict((("number", sgqlc.types.Arg(sgqlc.types.non_null(Int), graphql_name="number", default=None)),)), + ) + """Returns a single milestone from the current repository by number. + + Arguments: + + * `number` (`Int!`): The number for the milestone to be returned. + """ + + milestones = sgqlc.types.Field( + MilestoneConnection, + graphql_name="milestones", + args=sgqlc.types.ArgDict( + ( + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ("states", sgqlc.types.Arg(sgqlc.types.list_of(sgqlc.types.non_null(MilestoneState)), graphql_name="states", default=None)), + ("order_by", sgqlc.types.Arg(MilestoneOrder, graphql_name="orderBy", default=None)), + ("query", sgqlc.types.Arg(String, graphql_name="query", default=None)), + ) + ), + ) + """A list of milestones associated with the repository. + + Arguments: + + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. + * `states` (`[MilestoneState!]`): Filter by the state of the + milestones. 
+ * `order_by` (`MilestoneOrder`): Ordering options for milestones. + * `query` (`String`): Filters milestones with a query on the title + """ + + object = sgqlc.types.Field( + GitObject, + graphql_name="object", + args=sgqlc.types.ArgDict( + ( + ("oid", sgqlc.types.Arg(GitObjectID, graphql_name="oid", default=None)), + ("expression", sgqlc.types.Arg(String, graphql_name="expression", default=None)), + ) + ), + ) + """A Git object in the repository + + Arguments: + + * `oid` (`GitObjectID`): The Git object ID + * `expression` (`String`): A Git revision expression suitable for + rev-parse + """ + + parent = sgqlc.types.Field("Repository", graphql_name="parent") + """The repository parent, if this is a fork.""" + + pinned_discussions = sgqlc.types.Field( + sgqlc.types.non_null(PinnedDiscussionConnection), + graphql_name="pinnedDiscussions", + args=sgqlc.types.ArgDict( + ( + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ) + ), + ) + """A list of discussions that have been pinned in this repository. + + Arguments: + + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. 
+ """ + + pinned_issues = sgqlc.types.Field( + PinnedIssueConnection, + graphql_name="pinnedIssues", + args=sgqlc.types.ArgDict( + ( + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ) + ), + ) + """A list of pinned issues for this repository. + + Arguments: + + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. + """ + + primary_language = sgqlc.types.Field(Language, graphql_name="primaryLanguage") + """The primary language of the repository's code.""" + + project_next = sgqlc.types.Field( + ProjectNext, + graphql_name="projectNext", + args=sgqlc.types.ArgDict((("number", sgqlc.types.Arg(sgqlc.types.non_null(Int), graphql_name="number", default=None)),)), + ) + """Finds and returns the Project (beta) according to the provided + Project (beta) number. + + Arguments: + + * `number` (`Int!`): The ProjectNext number. 
+ """ + + projects_next = sgqlc.types.Field( + sgqlc.types.non_null(ProjectNextConnection), + graphql_name="projectsNext", + args=sgqlc.types.ArgDict( + ( + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ("query", sgqlc.types.Arg(String, graphql_name="query", default=None)), + ("sort_by", sgqlc.types.Arg(ProjectNextOrderField, graphql_name="sortBy", default="TITLE")), + ) + ), + ) + """List of projects (beta) linked to this repository. + + Arguments: + + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. + * `query` (`String`): A project (beta) to search for linked to the + repo. + * `sort_by` (`ProjectNextOrderField`): How to order the returned + project (beta) objects. (default: `TITLE`) + """ + + pull_request = sgqlc.types.Field( + PullRequest, + graphql_name="pullRequest", + args=sgqlc.types.ArgDict((("number", sgqlc.types.Arg(sgqlc.types.non_null(Int), graphql_name="number", default=None)),)), + ) + """Returns a single pull request from the current repository by + number. + + Arguments: + + * `number` (`Int!`): The number for the pull request to be + returned. 
+ """ + + pull_request_templates = sgqlc.types.Field( + sgqlc.types.list_of(sgqlc.types.non_null(PullRequestTemplate)), graphql_name="pullRequestTemplates" + ) + """Returns a list of pull request templates associated to the + repository + """ + + pull_requests = sgqlc.types.Field( + sgqlc.types.non_null(PullRequestConnection), + graphql_name="pullRequests", + args=sgqlc.types.ArgDict( + ( + ( + "states", + sgqlc.types.Arg(sgqlc.types.list_of(sgqlc.types.non_null(PullRequestState)), graphql_name="states", default=None), + ), + ("labels", sgqlc.types.Arg(sgqlc.types.list_of(sgqlc.types.non_null(String)), graphql_name="labels", default=None)), + ("head_ref_name", sgqlc.types.Arg(String, graphql_name="headRefName", default=None)), + ("base_ref_name", sgqlc.types.Arg(String, graphql_name="baseRefName", default=None)), + ("order_by", sgqlc.types.Arg(IssueOrder, graphql_name="orderBy", default=None)), + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ) + ), + ) + """A list of pull requests that have been opened in the repository. + + Arguments: + + * `states` (`[PullRequestState!]`): A list of states to filter the + pull requests by. + * `labels` (`[String!]`): A list of label names to filter the pull + requests by. + * `head_ref_name` (`String`): The head ref name to filter the pull + requests by. + * `base_ref_name` (`String`): The base ref name to filter the pull + requests by. + * `order_by` (`IssueOrder`): Ordering options for pull requests + returned from the connection. + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. 
+ * `last` (`Int`): Returns the last _n_ elements from the list. + """ + + rebase_merge_allowed = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="rebaseMergeAllowed") + """Whether or not rebase-merging is enabled on this repository.""" + + ref = sgqlc.types.Field( + Ref, + graphql_name="ref", + args=sgqlc.types.ArgDict( + (("qualified_name", sgqlc.types.Arg(sgqlc.types.non_null(String), graphql_name="qualifiedName", default=None)),) + ), + ) + """Fetch a given ref from the repository + + Arguments: + + * `qualified_name` (`String!`): The ref to retrieve. Fully + qualified matches are checked in order (`refs/heads/master`) + before falling back onto checks for short name matches + (`master`). + """ + + refs = sgqlc.types.Field( + RefConnection, + graphql_name="refs", + args=sgqlc.types.ArgDict( + ( + ("query", sgqlc.types.Arg(String, graphql_name="query", default=None)), + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ("ref_prefix", sgqlc.types.Arg(sgqlc.types.non_null(String), graphql_name="refPrefix", default=None)), + ("direction", sgqlc.types.Arg(OrderDirection, graphql_name="direction", default=None)), + ("order_by", sgqlc.types.Arg(RefOrder, graphql_name="orderBy", default=None)), + ) + ), + ) + """Fetch a list of refs from the repository + + Arguments: + + * `query` (`String`): Filters refs with query on name + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. 
+ * `ref_prefix` (`String!`): A ref name prefix like `refs/heads/`, + `refs/tags/`, etc. + * `direction` (`OrderDirection`): DEPRECATED: use orderBy. The + ordering direction. + * `order_by` (`RefOrder`): Ordering options for refs returned from + the connection. + """ + + release = sgqlc.types.Field( + Release, + graphql_name="release", + args=sgqlc.types.ArgDict((("tag_name", sgqlc.types.Arg(sgqlc.types.non_null(String), graphql_name="tagName", default=None)),)), + ) + """Lookup a single release given various criteria. + + Arguments: + + * `tag_name` (`String!`): The name of the Tag the Release was + created from + """ + + releases = sgqlc.types.Field( + sgqlc.types.non_null(ReleaseConnection), + graphql_name="releases", + args=sgqlc.types.ArgDict( + ( + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ("order_by", sgqlc.types.Arg(ReleaseOrder, graphql_name="orderBy", default=None)), + ) + ), + ) + """List of releases which are dependent on this repository. + + Arguments: + + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. 
+ * `order_by` (`ReleaseOrder`): Order for connection + """ + + repository_topics = sgqlc.types.Field( + sgqlc.types.non_null(RepositoryTopicConnection), + graphql_name="repositoryTopics", + args=sgqlc.types.ArgDict( + ( + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ) + ), + ) + """A list of applied repository-topic associations for this + repository. + + Arguments: + + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. + """ + + security_policy_url = sgqlc.types.Field(URI, graphql_name="securityPolicyUrl") + """The security policy URL.""" + + squash_merge_allowed = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="squashMergeAllowed") + """Whether or not squash-merging is enabled on this repository.""" + + squash_pr_title_used_as_default = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="squashPrTitleUsedAsDefault") + """Whether a squash merge commit can use the pull request title as + default. 
+ """ + + ssh_url = sgqlc.types.Field(sgqlc.types.non_null(GitSSHRemote), graphql_name="sshUrl") + """The SSH URL to clone this repository""" + + submodules = sgqlc.types.Field( + sgqlc.types.non_null(SubmoduleConnection), + graphql_name="submodules", + args=sgqlc.types.ArgDict( + ( + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ) + ), + ) + """Returns a list of all submodules in this repository parsed from + the .gitmodules file as of the default branch's HEAD commit. + + Arguments: + + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. + """ + + temp_clone_token = sgqlc.types.Field(String, graphql_name="tempCloneToken") + """Temporary authentication token for cloning this repository.""" + + template_repository = sgqlc.types.Field("Repository", graphql_name="templateRepository") + """The repository from which this repository was generated, if any.""" + + viewer_can_administer = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="viewerCanAdminister") + """Indicates whether the viewer has admin permissions on this + repository. + """ + + viewer_can_update_topics = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="viewerCanUpdateTopics") + """Indicates whether the viewer can update the topics of this + repository. 
+ """ + + viewer_default_commit_email = sgqlc.types.Field(String, graphql_name="viewerDefaultCommitEmail") + """The last commit email for the viewer.""" + + viewer_default_merge_method = sgqlc.types.Field(sgqlc.types.non_null(PullRequestMergeMethod), graphql_name="viewerDefaultMergeMethod") + """The last used merge method by the viewer or the default for the + repository. + """ + + viewer_permission = sgqlc.types.Field(RepositoryPermission, graphql_name="viewerPermission") + """The users permission level on the repository. Will return null if + authenticated as an GitHub App. + """ + + viewer_possible_commit_emails = sgqlc.types.Field( + sgqlc.types.list_of(sgqlc.types.non_null(String)), graphql_name="viewerPossibleCommitEmails" + ) + """A list of emails this viewer can commit with.""" + + vulnerability_alerts = sgqlc.types.Field( + RepositoryVulnerabilityAlertConnection, + graphql_name="vulnerabilityAlerts", + args=sgqlc.types.ArgDict( + ( + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ( + "states", + sgqlc.types.Arg( + sgqlc.types.list_of(sgqlc.types.non_null(RepositoryVulnerabilityAlertState)), graphql_name="states", default=None + ), + ), + ) + ), + ) + """A list of vulnerability alerts that are on this repository. + + Arguments: + + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. 
+ * `states` (`[RepositoryVulnerabilityAlertState!]`): Filter by the + state of the alert + """ + + watchers = sgqlc.types.Field( + sgqlc.types.non_null(UserConnection), + graphql_name="watchers", + args=sgqlc.types.ArgDict( + ( + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ) + ), + ) + """A list of users watching the repository. + + Arguments: + + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. + """ + + +class RepositoryInvitation(sgqlc.types.Type, Node): + """An invitation for a user to be added to a repository.""" + + __schema__ = github_schema + __field_names__ = ("email", "invitee", "inviter", "permalink", "permission", "repository") + email = sgqlc.types.Field(String, graphql_name="email") + """The email address that received the invitation.""" + + invitee = sgqlc.types.Field("User", graphql_name="invitee") + """The user who received the invitation.""" + + inviter = sgqlc.types.Field(sgqlc.types.non_null("User"), graphql_name="inviter") + """The user who created the invitation.""" + + permalink = sgqlc.types.Field(sgqlc.types.non_null(URI), graphql_name="permalink") + """The permalink for this repository invitation.""" + + permission = sgqlc.types.Field(sgqlc.types.non_null(RepositoryPermission), graphql_name="permission") + """The permission granted on this repository by this invitation.""" + + repository = sgqlc.types.Field(RepositoryInfo, graphql_name="repository") + """The Repository the user is invited to.""" + + +class 
RepositoryMigration(sgqlc.types.Type, Node, Migration): + """An Octoshift repository migration.""" + + __schema__ = github_schema + __field_names__ = () + + +class RepositoryTopic(sgqlc.types.Type, Node, UniformResourceLocatable): + """A repository-topic connects a repository to a topic.""" + + __schema__ = github_schema + __field_names__ = ("topic",) + topic = sgqlc.types.Field(sgqlc.types.non_null("Topic"), graphql_name="topic") + """The topic.""" + + +class RepositoryVisibilityChangeDisableAuditEntry(sgqlc.types.Type, Node, AuditEntry, EnterpriseAuditEntryData, OrganizationAuditEntryData): + """Audit log entry for a repository_visibility_change.disable event.""" + + __schema__ = github_schema + __field_names__ = () + + +class RepositoryVisibilityChangeEnableAuditEntry(sgqlc.types.Type, Node, AuditEntry, EnterpriseAuditEntryData, OrganizationAuditEntryData): + """Audit log entry for a repository_visibility_change.enable event.""" + + __schema__ = github_schema + __field_names__ = () + + +class RepositoryVulnerabilityAlert(sgqlc.types.Type, Node, RepositoryNode): + """A Dependabot alert for a repository with a dependency affected by + a security vulnerability. 
+ """ + + __schema__ = github_schema + __field_names__ = ( + "created_at", + "dependabot_update", + "dismiss_reason", + "dismissed_at", + "dismisser", + "fix_reason", + "fixed_at", + "number", + "security_advisory", + "security_vulnerability", + "state", + "vulnerable_manifest_filename", + "vulnerable_manifest_path", + "vulnerable_requirements", + ) + created_at = sgqlc.types.Field(sgqlc.types.non_null(DateTime), graphql_name="createdAt") + """When was the alert created?""" + + dependabot_update = sgqlc.types.Field(DependabotUpdate, graphql_name="dependabotUpdate") + """The associated Dependabot update""" + + dismiss_reason = sgqlc.types.Field(String, graphql_name="dismissReason") + """The reason the alert was dismissed""" + + dismissed_at = sgqlc.types.Field(DateTime, graphql_name="dismissedAt") + """When was the alert dismissed?""" + + dismisser = sgqlc.types.Field("User", graphql_name="dismisser") + """The user who dismissed the alert""" + + fix_reason = sgqlc.types.Field(String, graphql_name="fixReason") + """The reason the alert was marked as fixed.""" + + fixed_at = sgqlc.types.Field(DateTime, graphql_name="fixedAt") + """When was the alert fixed?""" + + number = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="number") + """Identifies the alert number.""" + + security_advisory = sgqlc.types.Field("SecurityAdvisory", graphql_name="securityAdvisory") + """The associated security advisory""" + + security_vulnerability = sgqlc.types.Field(SecurityVulnerability, graphql_name="securityVulnerability") + """The associated security vulnerability""" + + state = sgqlc.types.Field(sgqlc.types.non_null(RepositoryVulnerabilityAlertState), graphql_name="state") + """Identifies the state of the alert.""" + + vulnerable_manifest_filename = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="vulnerableManifestFilename") + """The vulnerable manifest filename""" + + vulnerable_manifest_path = sgqlc.types.Field(sgqlc.types.non_null(String), 
graphql_name="vulnerableManifestPath") + """The vulnerable manifest path""" + + vulnerable_requirements = sgqlc.types.Field(String, graphql_name="vulnerableRequirements") + """The vulnerable requirements""" + + +class RestrictedContribution(sgqlc.types.Type, Contribution): + """Represents a private contribution a user made on GitHub.""" + + __schema__ = github_schema + __field_names__ = () + + +class ReviewDismissalAllowance(sgqlc.types.Type, Node): + """A user, team, or app who has the ability to dismiss a review on a + protected branch. + """ + + __schema__ = github_schema + __field_names__ = ("actor", "branch_protection_rule") + actor = sgqlc.types.Field("ReviewDismissalAllowanceActor", graphql_name="actor") + """The actor that can dismiss.""" + + branch_protection_rule = sgqlc.types.Field(BranchProtectionRule, graphql_name="branchProtectionRule") + """Identifies the branch protection rule associated with the allowed + user, team, or app. + """ + + +class ReviewDismissedEvent(sgqlc.types.Type, Node, UniformResourceLocatable): + """Represents a 'review_dismissed' event on a given issue or pull + request. + """ + + __schema__ = github_schema + __field_names__ = ( + "actor", + "created_at", + "database_id", + "dismissal_message", + "dismissal_message_html", + "previous_review_state", + "pull_request", + "pull_request_commit", + "review", + ) + actor = sgqlc.types.Field(Actor, graphql_name="actor") + """Identifies the actor who performed the event.""" + + created_at = sgqlc.types.Field(sgqlc.types.non_null(DateTime), graphql_name="createdAt") + """Identifies the date and time when the object was created.""" + + database_id = sgqlc.types.Field(Int, graphql_name="databaseId") + """Identifies the primary key from the database.""" + + dismissal_message = sgqlc.types.Field(String, graphql_name="dismissalMessage") + """Identifies the optional message associated with the + 'review_dismissed' event. 
+ """ + + dismissal_message_html = sgqlc.types.Field(String, graphql_name="dismissalMessageHTML") + """Identifies the optional message associated with the event, + rendered to HTML. + """ + + previous_review_state = sgqlc.types.Field(sgqlc.types.non_null(PullRequestReviewState), graphql_name="previousReviewState") + """Identifies the previous state of the review with the + 'review_dismissed' event. + """ + + pull_request = sgqlc.types.Field(sgqlc.types.non_null(PullRequest), graphql_name="pullRequest") + """PullRequest referenced by event.""" + + pull_request_commit = sgqlc.types.Field(PullRequestCommit, graphql_name="pullRequestCommit") + """Identifies the commit which caused the review to become stale.""" + + review = sgqlc.types.Field(PullRequestReview, graphql_name="review") + """Identifies the review associated with the 'review_dismissed' + event. + """ + + +class ReviewRequest(sgqlc.types.Type, Node): + """A request for a user to review a pull request.""" + + __schema__ = github_schema + __field_names__ = ("as_code_owner", "database_id", "pull_request", "requested_reviewer") + as_code_owner = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="asCodeOwner") + """Whether this request was created for a code owner""" + + database_id = sgqlc.types.Field(Int, graphql_name="databaseId") + """Identifies the primary key from the database.""" + + pull_request = sgqlc.types.Field(sgqlc.types.non_null(PullRequest), graphql_name="pullRequest") + """Identifies the pull request associated with this review request.""" + + requested_reviewer = sgqlc.types.Field("RequestedReviewer", graphql_name="requestedReviewer") + """The reviewer that is requested.""" + + +class ReviewRequestRemovedEvent(sgqlc.types.Type, Node): + """Represents an 'review_request_removed' event on a given pull + request. 
+ """ + + __schema__ = github_schema + __field_names__ = ("actor", "created_at", "pull_request", "requested_reviewer") + actor = sgqlc.types.Field(Actor, graphql_name="actor") + """Identifies the actor who performed the event.""" + + created_at = sgqlc.types.Field(sgqlc.types.non_null(DateTime), graphql_name="createdAt") + """Identifies the date and time when the object was created.""" + + pull_request = sgqlc.types.Field(sgqlc.types.non_null(PullRequest), graphql_name="pullRequest") + """PullRequest referenced by event.""" + + requested_reviewer = sgqlc.types.Field("RequestedReviewer", graphql_name="requestedReviewer") + """Identifies the reviewer whose review request was removed.""" + + +class ReviewRequestedEvent(sgqlc.types.Type, Node): + """Represents an 'review_requested' event on a given pull request.""" + + __schema__ = github_schema + __field_names__ = ("actor", "created_at", "pull_request", "requested_reviewer") + actor = sgqlc.types.Field(Actor, graphql_name="actor") + """Identifies the actor who performed the event.""" + + created_at = sgqlc.types.Field(sgqlc.types.non_null(DateTime), graphql_name="createdAt") + """Identifies the date and time when the object was created.""" + + pull_request = sgqlc.types.Field(sgqlc.types.non_null(PullRequest), graphql_name="pullRequest") + """PullRequest referenced by event.""" + + requested_reviewer = sgqlc.types.Field("RequestedReviewer", graphql_name="requestedReviewer") + """Identifies the reviewer whose review was requested.""" + + +class ReviewStatusHovercardContext(sgqlc.types.Type, HovercardContext): + """A hovercard context with a message describing the current code + review state of the pull request. + """ + + __schema__ = github_schema + __field_names__ = ("review_decision",) + review_decision = sgqlc.types.Field(PullRequestReviewDecision, graphql_name="reviewDecision") + """The current status of the pull request with respect to code + review. 
+ """ + + +class SavedReply(sgqlc.types.Type, Node): + """A Saved Reply is text a user can use to reply quickly.""" + + __schema__ = github_schema + __field_names__ = ("body", "body_html", "database_id", "title", "user") + body = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="body") + """The body of the saved reply.""" + + body_html = sgqlc.types.Field(sgqlc.types.non_null(HTML), graphql_name="bodyHTML") + """The saved reply body rendered to HTML.""" + + database_id = sgqlc.types.Field(Int, graphql_name="databaseId") + """Identifies the primary key from the database.""" + + title = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="title") + """The title of the saved reply.""" + + user = sgqlc.types.Field(Actor, graphql_name="user") + """The user that saved this reply.""" + + +class SecurityAdvisory(sgqlc.types.Type, Node): + """A GitHub Security Advisory""" + + __schema__ = github_schema + __field_names__ = ( + "classification", + "cvss", + "cwes", + "database_id", + "description", + "ghsa_id", + "identifiers", + "notifications_permalink", + "origin", + "permalink", + "published_at", + "references", + "severity", + "summary", + "updated_at", + "vulnerabilities", + "withdrawn_at", + ) + classification = sgqlc.types.Field(sgqlc.types.non_null(SecurityAdvisoryClassification), graphql_name="classification") + """The classification of the advisory""" + + cvss = sgqlc.types.Field(sgqlc.types.non_null(CVSS), graphql_name="cvss") + """The CVSS associated with this advisory""" + + cwes = sgqlc.types.Field( + sgqlc.types.non_null(CWEConnection), + graphql_name="cwes", + args=sgqlc.types.ArgDict( + ( + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ) + ), + ) + """CWEs associated with this Advisory + + 
Arguments: + + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. + """ + + database_id = sgqlc.types.Field(Int, graphql_name="databaseId") + """Identifies the primary key from the database.""" + + description = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="description") + """This is a long plaintext description of the advisory""" + + ghsa_id = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="ghsaId") + """The GitHub Security Advisory ID""" + + identifiers = sgqlc.types.Field( + sgqlc.types.non_null(sgqlc.types.list_of(sgqlc.types.non_null(SecurityAdvisoryIdentifier))), graphql_name="identifiers" + ) + """A list of identifiers for this advisory""" + + notifications_permalink = sgqlc.types.Field(URI, graphql_name="notificationsPermalink") + """The permalink for the advisory's dependabot alerts page""" + + origin = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="origin") + """The organization that originated the advisory""" + + permalink = sgqlc.types.Field(URI, graphql_name="permalink") + """The permalink for the advisory""" + + published_at = sgqlc.types.Field(sgqlc.types.non_null(DateTime), graphql_name="publishedAt") + """When the advisory was published""" + + references = sgqlc.types.Field( + sgqlc.types.non_null(sgqlc.types.list_of(sgqlc.types.non_null(SecurityAdvisoryReference))), graphql_name="references" + ) + """A list of references for this advisory""" + + severity = sgqlc.types.Field(sgqlc.types.non_null(SecurityAdvisorySeverity), graphql_name="severity") + """The severity of the advisory""" + + summary = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="summary") + """A short plaintext summary of the advisory""" + + updated_at = 
sgqlc.types.Field(sgqlc.types.non_null(DateTime), graphql_name="updatedAt") + """When the advisory was last updated""" + + vulnerabilities = sgqlc.types.Field( + sgqlc.types.non_null(SecurityVulnerabilityConnection), + graphql_name="vulnerabilities", + args=sgqlc.types.ArgDict( + ( + ( + "order_by", + sgqlc.types.Arg( + SecurityVulnerabilityOrder, graphql_name="orderBy", default={"field": "UPDATED_AT", "direction": "DESC"} + ), + ), + ("ecosystem", sgqlc.types.Arg(SecurityAdvisoryEcosystem, graphql_name="ecosystem", default=None)), + ("package", sgqlc.types.Arg(String, graphql_name="package", default=None)), + ( + "severities", + sgqlc.types.Arg( + sgqlc.types.list_of(sgqlc.types.non_null(SecurityAdvisorySeverity)), graphql_name="severities", default=None + ), + ), + ( + "classifications", + sgqlc.types.Arg( + sgqlc.types.list_of(sgqlc.types.non_null(SecurityAdvisoryClassification)), + graphql_name="classifications", + default=None, + ), + ), + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ) + ), + ) + """Vulnerabilities associated with this Advisory + + Arguments: + + * `order_by` (`SecurityVulnerabilityOrder`): Ordering options for + the returned topics. (default: `{field: UPDATED_AT, direction: + DESC}`) + * `ecosystem` (`SecurityAdvisoryEcosystem`): An ecosystem to + filter vulnerabilities by. + * `package` (`String`): A package name to filter vulnerabilities + by. + * `severities` (`[SecurityAdvisorySeverity!]`): A list of + severities to filter vulnerabilities by. + * `classifications` (`[SecurityAdvisoryClassification!]`): A list + of advisory classifications to filter vulnerabilities by. + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. 
+ * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. + """ + + withdrawn_at = sgqlc.types.Field(DateTime, graphql_name="withdrawnAt") + """When the advisory was withdrawn, if it has been withdrawn""" + + +class SmimeSignature(sgqlc.types.Type, GitSignature): + """Represents an S/MIME signature on a Commit or Tag.""" + + __schema__ = github_schema + __field_names__ = () + + +class SponsorsActivity(sgqlc.types.Type, Node): + """An event related to sponsorship activity.""" + + __schema__ = github_schema + __field_names__ = ("action", "previous_sponsors_tier", "sponsor", "sponsorable", "sponsors_tier", "timestamp") + action = sgqlc.types.Field(sgqlc.types.non_null(SponsorsActivityAction), graphql_name="action") + """What action this activity indicates took place.""" + + previous_sponsors_tier = sgqlc.types.Field("SponsorsTier", graphql_name="previousSponsorsTier") + """The tier that the sponsorship used to use, for tier change events.""" + + sponsor = sgqlc.types.Field("Sponsor", graphql_name="sponsor") + """The user or organization who triggered this activity and was/is + sponsoring the sponsorable. 
+ """ + + sponsorable = sgqlc.types.Field(sgqlc.types.non_null(Sponsorable), graphql_name="sponsorable") + """The user or organization that is being sponsored, the maintainer.""" + + sponsors_tier = sgqlc.types.Field("SponsorsTier", graphql_name="sponsorsTier") + """The associated sponsorship tier.""" + + timestamp = sgqlc.types.Field(DateTime, graphql_name="timestamp") + """The timestamp of this event.""" + + +class SponsorsListing(sgqlc.types.Type, Node): + """A GitHub Sponsors listing.""" + + __schema__ = github_schema + __field_names__ = ( + "active_goal", + "created_at", + "full_description", + "full_description_html", + "is_public", + "name", + "next_payout_date", + "short_description", + "slug", + "sponsorable", + "tiers", + ) + active_goal = sgqlc.types.Field(SponsorsGoal, graphql_name="activeGoal") + """The current goal the maintainer is trying to reach with GitHub + Sponsors, if any. + """ + + created_at = sgqlc.types.Field(sgqlc.types.non_null(DateTime), graphql_name="createdAt") + """Identifies the date and time when the object was created.""" + + full_description = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="fullDescription") + """The full description of the listing.""" + + full_description_html = sgqlc.types.Field(sgqlc.types.non_null(HTML), graphql_name="fullDescriptionHTML") + """The full description of the listing rendered to HTML.""" + + is_public = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="isPublic") + """Whether this listing is publicly visible.""" + + name = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="name") + """The listing's full name.""" + + next_payout_date = sgqlc.types.Field(Date, graphql_name="nextPayoutDate") + """A future date on which this listing is eligible to receive a + payout. 
+ """ + + short_description = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="shortDescription") + """The short description of the listing.""" + + slug = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="slug") + """The short name of the listing.""" + + sponsorable = sgqlc.types.Field(sgqlc.types.non_null(Sponsorable), graphql_name="sponsorable") + """The entity this listing represents who can be sponsored on GitHub + Sponsors. + """ + + tiers = sgqlc.types.Field( + SponsorsTierConnection, + graphql_name="tiers", + args=sgqlc.types.ArgDict( + ( + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ( + "order_by", + sgqlc.types.Arg( + SponsorsTierOrder, graphql_name="orderBy", default={"field": "MONTHLY_PRICE_IN_CENTS", "direction": "ASC"} + ), + ), + ) + ), + ) + """The published tiers for this GitHub Sponsors listing. + + Arguments: + + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. + * `order_by` (`SponsorsTierOrder`): Ordering options for Sponsors + tiers returned from the connection. 
(default: `{field: + MONTHLY_PRICE_IN_CENTS, direction: ASC}`) + """ + + +class SponsorsTier(sgqlc.types.Type, Node): + """A GitHub Sponsors tier associated with a GitHub Sponsors listing.""" + + __schema__ = github_schema + __field_names__ = ( + "admin_info", + "closest_lesser_value_tier", + "created_at", + "description", + "description_html", + "is_custom_amount", + "is_one_time", + "monthly_price_in_cents", + "monthly_price_in_dollars", + "name", + "sponsors_listing", + "updated_at", + ) + admin_info = sgqlc.types.Field(SponsorsTierAdminInfo, graphql_name="adminInfo") + """SponsorsTier information only visible to users that can administer + the associated Sponsors listing. + """ + + closest_lesser_value_tier = sgqlc.types.Field("SponsorsTier", graphql_name="closestLesserValueTier") + """Get a different tier for this tier's maintainer that is at the + same frequency as this tier but with an equal or lesser cost. + Returns the published tier with the monthly price closest to this + tier's without going over. + """ + + created_at = sgqlc.types.Field(sgqlc.types.non_null(DateTime), graphql_name="createdAt") + """Identifies the date and time when the object was created.""" + + description = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="description") + """The description of the tier.""" + + description_html = sgqlc.types.Field(sgqlc.types.non_null(HTML), graphql_name="descriptionHTML") + """The tier description rendered to HTML""" + + is_custom_amount = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="isCustomAmount") + """Whether this tier was chosen at checkout time by the sponsor + rather than defined ahead of time by the maintainer who manages + the Sponsors listing. 
+ """ + + is_one_time = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="isOneTime") + """Whether this tier is only for use with one-time sponsorships.""" + + monthly_price_in_cents = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="monthlyPriceInCents") + """How much this tier costs per month in cents.""" + + monthly_price_in_dollars = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="monthlyPriceInDollars") + """How much this tier costs per month in USD.""" + + name = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="name") + """The name of the tier.""" + + sponsors_listing = sgqlc.types.Field(sgqlc.types.non_null(SponsorsListing), graphql_name="sponsorsListing") + """The sponsors listing that this tier belongs to.""" + + updated_at = sgqlc.types.Field(sgqlc.types.non_null(DateTime), graphql_name="updatedAt") + """Identifies the date and time when the object was last updated.""" + + +class Sponsorship(sgqlc.types.Type, Node): + """A sponsorship relationship between a sponsor and a maintainer""" + + __schema__ = github_schema + __field_names__ = ( + "created_at", + "is_one_time_payment", + "is_sponsor_opted_into_email", + "privacy_level", + "sponsor_entity", + "sponsorable", + "tier", + "tier_selected_at", + ) + created_at = sgqlc.types.Field(sgqlc.types.non_null(DateTime), graphql_name="createdAt") + """Identifies the date and time when the object was created.""" + + is_one_time_payment = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="isOneTimePayment") + """Whether this sponsorship represents a one-time payment versus a + recurring sponsorship. + """ + + is_sponsor_opted_into_email = sgqlc.types.Field(Boolean, graphql_name="isSponsorOptedIntoEmail") + """Check if the sponsor has chosen to receive sponsorship update + emails sent from the sponsorable. Only returns a non-null value + when the viewer has permission to know this. 
+ """ + + privacy_level = sgqlc.types.Field(sgqlc.types.non_null(SponsorshipPrivacy), graphql_name="privacyLevel") + """The privacy level for this sponsorship.""" + + sponsor_entity = sgqlc.types.Field("Sponsor", graphql_name="sponsorEntity") + """The user or organization that is sponsoring, if you have + permission to view them. + """ + + sponsorable = sgqlc.types.Field(sgqlc.types.non_null(Sponsorable), graphql_name="sponsorable") + """The entity that is being sponsored""" + + tier = sgqlc.types.Field(SponsorsTier, graphql_name="tier") + """The associated sponsorship tier""" + + tier_selected_at = sgqlc.types.Field(DateTime, graphql_name="tierSelectedAt") + """Identifies the date and time when the current tier was chosen for + this sponsorship. + """ + + +class SponsorshipNewsletter(sgqlc.types.Type, Node): + """An update sent to sponsors of a user or organization on GitHub + Sponsors. + """ + + __schema__ = github_schema + __field_names__ = ("body", "created_at", "is_published", "sponsorable", "subject", "updated_at") + body = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="body") + """The contents of the newsletter, the message the sponsorable wanted + to give. 
+ """ + + created_at = sgqlc.types.Field(sgqlc.types.non_null(DateTime), graphql_name="createdAt") + """Identifies the date and time when the object was created.""" + + is_published = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="isPublished") + """Indicates if the newsletter has been made available to sponsors.""" + + sponsorable = sgqlc.types.Field(sgqlc.types.non_null(Sponsorable), graphql_name="sponsorable") + """The user or organization this newsletter is from.""" + + subject = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="subject") + """The subject of the newsletter, what it's about.""" + + updated_at = sgqlc.types.Field(sgqlc.types.non_null(DateTime), graphql_name="updatedAt") + """Identifies the date and time when the object was last updated.""" + + +class Status(sgqlc.types.Type, Node): + """Represents a commit status.""" + + __schema__ = github_schema + __field_names__ = ("combined_contexts", "commit", "context", "contexts", "state") + combined_contexts = sgqlc.types.Field( + sgqlc.types.non_null(StatusCheckRollupContextConnection), + graphql_name="combinedContexts", + args=sgqlc.types.ArgDict( + ( + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ) + ), + ) + """A list of status contexts and check runs for this commit. + + Arguments: + + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. 
+ """ + + commit = sgqlc.types.Field(Commit, graphql_name="commit") + """The commit this status is attached to.""" + + context = sgqlc.types.Field( + "StatusContext", + graphql_name="context", + args=sgqlc.types.ArgDict((("name", sgqlc.types.Arg(sgqlc.types.non_null(String), graphql_name="name", default=None)),)), + ) + """Looks up an individual status context by context name. + + Arguments: + + * `name` (`String!`): The context name. + """ + + contexts = sgqlc.types.Field(sgqlc.types.non_null(sgqlc.types.list_of(sgqlc.types.non_null("StatusContext"))), graphql_name="contexts") + """The individual status contexts for this commit.""" + + state = sgqlc.types.Field(sgqlc.types.non_null(StatusState), graphql_name="state") + """The combined commit status.""" + + +class StatusCheckRollup(sgqlc.types.Type, Node): + """Represents the rollup for both the check runs and status for a + commit. + """ + + __schema__ = github_schema + __field_names__ = ("commit", "contexts", "state") + commit = sgqlc.types.Field(Commit, graphql_name="commit") + """The commit the status and check runs are attached to.""" + + contexts = sgqlc.types.Field( + sgqlc.types.non_null(StatusCheckRollupContextConnection), + graphql_name="contexts", + args=sgqlc.types.ArgDict( + ( + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ) + ), + ) + """A list of status contexts and check runs for this commit. + + Arguments: + + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. 
+ """ + + state = sgqlc.types.Field(sgqlc.types.non_null(StatusState), graphql_name="state") + """The combined status for the commit.""" + + +class StatusContext(sgqlc.types.Type, RequirableByPullRequest, Node): + """Represents an individual commit status context""" + + __schema__ = github_schema + __field_names__ = ("avatar_url", "commit", "context", "created_at", "creator", "description", "state", "target_url") + avatar_url = sgqlc.types.Field( + URI, graphql_name="avatarUrl", args=sgqlc.types.ArgDict((("size", sgqlc.types.Arg(Int, graphql_name="size", default=40)),)) + ) + """The avatar of the OAuth application or the user that created the + status + + Arguments: + + * `size` (`Int`): The size of the resulting square image. + (default: `40`) + """ + + commit = sgqlc.types.Field(Commit, graphql_name="commit") + """This commit this status context is attached to.""" + + context = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="context") + """The name of this status context.""" + + created_at = sgqlc.types.Field(sgqlc.types.non_null(DateTime), graphql_name="createdAt") + """Identifies the date and time when the object was created.""" + + creator = sgqlc.types.Field(Actor, graphql_name="creator") + """The actor who created this status context.""" + + description = sgqlc.types.Field(String, graphql_name="description") + """The description for this status context.""" + + state = sgqlc.types.Field(sgqlc.types.non_null(StatusState), graphql_name="state") + """The state of this status context.""" + + target_url = sgqlc.types.Field(URI, graphql_name="targetUrl") + """The URL for this status context.""" + + +class SubscribedEvent(sgqlc.types.Type, Node): + """Represents a 'subscribed' event on a given `Subscribable`.""" + + __schema__ = github_schema + __field_names__ = ("actor", "created_at", "subscribable") + actor = sgqlc.types.Field(Actor, graphql_name="actor") + """Identifies the actor who performed the event.""" + + created_at = 
sgqlc.types.Field(sgqlc.types.non_null(DateTime), graphql_name="createdAt") + """Identifies the date and time when the object was created.""" + + subscribable = sgqlc.types.Field(sgqlc.types.non_null(Subscribable), graphql_name="subscribable") + """Object referenced by event.""" + + +class Tag(sgqlc.types.Type, Node, GitObject): + """Represents a Git tag.""" + + __schema__ = github_schema + __field_names__ = ("message", "name", "tagger", "target") + message = sgqlc.types.Field(String, graphql_name="message") + """The Git tag message.""" + + name = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="name") + """The Git tag name.""" + + tagger = sgqlc.types.Field(GitActor, graphql_name="tagger") + """Details about the tag author.""" + + target = sgqlc.types.Field(sgqlc.types.non_null(GitObject), graphql_name="target") + """The Git object the tag points to.""" + + +class Team(sgqlc.types.Type, Node, Subscribable, MemberStatusable): + """A team of users in an organization.""" + + __schema__ = github_schema + __field_names__ = ( + "ancestors", + "avatar_url", + "child_teams", + "combined_slug", + "created_at", + "database_id", + "description", + "discussion", + "discussions", + "discussions_resource_path", + "discussions_url", + "edit_team_resource_path", + "edit_team_url", + "invitations", + "members", + "members_resource_path", + "members_url", + "name", + "new_team_resource_path", + "new_team_url", + "organization", + "parent_team", + "privacy", + "repositories", + "repositories_resource_path", + "repositories_url", + "resource_path", + "slug", + "teams_resource_path", + "teams_url", + "updated_at", + "url", + "viewer_can_administer", + ) + ancestors = sgqlc.types.Field( + sgqlc.types.non_null(TeamConnection), + graphql_name="ancestors", + args=sgqlc.types.ArgDict( + ( + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, 
graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ) + ), + ) + """A list of teams that are ancestors of this team. + + Arguments: + + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. + """ + + avatar_url = sgqlc.types.Field( + URI, graphql_name="avatarUrl", args=sgqlc.types.ArgDict((("size", sgqlc.types.Arg(Int, graphql_name="size", default=400)),)) + ) + """A URL pointing to the team's avatar. + + Arguments: + + * `size` (`Int`): The size in pixels of the resulting square + image. (default: `400`) + """ + + child_teams = sgqlc.types.Field( + sgqlc.types.non_null(TeamConnection), + graphql_name="childTeams", + args=sgqlc.types.ArgDict( + ( + ("order_by", sgqlc.types.Arg(TeamOrder, graphql_name="orderBy", default=None)), + ( + "user_logins", + sgqlc.types.Arg(sgqlc.types.list_of(sgqlc.types.non_null(String)), graphql_name="userLogins", default=None), + ), + ("immediate_only", sgqlc.types.Arg(Boolean, graphql_name="immediateOnly", default=True)), + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ) + ), + ) + """List of child teams belonging to this team + + Arguments: + + * `order_by` (`TeamOrder`): Order for connection + * `user_logins` (`[String!]`): User logins to filter by + * `immediate_only` (`Boolean`): Whether to list immediate child + teams or all descendant child teams. (default: `true`) + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. 
+ * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. + """ + + combined_slug = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="combinedSlug") + """The slug corresponding to the organization and team.""" + + created_at = sgqlc.types.Field(sgqlc.types.non_null(DateTime), graphql_name="createdAt") + """Identifies the date and time when the object was created.""" + + database_id = sgqlc.types.Field(Int, graphql_name="databaseId") + """Identifies the primary key from the database.""" + + description = sgqlc.types.Field(String, graphql_name="description") + """The description of the team.""" + + discussion = sgqlc.types.Field( + "TeamDiscussion", + graphql_name="discussion", + args=sgqlc.types.ArgDict((("number", sgqlc.types.Arg(sgqlc.types.non_null(Int), graphql_name="number", default=None)),)), + ) + """Find a team discussion by its number. + + Arguments: + + * `number` (`Int!`): The sequence number of the discussion to + find. + """ + + discussions = sgqlc.types.Field( + sgqlc.types.non_null(TeamDiscussionConnection), + graphql_name="discussions", + args=sgqlc.types.ArgDict( + ( + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ("is_pinned", sgqlc.types.Arg(Boolean, graphql_name="isPinned", default=None)), + ("order_by", sgqlc.types.Arg(TeamDiscussionOrder, graphql_name="orderBy", default=None)), + ) + ), + ) + """A list of team discussions. + + Arguments: + + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. 
+ * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. + * `is_pinned` (`Boolean`): If provided, filters discussions + according to whether or not they are pinned. + * `order_by` (`TeamDiscussionOrder`): Order for connection + """ + + discussions_resource_path = sgqlc.types.Field(sgqlc.types.non_null(URI), graphql_name="discussionsResourcePath") + """The HTTP path for team discussions""" + + discussions_url = sgqlc.types.Field(sgqlc.types.non_null(URI), graphql_name="discussionsUrl") + """The HTTP URL for team discussions""" + + edit_team_resource_path = sgqlc.types.Field(sgqlc.types.non_null(URI), graphql_name="editTeamResourcePath") + """The HTTP path for editing this team""" + + edit_team_url = sgqlc.types.Field(sgqlc.types.non_null(URI), graphql_name="editTeamUrl") + """The HTTP URL for editing this team""" + + invitations = sgqlc.types.Field( + OrganizationInvitationConnection, + graphql_name="invitations", + args=sgqlc.types.ArgDict( + ( + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ) + ), + ) + """A list of pending invitations for users to this team + + Arguments: + + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. 
+ """ + + members = sgqlc.types.Field( + sgqlc.types.non_null(TeamMemberConnection), + graphql_name="members", + args=sgqlc.types.ArgDict( + ( + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ("query", sgqlc.types.Arg(String, graphql_name="query", default=None)), + ("membership", sgqlc.types.Arg(TeamMembershipType, graphql_name="membership", default="ALL")), + ("role", sgqlc.types.Arg(TeamMemberRole, graphql_name="role", default=None)), + ("order_by", sgqlc.types.Arg(TeamMemberOrder, graphql_name="orderBy", default=None)), + ) + ), + ) + """A list of users who are members of this team. + + Arguments: + + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. + * `query` (`String`): The search string to look for. + * `membership` (`TeamMembershipType`): Filter by membership type + (default: `ALL`) + * `role` (`TeamMemberRole`): Filter by team member role + * `order_by` (`TeamMemberOrder`): Order for the connection. 
+ """ + + members_resource_path = sgqlc.types.Field(sgqlc.types.non_null(URI), graphql_name="membersResourcePath") + """The HTTP path for the team' members""" + + members_url = sgqlc.types.Field(sgqlc.types.non_null(URI), graphql_name="membersUrl") + """The HTTP URL for the team' members""" + + name = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="name") + """The name of the team.""" + + new_team_resource_path = sgqlc.types.Field(sgqlc.types.non_null(URI), graphql_name="newTeamResourcePath") + """The HTTP path creating a new team""" + + new_team_url = sgqlc.types.Field(sgqlc.types.non_null(URI), graphql_name="newTeamUrl") + """The HTTP URL creating a new team""" + + organization = sgqlc.types.Field(sgqlc.types.non_null(Organization), graphql_name="organization") + """The organization that owns this team.""" + + parent_team = sgqlc.types.Field("Team", graphql_name="parentTeam") + """The parent team of the team.""" + + privacy = sgqlc.types.Field(sgqlc.types.non_null(TeamPrivacy), graphql_name="privacy") + """The level of privacy the team has.""" + + repositories = sgqlc.types.Field( + sgqlc.types.non_null(TeamRepositoryConnection), + graphql_name="repositories", + args=sgqlc.types.ArgDict( + ( + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ("query", sgqlc.types.Arg(String, graphql_name="query", default=None)), + ("order_by", sgqlc.types.Arg(TeamRepositoryOrder, graphql_name="orderBy", default=None)), + ) + ), + ) + """A list of repositories this team has access to. + + Arguments: + + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. 
+ * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. + * `query` (`String`): The search string to look for. + * `order_by` (`TeamRepositoryOrder`): Order for the connection. + """ + + repositories_resource_path = sgqlc.types.Field(sgqlc.types.non_null(URI), graphql_name="repositoriesResourcePath") + """The HTTP path for this team's repositories""" + + repositories_url = sgqlc.types.Field(sgqlc.types.non_null(URI), graphql_name="repositoriesUrl") + """The HTTP URL for this team's repositories""" + + resource_path = sgqlc.types.Field(sgqlc.types.non_null(URI), graphql_name="resourcePath") + """The HTTP path for this team""" + + slug = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="slug") + """The slug corresponding to the team.""" + + teams_resource_path = sgqlc.types.Field(sgqlc.types.non_null(URI), graphql_name="teamsResourcePath") + """The HTTP path for this team's teams""" + + teams_url = sgqlc.types.Field(sgqlc.types.non_null(URI), graphql_name="teamsUrl") + """The HTTP URL for this team's teams""" + + updated_at = sgqlc.types.Field(sgqlc.types.non_null(DateTime), graphql_name="updatedAt") + """Identifies the date and time when the object was last updated.""" + + url = sgqlc.types.Field(sgqlc.types.non_null(URI), graphql_name="url") + """The HTTP URL for this team""" + + viewer_can_administer = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="viewerCanAdminister") + """Team is adminable by the viewer.""" + + +class TeamAddMemberAuditEntry(sgqlc.types.Type, Node, AuditEntry, OrganizationAuditEntryData, TeamAuditEntryData): + """Audit log entry for a team.add_member event.""" + + __schema__ = github_schema + __field_names__ = ("is_ldap_mapped",) + is_ldap_mapped = sgqlc.types.Field(Boolean, graphql_name="isLdapMapped") + """Whether the team was mapped to an LDAP Group.""" + + +class TeamAddRepositoryAuditEntry( + sgqlc.types.Type, Node, AuditEntry, 
OrganizationAuditEntryData, RepositoryAuditEntryData, TeamAuditEntryData +): + """Audit log entry for a team.add_repository event.""" + + __schema__ = github_schema + __field_names__ = ("is_ldap_mapped",) + is_ldap_mapped = sgqlc.types.Field(Boolean, graphql_name="isLdapMapped") + """Whether the team was mapped to an LDAP Group.""" + + +class TeamChangeParentTeamAuditEntry(sgqlc.types.Type, Node, AuditEntry, OrganizationAuditEntryData, TeamAuditEntryData): + """Audit log entry for a team.change_parent_team event.""" + + __schema__ = github_schema + __field_names__ = ( + "is_ldap_mapped", + "parent_team", + "parent_team_name", + "parent_team_name_was", + "parent_team_resource_path", + "parent_team_url", + "parent_team_was", + "parent_team_was_resource_path", + "parent_team_was_url", + ) + is_ldap_mapped = sgqlc.types.Field(Boolean, graphql_name="isLdapMapped") + """Whether the team was mapped to an LDAP Group.""" + + parent_team = sgqlc.types.Field(Team, graphql_name="parentTeam") + """The new parent team.""" + + parent_team_name = sgqlc.types.Field(String, graphql_name="parentTeamName") + """The name of the new parent team""" + + parent_team_name_was = sgqlc.types.Field(String, graphql_name="parentTeamNameWas") + """The name of the former parent team""" + + parent_team_resource_path = sgqlc.types.Field(URI, graphql_name="parentTeamResourcePath") + """The HTTP path for the parent team""" + + parent_team_url = sgqlc.types.Field(URI, graphql_name="parentTeamUrl") + """The HTTP URL for the parent team""" + + parent_team_was = sgqlc.types.Field(Team, graphql_name="parentTeamWas") + """The former parent team.""" + + parent_team_was_resource_path = sgqlc.types.Field(URI, graphql_name="parentTeamWasResourcePath") + """The HTTP path for the previous parent team""" + + parent_team_was_url = sgqlc.types.Field(URI, graphql_name="parentTeamWasUrl") + """The HTTP URL for the previous parent team""" + + +class TeamDiscussion( + sgqlc.types.Type, Node, Comment, Deletable, 
Reactable, Subscribable, UniformResourceLocatable, Updatable, UpdatableComment +): + """A team discussion.""" + + __schema__ = github_schema + __field_names__ = ( + "body_version", + "comments", + "comments_resource_path", + "comments_url", + "is_pinned", + "is_private", + "number", + "team", + "title", + "viewer_can_pin", + ) + body_version = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="bodyVersion") + """Identifies the discussion body hash.""" + + comments = sgqlc.types.Field( + sgqlc.types.non_null(TeamDiscussionCommentConnection), + graphql_name="comments", + args=sgqlc.types.ArgDict( + ( + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ("order_by", sgqlc.types.Arg(TeamDiscussionCommentOrder, graphql_name="orderBy", default=None)), + ("from_comment", sgqlc.types.Arg(Int, graphql_name="fromComment", default=None)), + ) + ), + ) + """A list of comments on this discussion. + + Arguments: + + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. + * `order_by` (`TeamDiscussionCommentOrder`): Order for connection + * `from_comment` (`Int`): When provided, filters the connection + such that results begin with the comment with this number. 
+ """ + + comments_resource_path = sgqlc.types.Field(sgqlc.types.non_null(URI), graphql_name="commentsResourcePath") + """The HTTP path for discussion comments""" + + comments_url = sgqlc.types.Field(sgqlc.types.non_null(URI), graphql_name="commentsUrl") + """The HTTP URL for discussion comments""" + + is_pinned = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="isPinned") + """Whether or not the discussion is pinned.""" + + is_private = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="isPrivate") + """Whether or not the discussion is only visible to team members and + org admins. + """ + + number = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="number") + """Identifies the discussion within its team.""" + + team = sgqlc.types.Field(sgqlc.types.non_null(Team), graphql_name="team") + """The team that defines the context of this discussion.""" + + title = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="title") + """The title of the discussion""" + + viewer_can_pin = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="viewerCanPin") + """Whether or not the current viewer can pin this discussion.""" + + +class TeamDiscussionComment(sgqlc.types.Type, Node, Comment, Deletable, Reactable, UniformResourceLocatable, Updatable, UpdatableComment): + """A comment on a team discussion.""" + + __schema__ = github_schema + __field_names__ = ("body_version", "discussion", "number") + body_version = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="bodyVersion") + """The current version of the body content.""" + + discussion = sgqlc.types.Field(sgqlc.types.non_null(TeamDiscussion), graphql_name="discussion") + """The discussion this comment is about.""" + + number = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="number") + """Identifies the comment number.""" + + +class TeamRemoveMemberAuditEntry(sgqlc.types.Type, Node, AuditEntry, OrganizationAuditEntryData, TeamAuditEntryData): + """Audit log 
entry for a team.remove_member event.""" + + __schema__ = github_schema + __field_names__ = ("is_ldap_mapped",) + is_ldap_mapped = sgqlc.types.Field(Boolean, graphql_name="isLdapMapped") + """Whether the team was mapped to an LDAP Group.""" + + +class TeamRemoveRepositoryAuditEntry( + sgqlc.types.Type, Node, AuditEntry, OrganizationAuditEntryData, RepositoryAuditEntryData, TeamAuditEntryData +): + """Audit log entry for a team.remove_repository event.""" + + __schema__ = github_schema + __field_names__ = ("is_ldap_mapped",) + is_ldap_mapped = sgqlc.types.Field(Boolean, graphql_name="isLdapMapped") + """Whether the team was mapped to an LDAP Group.""" + + +class Topic(sgqlc.types.Type, Node, Starrable): + """A topic aggregates entities that are related to a subject.""" + + __schema__ = github_schema + __field_names__ = ("name", "related_topics", "repositories") + name = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="name") + """The topic's name.""" + + related_topics = sgqlc.types.Field( + sgqlc.types.non_null(sgqlc.types.list_of(sgqlc.types.non_null("Topic"))), + graphql_name="relatedTopics", + args=sgqlc.types.ArgDict((("first", sgqlc.types.Arg(Int, graphql_name="first", default=3)),)), + ) + """A list of related topics, including aliases of this topic, sorted + with the most relevant first. Returns up to 10 Topics. + + Arguments: + + * `first` (`Int`): How many topics to return. 
(default: `3`) + """ + + repositories = sgqlc.types.Field( + sgqlc.types.non_null(RepositoryConnection), + graphql_name="repositories", + args=sgqlc.types.ArgDict( + ( + ("privacy", sgqlc.types.Arg(RepositoryPrivacy, graphql_name="privacy", default=None)), + ("order_by", sgqlc.types.Arg(RepositoryOrder, graphql_name="orderBy", default=None)), + ("affiliations", sgqlc.types.Arg(sgqlc.types.list_of(RepositoryAffiliation), graphql_name="affiliations", default=None)), + ( + "owner_affiliations", + sgqlc.types.Arg( + sgqlc.types.list_of(RepositoryAffiliation), graphql_name="ownerAffiliations", default=("OWNER", "COLLABORATOR") + ), + ), + ("is_locked", sgqlc.types.Arg(Boolean, graphql_name="isLocked", default=None)), + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ("sponsorable_only", sgqlc.types.Arg(Boolean, graphql_name="sponsorableOnly", default=False)), + ) + ), + ) + """A list of repositories. + + Arguments: + + * `privacy` (`RepositoryPrivacy`): If non-null, filters + repositories according to privacy + * `order_by` (`RepositoryOrder`): Ordering options for + repositories returned from the connection + * `affiliations` (`[RepositoryAffiliation]`): Array of viewer's + affiliation options for repositories returned from the + connection. For example, OWNER will include only repositories + that the current viewer owns. + * `owner_affiliations` (`[RepositoryAffiliation]`): Array of + owner's affiliation options for repositories returned from the + connection. For example, OWNER will include only repositories + that the organization or user being viewed owns. 
(default: + `[OWNER, COLLABORATOR]`) + * `is_locked` (`Boolean`): If non-null, filters repositories + according to whether they have been locked + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. + * `sponsorable_only` (`Boolean`): If true, only repositories whose + owner can be sponsored via GitHub Sponsors will be returned. + (default: `false`) + """ + + +class TransferredEvent(sgqlc.types.Type, Node): + """Represents a 'transferred' event on a given issue or pull request.""" + + __schema__ = github_schema + __field_names__ = ("actor", "created_at", "from_repository", "issue") + actor = sgqlc.types.Field(Actor, graphql_name="actor") + """Identifies the actor who performed the event.""" + + created_at = sgqlc.types.Field(sgqlc.types.non_null(DateTime), graphql_name="createdAt") + """Identifies the date and time when the object was created.""" + + from_repository = sgqlc.types.Field(Repository, graphql_name="fromRepository") + """The repository this came from""" + + issue = sgqlc.types.Field(sgqlc.types.non_null(Issue), graphql_name="issue") + """Identifies the issue associated with the event.""" + + +class Tree(sgqlc.types.Type, Node, GitObject): + """Represents a Git tree.""" + + __schema__ = github_schema + __field_names__ = ("entries",) + entries = sgqlc.types.Field(sgqlc.types.list_of(sgqlc.types.non_null(TreeEntry)), graphql_name="entries") + """A list of tree entries.""" + + +class UnassignedEvent(sgqlc.types.Type, Node): + """Represents an 'unassigned' event on any assignable object.""" + + __schema__ = github_schema + __field_names__ = ("actor", "assignable", "assignee", "created_at") + actor = sgqlc.types.Field(Actor, graphql_name="actor") + """Identifies the actor who performed the 
event.""" + + assignable = sgqlc.types.Field(sgqlc.types.non_null(Assignable), graphql_name="assignable") + """Identifies the assignable associated with the event.""" + + assignee = sgqlc.types.Field("Assignee", graphql_name="assignee") + """Identifies the user or mannequin that was unassigned.""" + + created_at = sgqlc.types.Field(sgqlc.types.non_null(DateTime), graphql_name="createdAt") + """Identifies the date and time when the object was created.""" + + +class UnknownSignature(sgqlc.types.Type, GitSignature): + """Represents an unknown signature on a Commit or Tag.""" + + __schema__ = github_schema + __field_names__ = () + + +class UnlabeledEvent(sgqlc.types.Type, Node): + """Represents an 'unlabeled' event on a given issue or pull request.""" + + __schema__ = github_schema + __field_names__ = ("actor", "created_at", "label", "labelable") + actor = sgqlc.types.Field(Actor, graphql_name="actor") + """Identifies the actor who performed the event.""" + + created_at = sgqlc.types.Field(sgqlc.types.non_null(DateTime), graphql_name="createdAt") + """Identifies the date and time when the object was created.""" + + label = sgqlc.types.Field(sgqlc.types.non_null(Label), graphql_name="label") + """Identifies the label associated with the 'unlabeled' event.""" + + labelable = sgqlc.types.Field(sgqlc.types.non_null(Labelable), graphql_name="labelable") + """Identifies the `Labelable` associated with the event.""" + + +class UnlockedEvent(sgqlc.types.Type, Node): + """Represents an 'unlocked' event on a given issue or pull request.""" + + __schema__ = github_schema + __field_names__ = ("actor", "created_at", "lockable") + actor = sgqlc.types.Field(Actor, graphql_name="actor") + """Identifies the actor who performed the event.""" + + created_at = sgqlc.types.Field(sgqlc.types.non_null(DateTime), graphql_name="createdAt") + """Identifies the date and time when the object was created.""" + + lockable = sgqlc.types.Field(sgqlc.types.non_null(Lockable), graphql_name="lockable") 
+ """Object that was unlocked.""" + + +class UnmarkedAsDuplicateEvent(sgqlc.types.Type, Node): + """Represents an 'unmarked_as_duplicate' event on a given issue or + pull request. + """ + + __schema__ = github_schema + __field_names__ = ("actor", "canonical", "created_at", "duplicate", "is_cross_repository") + actor = sgqlc.types.Field(Actor, graphql_name="actor") + """Identifies the actor who performed the event.""" + + canonical = sgqlc.types.Field("IssueOrPullRequest", graphql_name="canonical") + """The authoritative issue or pull request which has been duplicated + by another. + """ + + created_at = sgqlc.types.Field(sgqlc.types.non_null(DateTime), graphql_name="createdAt") + """Identifies the date and time when the object was created.""" + + duplicate = sgqlc.types.Field("IssueOrPullRequest", graphql_name="duplicate") + """The issue or pull request which has been marked as a duplicate of + another. + """ + + is_cross_repository = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="isCrossRepository") + """Canonical and duplicate belong to different repositories.""" + + +class UnpinnedEvent(sgqlc.types.Type, Node): + """Represents an 'unpinned' event on a given issue or pull request.""" + + __schema__ = github_schema + __field_names__ = ("actor", "created_at", "issue") + actor = sgqlc.types.Field(Actor, graphql_name="actor") + """Identifies the actor who performed the event.""" + + created_at = sgqlc.types.Field(sgqlc.types.non_null(DateTime), graphql_name="createdAt") + """Identifies the date and time when the object was created.""" + + issue = sgqlc.types.Field(sgqlc.types.non_null(Issue), graphql_name="issue") + """Identifies the issue associated with the event.""" + + +class UnsubscribedEvent(sgqlc.types.Type, Node): + """Represents an 'unsubscribed' event on a given `Subscribable`.""" + + __schema__ = github_schema + __field_names__ = ("actor", "created_at", "subscribable") + actor = sgqlc.types.Field(Actor, graphql_name="actor") + """Identifies 
the actor who performed the event.""" + + created_at = sgqlc.types.Field(sgqlc.types.non_null(DateTime), graphql_name="createdAt") + """Identifies the date and time when the object was created.""" + + subscribable = sgqlc.types.Field(sgqlc.types.non_null(Subscribable), graphql_name="subscribable") + """Object referenced by event.""" + + +class User( + sgqlc.types.Type, + Node, + Actor, + PackageOwner, + ProjectOwner, + ProjectNextOwner, + RepositoryDiscussionAuthor, + RepositoryDiscussionCommentAuthor, + RepositoryOwner, + UniformResourceLocatable, + ProfileOwner, + Sponsorable, +): + """A user is an individual's account on GitHub that owns repositories + and can make new content. + """ + + __schema__ = github_schema + __field_names__ = ( + "bio", + "bio_html", + "can_receive_organization_emails_when_notifications_restricted", + "commit_comments", + "company", + "company_html", + "contributions_collection", + "created_at", + "database_id", + "followers", + "following", + "gist", + "gist_comments", + "gists", + "hovercard", + "interaction_ability", + "is_bounty_hunter", + "is_campus_expert", + "is_developer_program_member", + "is_employee", + "is_following_viewer", + "is_git_hub_star", + "is_hireable", + "is_site_admin", + "is_viewer", + "issue_comments", + "issues", + "organization", + "organization_verified_domain_emails", + "organizations", + "public_keys", + "pull_requests", + "repositories_contributed_to", + "saved_replies", + "starred_repositories", + "status", + "top_repositories", + "twitter_username", + "updated_at", + "viewer_can_follow", + "viewer_is_following", + "watching", + ) + bio = sgqlc.types.Field(String, graphql_name="bio") + """The user's public profile bio.""" + + bio_html = sgqlc.types.Field(sgqlc.types.non_null(HTML), graphql_name="bioHTML") + """The user's public profile bio as HTML.""" + + can_receive_organization_emails_when_notifications_restricted = sgqlc.types.Field( + sgqlc.types.non_null(Boolean), + 
graphql_name="canReceiveOrganizationEmailsWhenNotificationsRestricted", + args=sgqlc.types.ArgDict((("login", sgqlc.types.Arg(sgqlc.types.non_null(String), graphql_name="login", default=None)),)), + ) + """Could this user receive email notifications, if the organization + had notification restrictions enabled? + + Arguments: + + * `login` (`String!`): The login of the organization to check. + """ + + commit_comments = sgqlc.types.Field( + sgqlc.types.non_null(CommitCommentConnection), + graphql_name="commitComments", + args=sgqlc.types.ArgDict( + ( + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ) + ), + ) + """A list of commit comments made by this user. + + Arguments: + + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. 
+ """ + + company = sgqlc.types.Field(String, graphql_name="company") + """The user's public profile company.""" + + company_html = sgqlc.types.Field(sgqlc.types.non_null(HTML), graphql_name="companyHTML") + """The user's public profile company as HTML.""" + + contributions_collection = sgqlc.types.Field( + sgqlc.types.non_null(ContributionsCollection), + graphql_name="contributionsCollection", + args=sgqlc.types.ArgDict( + ( + ("organization_id", sgqlc.types.Arg(ID, graphql_name="organizationID", default=None)), + ("from_", sgqlc.types.Arg(DateTime, graphql_name="from", default=None)), + ("to", sgqlc.types.Arg(DateTime, graphql_name="to", default=None)), + ) + ), + ) + """The collection of contributions this user has made to different + repositories. + + Arguments: + + * `organization_id` (`ID`): The ID of the organization used to + filter contributions. + * `from_` (`DateTime`): Only contributions made at this time or + later will be counted. If omitted, defaults to a year ago. + * `to` (`DateTime`): Only contributions made before and up to + (including) this time will be counted. If omitted, defaults to + the current time or one year from the provided from argument. + """ + + created_at = sgqlc.types.Field(sgqlc.types.non_null(DateTime), graphql_name="createdAt") + """Identifies the date and time when the object was created.""" + + database_id = sgqlc.types.Field(Int, graphql_name="databaseId") + """Identifies the primary key from the database.""" + + followers = sgqlc.types.Field( + sgqlc.types.non_null(FollowerConnection), + graphql_name="followers", + args=sgqlc.types.ArgDict( + ( + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ) + ), + ) + """A list of users the given user is followed by. 
+ + Arguments: + + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. + """ + + following = sgqlc.types.Field( + sgqlc.types.non_null(FollowingConnection), + graphql_name="following", + args=sgqlc.types.ArgDict( + ( + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ) + ), + ) + """A list of users the given user is following. + + Arguments: + + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. + """ + + gist = sgqlc.types.Field( + Gist, + graphql_name="gist", + args=sgqlc.types.ArgDict((("name", sgqlc.types.Arg(sgqlc.types.non_null(String), graphql_name="name", default=None)),)), + ) + """Find gist by repo name. + + Arguments: + + * `name` (`String!`): The gist name to find. + """ + + gist_comments = sgqlc.types.Field( + sgqlc.types.non_null(GistCommentConnection), + graphql_name="gistComments", + args=sgqlc.types.ArgDict( + ( + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ) + ), + ) + """A list of gist comments made by this user. 
+ + Arguments: + + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. + """ + + gists = sgqlc.types.Field( + sgqlc.types.non_null(GistConnection), + graphql_name="gists", + args=sgqlc.types.ArgDict( + ( + ("privacy", sgqlc.types.Arg(GistPrivacy, graphql_name="privacy", default=None)), + ("order_by", sgqlc.types.Arg(GistOrder, graphql_name="orderBy", default=None)), + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ) + ), + ) + """A list of the Gists the user has created. + + Arguments: + + * `privacy` (`GistPrivacy`): Filters Gists according to privacy. + * `order_by` (`GistOrder`): Ordering options for gists returned + from the connection + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. 
+ """ + + hovercard = sgqlc.types.Field( + sgqlc.types.non_null(Hovercard), + graphql_name="hovercard", + args=sgqlc.types.ArgDict((("primary_subject_id", sgqlc.types.Arg(ID, graphql_name="primarySubjectId", default=None)),)), + ) + """The hovercard information for this user in a given context + + Arguments: + + * `primary_subject_id` (`ID`): The ID of the subject to get the + hovercard in the context of + """ + + interaction_ability = sgqlc.types.Field(RepositoryInteractionAbility, graphql_name="interactionAbility") + """The interaction ability settings for this user.""" + + is_bounty_hunter = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="isBountyHunter") + """Whether or not this user is a participant in the GitHub Security + Bug Bounty. + """ + + is_campus_expert = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="isCampusExpert") + """Whether or not this user is a participant in the GitHub Campus + Experts Program. + """ + + is_developer_program_member = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="isDeveloperProgramMember") + """Whether or not this user is a GitHub Developer Program member.""" + + is_employee = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="isEmployee") + """Whether or not this user is a GitHub employee.""" + + is_following_viewer = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="isFollowingViewer") + """Whether or not this user is following the viewer. 
Inverse of + viewer_is_following + """ + + is_git_hub_star = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="isGitHubStar") + """Whether or not this user is a member of the GitHub Stars Program.""" + + is_hireable = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="isHireable") + """Whether or not the user has marked themselves as for hire.""" + + is_site_admin = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="isSiteAdmin") + """Whether or not this user is a site administrator.""" + + is_viewer = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="isViewer") + """Whether or not this user is the viewing user.""" + + issue_comments = sgqlc.types.Field( + sgqlc.types.non_null(IssueCommentConnection), + graphql_name="issueComments", + args=sgqlc.types.ArgDict( + ( + ("order_by", sgqlc.types.Arg(IssueCommentOrder, graphql_name="orderBy", default=None)), + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ) + ), + ) + """A list of issue comments made by this user. + + Arguments: + + * `order_by` (`IssueCommentOrder`): Ordering options for issue + comments returned from the connection. + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. 
+ """ + + issues = sgqlc.types.Field( + sgqlc.types.non_null(IssueConnection), + graphql_name="issues", + args=sgqlc.types.ArgDict( + ( + ("order_by", sgqlc.types.Arg(IssueOrder, graphql_name="orderBy", default=None)), + ("labels", sgqlc.types.Arg(sgqlc.types.list_of(sgqlc.types.non_null(String)), graphql_name="labels", default=None)), + ("states", sgqlc.types.Arg(sgqlc.types.list_of(sgqlc.types.non_null(IssueState)), graphql_name="states", default=None)), + ("filter_by", sgqlc.types.Arg(IssueFilters, graphql_name="filterBy", default=None)), + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ) + ), + ) + """A list of issues associated with this user. + + Arguments: + + * `order_by` (`IssueOrder`): Ordering options for issues returned + from the connection. + * `labels` (`[String!]`): A list of label names to filter the pull + requests by. + * `states` (`[IssueState!]`): A list of states to filter the + issues by. + * `filter_by` (`IssueFilters`): Filtering options for issues + returned from the connection. + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. + """ + + organization = sgqlc.types.Field( + Organization, + graphql_name="organization", + args=sgqlc.types.ArgDict((("login", sgqlc.types.Arg(sgqlc.types.non_null(String), graphql_name="login", default=None)),)), + ) + """Find an organization by its login that the user belongs to. + + Arguments: + + * `login` (`String!`): The login of the organization to find. 
+ """ + + organization_verified_domain_emails = sgqlc.types.Field( + sgqlc.types.non_null(sgqlc.types.list_of(sgqlc.types.non_null(String))), + graphql_name="organizationVerifiedDomainEmails", + args=sgqlc.types.ArgDict((("login", sgqlc.types.Arg(sgqlc.types.non_null(String), graphql_name="login", default=None)),)), + ) + """Verified email addresses that match verified domains for a + specified organization the user is a member of. + + Arguments: + + * `login` (`String!`): The login of the organization to match + verified domains from. + """ + + organizations = sgqlc.types.Field( + sgqlc.types.non_null(OrganizationConnection), + graphql_name="organizations", + args=sgqlc.types.ArgDict( + ( + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ) + ), + ) + """A list of organizations the user belongs to. + + Arguments: + + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. + """ + + public_keys = sgqlc.types.Field( + sgqlc.types.non_null(PublicKeyConnection), + graphql_name="publicKeys", + args=sgqlc.types.ArgDict( + ( + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ) + ), + ) + """A list of public keys associated with this user. 
+ + Arguments: + + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. + """ + + pull_requests = sgqlc.types.Field( + sgqlc.types.non_null(PullRequestConnection), + graphql_name="pullRequests", + args=sgqlc.types.ArgDict( + ( + ( + "states", + sgqlc.types.Arg(sgqlc.types.list_of(sgqlc.types.non_null(PullRequestState)), graphql_name="states", default=None), + ), + ("labels", sgqlc.types.Arg(sgqlc.types.list_of(sgqlc.types.non_null(String)), graphql_name="labels", default=None)), + ("head_ref_name", sgqlc.types.Arg(String, graphql_name="headRefName", default=None)), + ("base_ref_name", sgqlc.types.Arg(String, graphql_name="baseRefName", default=None)), + ("order_by", sgqlc.types.Arg(IssueOrder, graphql_name="orderBy", default=None)), + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ) + ), + ) + """A list of pull requests associated with this user. + + Arguments: + + * `states` (`[PullRequestState!]`): A list of states to filter the + pull requests by. + * `labels` (`[String!]`): A list of label names to filter the pull + requests by. + * `head_ref_name` (`String`): The head ref name to filter the pull + requests by. + * `base_ref_name` (`String`): The base ref name to filter the pull + requests by. + * `order_by` (`IssueOrder`): Ordering options for pull requests + returned from the connection. + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. 
+ * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. + """ + + repositories_contributed_to = sgqlc.types.Field( + sgqlc.types.non_null(RepositoryConnection), + graphql_name="repositoriesContributedTo", + args=sgqlc.types.ArgDict( + ( + ("privacy", sgqlc.types.Arg(RepositoryPrivacy, graphql_name="privacy", default=None)), + ("order_by", sgqlc.types.Arg(RepositoryOrder, graphql_name="orderBy", default=None)), + ("is_locked", sgqlc.types.Arg(Boolean, graphql_name="isLocked", default=None)), + ("include_user_repositories", sgqlc.types.Arg(Boolean, graphql_name="includeUserRepositories", default=None)), + ( + "contribution_types", + sgqlc.types.Arg(sgqlc.types.list_of(RepositoryContributionType), graphql_name="contributionTypes", default=None), + ), + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ) + ), + ) + """A list of repositories that the user recently contributed to. + + Arguments: + + * `privacy` (`RepositoryPrivacy`): If non-null, filters + repositories according to privacy + * `order_by` (`RepositoryOrder`): Ordering options for + repositories returned from the connection + * `is_locked` (`Boolean`): If non-null, filters repositories + according to whether they have been locked + * `include_user_repositories` (`Boolean`): If true, include user + repositories + * `contribution_types` (`[RepositoryContributionType]`): If non- + null, include only the specified types of contributions. The + GitHub.com UI uses [COMMIT, ISSUE, PULL_REQUEST, REPOSITORY] + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. 
+ * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. + """ + + saved_replies = sgqlc.types.Field( + SavedReplyConnection, + graphql_name="savedReplies", + args=sgqlc.types.ArgDict( + ( + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ( + "order_by", + sgqlc.types.Arg(SavedReplyOrder, graphql_name="orderBy", default={"field": "UPDATED_AT", "direction": "DESC"}), + ), + ) + ), + ) + """Replies this user has saved + + Arguments: + + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. + * `order_by` (`SavedReplyOrder`): The field to order saved replies + by. (default: `{field: UPDATED_AT, direction: DESC}`) + """ + + starred_repositories = sgqlc.types.Field( + sgqlc.types.non_null(StarredRepositoryConnection), + graphql_name="starredRepositories", + args=sgqlc.types.ArgDict( + ( + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ("owned_by_viewer", sgqlc.types.Arg(Boolean, graphql_name="ownedByViewer", default=None)), + ("order_by", sgqlc.types.Arg(StarOrder, graphql_name="orderBy", default=None)), + ) + ), + ) + """Repositories the user has starred. 
+ + Arguments: + + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. + * `owned_by_viewer` (`Boolean`): Filters starred repositories to + only return repositories owned by the viewer. + * `order_by` (`StarOrder`): Order for connection + """ + + status = sgqlc.types.Field("UserStatus", graphql_name="status") + """The user's description of what they're currently doing.""" + + top_repositories = sgqlc.types.Field( + sgqlc.types.non_null(RepositoryConnection), + graphql_name="topRepositories", + args=sgqlc.types.ArgDict( + ( + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ("order_by", sgqlc.types.Arg(sgqlc.types.non_null(RepositoryOrder), graphql_name="orderBy", default=None)), + ("since", sgqlc.types.Arg(DateTime, graphql_name="since", default=None)), + ) + ), + ) + """Repositories the user has contributed to, ordered by contribution + rank, plus repositories the user has created + + Arguments: + + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. 
+ * `order_by` (`RepositoryOrder!`): Ordering options for + repositories returned from the connection + * `since` (`DateTime`): How far back in time to fetch contributed + repositories + """ + + twitter_username = sgqlc.types.Field(String, graphql_name="twitterUsername") + """The user's Twitter username.""" + + updated_at = sgqlc.types.Field(sgqlc.types.non_null(DateTime), graphql_name="updatedAt") + """Identifies the date and time when the object was last updated.""" + + viewer_can_follow = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="viewerCanFollow") + """Whether or not the viewer is able to follow the user.""" + + viewer_is_following = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="viewerIsFollowing") + """Whether or not this user is followed by the viewer. Inverse of + is_following_viewer. + """ + + watching = sgqlc.types.Field( + sgqlc.types.non_null(RepositoryConnection), + graphql_name="watching", + args=sgqlc.types.ArgDict( + ( + ("privacy", sgqlc.types.Arg(RepositoryPrivacy, graphql_name="privacy", default=None)), + ("order_by", sgqlc.types.Arg(RepositoryOrder, graphql_name="orderBy", default=None)), + ("affiliations", sgqlc.types.Arg(sgqlc.types.list_of(RepositoryAffiliation), graphql_name="affiliations", default=None)), + ( + "owner_affiliations", + sgqlc.types.Arg( + sgqlc.types.list_of(RepositoryAffiliation), graphql_name="ownerAffiliations", default=("OWNER", "COLLABORATOR") + ), + ), + ("is_locked", sgqlc.types.Arg(Boolean, graphql_name="isLocked", default=None)), + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ) + ), + ) + """A list of repositories the given user is watching. 
+ + Arguments: + + * `privacy` (`RepositoryPrivacy`): If non-null, filters + repositories according to privacy + * `order_by` (`RepositoryOrder`): Ordering options for + repositories returned from the connection + * `affiliations` (`[RepositoryAffiliation]`): Affiliation options + for repositories returned from the connection. If none + specified, the results will include repositories for which the + current viewer is an owner or collaborator, or member. + * `owner_affiliations` (`[RepositoryAffiliation]`): Array of + owner's affiliation options for repositories returned from the + connection. For example, OWNER will include only repositories + that the organization or user being viewed owns. (default: + `[OWNER, COLLABORATOR]`) + * `is_locked` (`Boolean`): If non-null, filters repositories + according to whether they have been locked + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. 
+ """ + + +class UserBlockedEvent(sgqlc.types.Type, Node): + """Represents a 'user_blocked' event on a given user.""" + + __schema__ = github_schema + __field_names__ = ("actor", "block_duration", "created_at", "subject") + actor = sgqlc.types.Field(Actor, graphql_name="actor") + """Identifies the actor who performed the event.""" + + block_duration = sgqlc.types.Field(sgqlc.types.non_null(UserBlockDuration), graphql_name="blockDuration") + """Number of days that the user was blocked for.""" + + created_at = sgqlc.types.Field(sgqlc.types.non_null(DateTime), graphql_name="createdAt") + """Identifies the date and time when the object was created.""" + + subject = sgqlc.types.Field(User, graphql_name="subject") + """The user who was blocked.""" + + +class UserContentEdit(sgqlc.types.Type, Node): + """An edit on user content""" + + __schema__ = github_schema + __field_names__ = ("created_at", "deleted_at", "deleted_by", "diff", "edited_at", "editor", "updated_at") + created_at = sgqlc.types.Field(sgqlc.types.non_null(DateTime), graphql_name="createdAt") + """Identifies the date and time when the object was created.""" + + deleted_at = sgqlc.types.Field(DateTime, graphql_name="deletedAt") + """Identifies the date and time when the object was deleted.""" + + deleted_by = sgqlc.types.Field(Actor, graphql_name="deletedBy") + """The actor who deleted this content""" + + diff = sgqlc.types.Field(String, graphql_name="diff") + """A summary of the changes for this edit""" + + edited_at = sgqlc.types.Field(sgqlc.types.non_null(DateTime), graphql_name="editedAt") + """When this content was edited""" + + editor = sgqlc.types.Field(Actor, graphql_name="editor") + """The actor who edited this content""" + + updated_at = sgqlc.types.Field(sgqlc.types.non_null(DateTime), graphql_name="updatedAt") + """Identifies the date and time when the object was last updated.""" + + +class UserStatus(sgqlc.types.Type, Node): + """The user's description of what they're currently doing.""" + + 
__schema__ = github_schema + __field_names__ = ( + "created_at", + "emoji", + "emoji_html", + "expires_at", + "indicates_limited_availability", + "message", + "organization", + "updated_at", + "user", + ) + created_at = sgqlc.types.Field(sgqlc.types.non_null(DateTime), graphql_name="createdAt") + """Identifies the date and time when the object was created.""" + + emoji = sgqlc.types.Field(String, graphql_name="emoji") + """An emoji summarizing the user's status.""" + + emoji_html = sgqlc.types.Field(HTML, graphql_name="emojiHTML") + """The status emoji as HTML.""" + + expires_at = sgqlc.types.Field(DateTime, graphql_name="expiresAt") + """If set, the status will not be shown after this date.""" + + indicates_limited_availability = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="indicatesLimitedAvailability") + """Whether this status indicates the user is not fully available on + GitHub. + """ + + message = sgqlc.types.Field(String, graphql_name="message") + """A brief message describing what the user is doing.""" + + organization = sgqlc.types.Field(Organization, graphql_name="organization") + """The organization whose members can see this status. If null, this + status is publicly visible. + """ + + updated_at = sgqlc.types.Field(sgqlc.types.non_null(DateTime), graphql_name="updatedAt") + """Identifies the date and time when the object was last updated.""" + + user = sgqlc.types.Field(sgqlc.types.non_null(User), graphql_name="user") + """The user who has this status.""" + + +class VerifiableDomain(sgqlc.types.Type, Node): + """A domain that can be verified or approved for an organization or + an enterprise. 
+ """ + + __schema__ = github_schema + __field_names__ = ( + "created_at", + "database_id", + "dns_host_name", + "domain", + "has_found_host_name", + "has_found_verification_token", + "is_approved", + "is_required_for_policy_enforcement", + "is_verified", + "owner", + "punycode_encoded_domain", + "token_expiration_time", + "updated_at", + "verification_token", + ) + created_at = sgqlc.types.Field(sgqlc.types.non_null(DateTime), graphql_name="createdAt") + """Identifies the date and time when the object was created.""" + + database_id = sgqlc.types.Field(Int, graphql_name="databaseId") + """Identifies the primary key from the database.""" + + dns_host_name = sgqlc.types.Field(URI, graphql_name="dnsHostName") + """The DNS host name that should be used for verification.""" + + domain = sgqlc.types.Field(sgqlc.types.non_null(URI), graphql_name="domain") + """The unicode encoded domain.""" + + has_found_host_name = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="hasFoundHostName") + """Whether a TXT record for verification with the expected host name + was found. + """ + + has_found_verification_token = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="hasFoundVerificationToken") + """Whether a TXT record for verification with the expected + verification token was found. + """ + + is_approved = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="isApproved") + """Whether or not the domain is approved.""" + + is_required_for_policy_enforcement = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="isRequiredForPolicyEnforcement") + """Whether this domain is required to exist for an organization or + enterprise policy to be enforced. 
+ """ + + is_verified = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="isVerified") + """Whether or not the domain is verified.""" + + owner = sgqlc.types.Field(sgqlc.types.non_null("VerifiableDomainOwner"), graphql_name="owner") + """The owner of the domain.""" + + punycode_encoded_domain = sgqlc.types.Field(sgqlc.types.non_null(URI), graphql_name="punycodeEncodedDomain") + """The punycode encoded domain.""" + + token_expiration_time = sgqlc.types.Field(DateTime, graphql_name="tokenExpirationTime") + """The time that the current verification token will expire.""" + + updated_at = sgqlc.types.Field(sgqlc.types.non_null(DateTime), graphql_name="updatedAt") + """Identifies the date and time when the object was last updated.""" + + verification_token = sgqlc.types.Field(String, graphql_name="verificationToken") + """The current verification token for the domain.""" + + +class ViewerHovercardContext(sgqlc.types.Type, HovercardContext): + """A hovercard context with a message describing how the viewer is + related. + """ + + __schema__ = github_schema + __field_names__ = ("viewer",) + viewer = sgqlc.types.Field(sgqlc.types.non_null(User), graphql_name="viewer") + """Identifies the user who is related to this context.""" + + +class Workflow(sgqlc.types.Type, Node): + """A workflow contains meta information about an Actions workflow + file. 
+ """ + + __schema__ = github_schema + __field_names__ = ("created_at", "database_id", "name", "updated_at") + created_at = sgqlc.types.Field(sgqlc.types.non_null(DateTime), graphql_name="createdAt") + """Identifies the date and time when the object was created.""" + + database_id = sgqlc.types.Field(Int, graphql_name="databaseId") + """Identifies the primary key from the database.""" + + name = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="name") + """The name of the workflow.""" + + updated_at = sgqlc.types.Field(sgqlc.types.non_null(DateTime), graphql_name="updatedAt") + """Identifies the date and time when the object was last updated.""" + + +class WorkflowRun(sgqlc.types.Type, Node): + """A workflow run.""" + + __schema__ = github_schema + __field_names__ = ( + "check_suite", + "created_at", + "database_id", + "deployment_reviews", + "pending_deployment_requests", + "resource_path", + "run_number", + "updated_at", + "url", + "workflow", + ) + check_suite = sgqlc.types.Field(sgqlc.types.non_null(CheckSuite), graphql_name="checkSuite") + """The check suite this workflow run belongs to.""" + + created_at = sgqlc.types.Field(sgqlc.types.non_null(DateTime), graphql_name="createdAt") + """Identifies the date and time when the object was created.""" + + database_id = sgqlc.types.Field(Int, graphql_name="databaseId") + """Identifies the primary key from the database.""" + + deployment_reviews = sgqlc.types.Field( + sgqlc.types.non_null(DeploymentReviewConnection), + graphql_name="deploymentReviews", + args=sgqlc.types.ArgDict( + ( + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ) + ), + ) + """The log of deployment reviews + + Arguments: + + * `after` (`String`): Returns the elements in the list that come + after 
the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. + """ + + pending_deployment_requests = sgqlc.types.Field( + sgqlc.types.non_null(DeploymentRequestConnection), + graphql_name="pendingDeploymentRequests", + args=sgqlc.types.ArgDict( + ( + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("before", sgqlc.types.Arg(String, graphql_name="before", default=None)), + ("first", sgqlc.types.Arg(Int, graphql_name="first", default=None)), + ("last", sgqlc.types.Arg(Int, graphql_name="last", default=None)), + ) + ), + ) + """The pending deployment requests of all check runs in this workflow + run + + Arguments: + + * `after` (`String`): Returns the elements in the list that come + after the specified cursor. + * `before` (`String`): Returns the elements in the list that come + before the specified cursor. + * `first` (`Int`): Returns the first _n_ elements from the list. + * `last` (`Int`): Returns the last _n_ elements from the list. + """ + + resource_path = sgqlc.types.Field(sgqlc.types.non_null(URI), graphql_name="resourcePath") + """The HTTP path for this workflow run""" + + run_number = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="runNumber") + """A number that uniquely identifies this workflow run in its parent + workflow. 
+ """ + + updated_at = sgqlc.types.Field(sgqlc.types.non_null(DateTime), graphql_name="updatedAt") + """Identifies the date and time when the object was last updated.""" + + url = sgqlc.types.Field(sgqlc.types.non_null(URI), graphql_name="url") + """The HTTP URL for this workflow run""" + + workflow = sgqlc.types.Field(sgqlc.types.non_null(Workflow), graphql_name="workflow") + """The workflow executed in this workflow run.""" + + +######################################################################## +# Unions +######################################################################## +class Assignee(sgqlc.types.Union): + """Types that can be assigned to issues.""" + + __schema__ = github_schema + __types__ = (Bot, Mannequin, Organization, User) + + +class AuditEntryActor(sgqlc.types.Union): + """Types that can initiate an audit log event.""" + + __schema__ = github_schema + __types__ = (Bot, Organization, User) + + +class BranchActorAllowanceActor(sgqlc.types.Union): + """Types which can be actors for `BranchActorAllowance` objects.""" + + __schema__ = github_schema + __types__ = (App, Team, User) + + +class Closer(sgqlc.types.Union): + """The object which triggered a `ClosedEvent`.""" + + __schema__ = github_schema + __types__ = (Commit, PullRequest) + + +class CreatedIssueOrRestrictedContribution(sgqlc.types.Union): + """Represents either a issue the viewer can access or a restricted + contribution. + """ + + __schema__ = github_schema + __types__ = (CreatedIssueContribution, RestrictedContribution) + + +class CreatedPullRequestOrRestrictedContribution(sgqlc.types.Union): + """Represents either a pull request the viewer can access or a + restricted contribution. + """ + + __schema__ = github_schema + __types__ = (CreatedPullRequestContribution, RestrictedContribution) + + +class CreatedRepositoryOrRestrictedContribution(sgqlc.types.Union): + """Represents either a repository the viewer can access or a + restricted contribution. 
+ """ + + __schema__ = github_schema + __types__ = (CreatedRepositoryContribution, RestrictedContribution) + + +class DeploymentReviewer(sgqlc.types.Union): + """Users and teams.""" + + __schema__ = github_schema + __types__ = (Team, User) + + +class EnterpriseMember(sgqlc.types.Union): + """An object that is a member of an enterprise.""" + + __schema__ = github_schema + __types__ = (EnterpriseUserAccount, User) + + +class IpAllowListOwner(sgqlc.types.Union): + """Types that can own an IP allow list.""" + + __schema__ = github_schema + __types__ = (App, Enterprise, Organization) + + +class IssueOrPullRequest(sgqlc.types.Union): + """Used for return value of Repository.issueOrPullRequest.""" + + __schema__ = github_schema + __types__ = (Issue, PullRequest) + + +class IssueTimelineItem(sgqlc.types.Union): + """An item in an issue timeline""" + + __schema__ = github_schema + __types__ = ( + AssignedEvent, + ClosedEvent, + Commit, + CrossReferencedEvent, + DemilestonedEvent, + IssueComment, + LabeledEvent, + LockedEvent, + MilestonedEvent, + ReferencedEvent, + RenamedTitleEvent, + ReopenedEvent, + SubscribedEvent, + TransferredEvent, + UnassignedEvent, + UnlabeledEvent, + UnlockedEvent, + UnsubscribedEvent, + UserBlockedEvent, + ) + + +class IssueTimelineItems(sgqlc.types.Union): + """An item in an issue timeline""" + + __schema__ = github_schema + __types__ = ( + AddedToProjectEvent, + AssignedEvent, + ClosedEvent, + CommentDeletedEvent, + ConnectedEvent, + ConvertedNoteToIssueEvent, + ConvertedToDiscussionEvent, + CrossReferencedEvent, + DemilestonedEvent, + DisconnectedEvent, + IssueComment, + LabeledEvent, + LockedEvent, + MarkedAsDuplicateEvent, + MentionedEvent, + MilestonedEvent, + MovedColumnsInProjectEvent, + PinnedEvent, + ReferencedEvent, + RemovedFromProjectEvent, + RenamedTitleEvent, + ReopenedEvent, + SubscribedEvent, + TransferredEvent, + UnassignedEvent, + UnlabeledEvent, + UnlockedEvent, + UnmarkedAsDuplicateEvent, + UnpinnedEvent, + UnsubscribedEvent, 
+ UserBlockedEvent, + ) + + +class MilestoneItem(sgqlc.types.Union): + """Types that can be inside a Milestone.""" + + __schema__ = github_schema + __types__ = (Issue, PullRequest) + + +class OrgRestoreMemberAuditEntryMembership(sgqlc.types.Union): + """Types of memberships that can be restored for an Organization + member. + """ + + __schema__ = github_schema + __types__ = ( + OrgRestoreMemberMembershipOrganizationAuditEntryData, + OrgRestoreMemberMembershipRepositoryAuditEntryData, + OrgRestoreMemberMembershipTeamAuditEntryData, + ) + + +class OrganizationAuditEntry(sgqlc.types.Union): + """An audit entry in an organization audit log.""" + + __schema__ = github_schema + __types__ = ( + MembersCanDeleteReposClearAuditEntry, + MembersCanDeleteReposDisableAuditEntry, + MembersCanDeleteReposEnableAuditEntry, + OauthApplicationCreateAuditEntry, + OrgAddBillingManagerAuditEntry, + OrgAddMemberAuditEntry, + OrgBlockUserAuditEntry, + OrgConfigDisableCollaboratorsOnlyAuditEntry, + OrgConfigEnableCollaboratorsOnlyAuditEntry, + OrgCreateAuditEntry, + OrgDisableOauthAppRestrictionsAuditEntry, + OrgDisableSamlAuditEntry, + OrgDisableTwoFactorRequirementAuditEntry, + OrgEnableOauthAppRestrictionsAuditEntry, + OrgEnableSamlAuditEntry, + OrgEnableTwoFactorRequirementAuditEntry, + OrgInviteMemberAuditEntry, + OrgInviteToBusinessAuditEntry, + OrgOauthAppAccessApprovedAuditEntry, + OrgOauthAppAccessDeniedAuditEntry, + OrgOauthAppAccessRequestedAuditEntry, + OrgRemoveBillingManagerAuditEntry, + OrgRemoveMemberAuditEntry, + OrgRemoveOutsideCollaboratorAuditEntry, + OrgRestoreMemberAuditEntry, + OrgUnblockUserAuditEntry, + OrgUpdateDefaultRepositoryPermissionAuditEntry, + OrgUpdateMemberAuditEntry, + OrgUpdateMemberRepositoryCreationPermissionAuditEntry, + OrgUpdateMemberRepositoryInvitationPermissionAuditEntry, + PrivateRepositoryForkingDisableAuditEntry, + PrivateRepositoryForkingEnableAuditEntry, + RepoAccessAuditEntry, + RepoAddMemberAuditEntry, + RepoAddTopicAuditEntry, + 
RepoArchivedAuditEntry, + RepoChangeMergeSettingAuditEntry, + RepoConfigDisableAnonymousGitAccessAuditEntry, + RepoConfigDisableCollaboratorsOnlyAuditEntry, + RepoConfigDisableContributorsOnlyAuditEntry, + RepoConfigDisableSockpuppetDisallowedAuditEntry, + RepoConfigEnableAnonymousGitAccessAuditEntry, + RepoConfigEnableCollaboratorsOnlyAuditEntry, + RepoConfigEnableContributorsOnlyAuditEntry, + RepoConfigEnableSockpuppetDisallowedAuditEntry, + RepoConfigLockAnonymousGitAccessAuditEntry, + RepoConfigUnlockAnonymousGitAccessAuditEntry, + RepoCreateAuditEntry, + RepoDestroyAuditEntry, + RepoRemoveMemberAuditEntry, + RepoRemoveTopicAuditEntry, + RepositoryVisibilityChangeDisableAuditEntry, + RepositoryVisibilityChangeEnableAuditEntry, + TeamAddMemberAuditEntry, + TeamAddRepositoryAuditEntry, + TeamChangeParentTeamAuditEntry, + TeamRemoveMemberAuditEntry, + TeamRemoveRepositoryAuditEntry, + ) + + +class PermissionGranter(sgqlc.types.Union): + """Types that can grant permissions on a repository to a user""" + + __schema__ = github_schema + __types__ = (Organization, Repository, Team) + + +class PinnableItem(sgqlc.types.Union): + """Types that can be pinned to a profile page.""" + + __schema__ = github_schema + __types__ = (Gist, Repository) + + +class ProjectCardItem(sgqlc.types.Union): + """Types that can be inside Project Cards.""" + + __schema__ = github_schema + __types__ = (Issue, PullRequest) + + +class ProjectNextItemContent(sgqlc.types.Union): + """Types that can be inside Project Items.""" + + __schema__ = github_schema + __types__ = (DraftIssue, Issue, PullRequest) + + +class PullRequestTimelineItem(sgqlc.types.Union): + """An item in a pull request timeline""" + + __schema__ = github_schema + __types__ = ( + AssignedEvent, + BaseRefDeletedEvent, + BaseRefForcePushedEvent, + ClosedEvent, + Commit, + CommitCommentThread, + CrossReferencedEvent, + DemilestonedEvent, + DeployedEvent, + DeploymentEnvironmentChangedEvent, + HeadRefDeletedEvent, + 
HeadRefForcePushedEvent, + HeadRefRestoredEvent, + IssueComment, + LabeledEvent, + LockedEvent, + MergedEvent, + MilestonedEvent, + PullRequestReview, + PullRequestReviewComment, + PullRequestReviewThread, + ReferencedEvent, + RenamedTitleEvent, + ReopenedEvent, + ReviewDismissedEvent, + ReviewRequestRemovedEvent, + ReviewRequestedEvent, + SubscribedEvent, + UnassignedEvent, + UnlabeledEvent, + UnlockedEvent, + UnsubscribedEvent, + UserBlockedEvent, + ) + + +class PullRequestTimelineItems(sgqlc.types.Union): + """An item in a pull request timeline""" + + __schema__ = github_schema + __types__ = ( + AddedToProjectEvent, + AssignedEvent, + AutoMergeDisabledEvent, + AutoMergeEnabledEvent, + AutoRebaseEnabledEvent, + AutoSquashEnabledEvent, + AutomaticBaseChangeFailedEvent, + AutomaticBaseChangeSucceededEvent, + BaseRefChangedEvent, + BaseRefDeletedEvent, + BaseRefForcePushedEvent, + ClosedEvent, + CommentDeletedEvent, + ConnectedEvent, + ConvertToDraftEvent, + ConvertedNoteToIssueEvent, + ConvertedToDiscussionEvent, + CrossReferencedEvent, + DemilestonedEvent, + DeployedEvent, + DeploymentEnvironmentChangedEvent, + DisconnectedEvent, + HeadRefDeletedEvent, + HeadRefForcePushedEvent, + HeadRefRestoredEvent, + IssueComment, + LabeledEvent, + LockedEvent, + MarkedAsDuplicateEvent, + MentionedEvent, + MergedEvent, + MilestonedEvent, + MovedColumnsInProjectEvent, + PinnedEvent, + PullRequestCommit, + PullRequestCommitCommentThread, + PullRequestReview, + PullRequestReviewThread, + PullRequestRevisionMarker, + ReadyForReviewEvent, + ReferencedEvent, + RemovedFromProjectEvent, + RenamedTitleEvent, + ReopenedEvent, + ReviewDismissedEvent, + ReviewRequestRemovedEvent, + ReviewRequestedEvent, + SubscribedEvent, + TransferredEvent, + UnassignedEvent, + UnlabeledEvent, + UnlockedEvent, + UnmarkedAsDuplicateEvent, + UnpinnedEvent, + UnsubscribedEvent, + UserBlockedEvent, + ) + + +class PushAllowanceActor(sgqlc.types.Union): + """Types that can be an actor.""" + + __schema__ = 
github_schema + __types__ = (App, Team, User) + + +class Reactor(sgqlc.types.Union): + """Types that can be assigned to reactions.""" + + __schema__ = github_schema + __types__ = (Bot, Mannequin, Organization, User) + + +class ReferencedSubject(sgqlc.types.Union): + """Any referencable object""" + + __schema__ = github_schema + __types__ = (Issue, PullRequest) + + +class RenamedTitleSubject(sgqlc.types.Union): + """An object which has a renamable title""" + + __schema__ = github_schema + __types__ = (Issue, PullRequest) + + +class RequestedReviewer(sgqlc.types.Union): + """Types that can be requested reviewers.""" + + __schema__ = github_schema + __types__ = (Mannequin, Team, User) + + +class ReviewDismissalAllowanceActor(sgqlc.types.Union): + """Types that can be an actor.""" + + __schema__ = github_schema + __types__ = (App, Team, User) + + +class SearchResultItem(sgqlc.types.Union): + """The results of a search.""" + + __schema__ = github_schema + __types__ = (App, Discussion, Issue, MarketplaceListing, Organization, PullRequest, Repository, User) + + +class Sponsor(sgqlc.types.Union): + """Entities that can sponsor others via GitHub Sponsors""" + + __schema__ = github_schema + __types__ = (Organization, User) + + +class SponsorableItem(sgqlc.types.Union): + """Entities that can be sponsored via GitHub Sponsors""" + + __schema__ = github_schema + __types__ = (Organization, User) + + +class StatusCheckRollupContext(sgqlc.types.Union): + """Types that can be inside a StatusCheckRollup context.""" + + __schema__ = github_schema + __types__ = (CheckRun, StatusContext) + + +class VerifiableDomainOwner(sgqlc.types.Union): + """Types that can own a verifiable domain.""" + + __schema__ = github_schema + __types__ = (Enterprise, Organization) + + +######################################################################## +# Schema Entry Points +######################################################################## +github_schema.query_type = Query 
+github_schema.mutation_type = Mutation +github_schema.subscription_type = None diff --git a/airbyte-integrations/connectors/source-github/source_github/graphql.py b/airbyte-integrations/connectors/source-github/source_github/graphql.py new file mode 100644 index 000000000000..3adbbe794ec5 --- /dev/null +++ b/airbyte-integrations/connectors/source-github/source_github/graphql.py @@ -0,0 +1,98 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +import sgqlc.operation + +from . import github_schema + +_schema = github_schema +_schema_root = _schema.github_schema + + +def get_query_pull_requests(owner, name, first, after, direction): + kwargs = {"first": first, "order_by": {"field": "UPDATED_AT", "direction": direction}} + if after: + kwargs["after"] = after + + op = sgqlc.operation.Operation(_schema_root.query_type) + repository = op.repository(owner=owner, name=name) + repository.name() + repository.owner.login() + pull_requests = repository.pull_requests(**kwargs) + pull_requests.nodes.__fields__( + id="node_id", + database_id="id", + number=True, + updated_at="updated_at", + changed_files="changed_files", + deletions=True, + additions=True, + merged=True, + mergeable=True, + can_be_rebased="can_be_rebased", + maintainer_can_modify="maintainer_can_modify", + merge_state_status="merge_state_status", + ) + pull_requests.nodes.comments.__fields__(total_count=True) + pull_requests.nodes.commits.__fields__(total_count=True) + reviews = pull_requests.nodes.reviews(first=100, __alias__="review_comments") + reviews.total_count() + reviews.nodes.comments.__fields__(total_count=True) + user = pull_requests.nodes.merged_by(__alias__="merged_by").__as__(_schema_root.User) + user.__fields__( + id="node_id", + database_id="id", + login=True, + avatar_url="avatar_url", + url="html_url", + is_site_admin="site_admin", + ) + pull_requests.page_info.__fields__(has_next_page=True, end_cursor=True) + return str(op) + + +def get_query_reviews(owner, name, first, 
after, number=None): + op = sgqlc.operation.Operation(_schema_root.query_type) + repository = op.repository(owner=owner, name=name) + repository.name() + repository.owner.login() + if number: + pull_request = repository.pull_request(number=number) + else: + kwargs = {"first": first, "order_by": {"field": "UPDATED_AT", "direction": "ASC"}} + if after: + kwargs["after"] = after + pull_requests = repository.pull_requests(**kwargs) + pull_requests.page_info.__fields__(has_next_page=True, end_cursor=True) + pull_request = pull_requests.nodes + + pull_request.__fields__(number=True, url=True) + kwargs = {"first": first} + if number and after: + kwargs["after"] = after + reviews = pull_request.reviews(**kwargs) + reviews.page_info.__fields__(has_next_page=True, end_cursor=True) + reviews.nodes.__fields__( + id="node_id", + database_id="id", + body=True, + state=True, + url="html_url", + author_association="author_association", + submitted_at="submitted_at", + created_at="created_at", + updated_at="updated_at", + ) + reviews.nodes.commit.oid() + user = reviews.nodes.author(__alias__="user").__as__(_schema_root.User) + user.__fields__( + id="node_id", + database_id="id", + login=True, + avatar_url="avatar_url", + url="html_url", + is_site_admin="site_admin", + ) + return str(op) diff --git a/airbyte-integrations/connectors/source-github/source_github/schemas/pull_request_stats.json b/airbyte-integrations/connectors/source-github/source_github/schemas/pull_request_stats.json index 90ebf80f14a2..2185529c7980 100644 --- a/airbyte-integrations/connectors/source-github/source_github/schemas/pull_request_stats.json +++ b/airbyte-integrations/connectors/source-github/source_github/schemas/pull_request_stats.json @@ -18,16 +18,42 @@ "type": ["null", "boolean"] }, "mergeable": { - "type": ["null", "boolean"] + "type": ["null", "string"] }, - "rebaseable": { + "can_be_rebased": { "type": ["null", "boolean"] }, - "mergeable_state": { + "merge_state_status": { "type": ["null", 
"string"] }, "merged_by": { - "$ref": "user.json" + "$ref": "user_graphql.json" + }, + "merged_by": { + "type": ["null", "object"], + "properties": { + "login": { + "type": ["null", "string"] + }, + "id": { + "type": ["null", "integer"] + }, + "node_id": { + "type": ["null", "string"] + }, + "avatar_url": { + "type": ["null", "string"] + }, + "html_url": { + "type": ["null", "string"] + }, + "type": { + "type": ["null", "string"] + }, + "site_admin": { + "type": ["null", "boolean"] + } + } }, "comments": { "type": ["null", "integer"] diff --git a/airbyte-integrations/connectors/source-github/source_github/schemas/reviews.json b/airbyte-integrations/connectors/source-github/source_github/schemas/reviews.json index d6e480e9a1c3..b5e41c47b169 100644 --- a/airbyte-integrations/connectors/source-github/source_github/schemas/reviews.json +++ b/airbyte-integrations/connectors/source-github/source_github/schemas/reviews.json @@ -12,7 +12,7 @@ "type": ["null", "string"] }, "user": { - "$ref": "user.json" + "$ref": "user_graphql.json" }, "body": { "type": ["null", "string"] @@ -51,7 +51,11 @@ "type": ["null", "string"], "format": "date-time" }, - "pull_request_updated_at": { + "created_at": { + "type": "string", + "format": "date-time" + }, + "updated_at": { "type": "string", "format": "date-time" }, diff --git a/airbyte-integrations/connectors/source-github/source_github/schemas/shared/user_graphql.json b/airbyte-integrations/connectors/source-github/source_github/schemas/shared/user_graphql.json new file mode 100644 index 000000000000..bd42807967a2 --- /dev/null +++ b/airbyte-integrations/connectors/source-github/source_github/schemas/shared/user_graphql.json @@ -0,0 +1,26 @@ +{ + "type": ["null", "object"], + "properties": { + "login": { + "type": ["null", "string"] + }, + "id": { + "type": ["null", "integer"] + }, + "node_id": { + "type": ["null", "string"] + }, + "avatar_url": { + "type": ["null", "string"] + }, + "html_url": { + "type": ["null", "string"] + }, + 
"type": { + "type": ["null", "string"] + }, + "site_admin": { + "type": ["null", "boolean"] + } + } +} diff --git a/airbyte-integrations/connectors/source-github/source_github/source.py b/airbyte-integrations/connectors/source-github/source_github/source.py index fc8153131fa6..ce1aa68601c3 100644 --- a/airbyte-integrations/connectors/source-github/source_github/source.py +++ b/airbyte-integrations/connectors/source-github/source_github/source.py @@ -212,18 +212,18 @@ def streams(self, config: Mapping[str, Any]) -> List[Stream]: projects_stream, PullRequestCommentReactions(**repository_args_with_start_date), PullRequestCommits(parent=pull_requests_stream, **repository_args), - PullRequestStats(parent=pull_requests_stream, **repository_args_with_start_date), + PullRequestStats(**repository_args_with_start_date), pull_requests_stream, Releases(**repository_args_with_start_date), Repositories(**organization_args_with_start_date), ReviewComments(**repository_args_with_start_date), - Reviews(parent=pull_requests_stream, **repository_args_with_start_date), + Reviews(**repository_args_with_start_date), Stargazers(**repository_args_with_start_date), Tags(**repository_args), teams_stream, team_members_stream, Users(**organization_args), - Workflows(**repository_args), - WorkflowRuns(**repository_args), + Workflows(**repository_args_with_start_date), + WorkflowRuns(**repository_args_with_start_date), TeamMemberships(parent=team_members_stream, **repository_args), ] diff --git a/airbyte-integrations/connectors/source-github/source_github/streams.py b/airbyte-integrations/connectors/source-github/source_github/streams.py index 4f3e0881cbc6..30eebe39b72a 100644 --- a/airbyte-integrations/connectors/source-github/source_github/streams.py +++ b/airbyte-integrations/connectors/source-github/source_github/streams.py @@ -4,16 +4,16 @@ import time from abc import ABC, abstractmethod -from copy import deepcopy from typing import Any, Iterable, List, Mapping, MutableMapping, Optional, 
Union from urllib import parse import pendulum import requests from airbyte_cdk.models import SyncMode -from airbyte_cdk.sources.streams.http import HttpStream, HttpSubStream +from airbyte_cdk.sources.streams.http import HttpStream from requests.exceptions import HTTPError +from .graphql import get_query_pull_requests, get_query_reviews from .utils import getter DEFAULT_PAGE_SIZE = 100 @@ -59,21 +59,24 @@ def next_page_token(self, response: requests.Response) -> Optional[Mapping[str, def should_retry(self, response: requests.Response) -> bool: # We don't call `super()` here because we have custom error handling and GitHub API sometimes returns strange # errors. So in `read_records()` we have custom error handling which don't require to call `super()` here. - retry_flag = response.headers.get("X-RateLimit-Remaining") == "0" or response.status_code in ( - requests.codes.SERVER_ERROR, - requests.codes.BAD_GATEWAY, + retry_flag = ( + # Rate limit HTTP headers + # https://docs.github.com/en/rest/overview/resources-in-the-rest-api#rate-limit-http-headers + response.headers.get("X-RateLimit-Remaining") == "0" + # Secondary rate limits + # https://docs.github.com/en/rest/overview/resources-in-the-rest-api#secondary-rate-limits + or response.headers.get("Retry-After") + or response.status_code + in ( + requests.codes.SERVER_ERROR, + requests.codes.BAD_GATEWAY, + ) ) if retry_flag: self.logger.info( f"Rate limit handling for stream `{self.name}` for the response with {response.status_code} status code with message: {response.text}" ) - # Handling secondary rate limits for Github - # Additional information here: https://docs.github.com/en/rest/guides/best-practices-for-integrators#dealing-with-secondary-rate-limits - elif response.headers.get("Retry-After"): - time_delay = int(response.headers["Retry-After"]) - self.logger.info(f"Handling Secondary Rate limits, setting sync delay for {time_delay} second(s)") - time.sleep(time_delay) return retry_flag def backoff_time(self, 
response: requests.Response) -> Union[int, float]: @@ -84,6 +87,10 @@ def backoff_time(self, response: requests.Response) -> Union[int, float]: if response.status_code == requests.codes.SERVER_ERROR: return None + retry_after = int(response.headers.get("Retry-After", 0)) + if retry_after: + return retry_after + reset_time = response.headers.get("X-RateLimit-Reset") backoff_time = float(reset_time) - time.time() if reset_time else 60 @@ -191,7 +198,7 @@ class SemiIncrementalMixin: # records we can just stop and not process other record. This will increase speed of each incremental stream # which supports those 2 request parameters. Currently only `IssueMilestones` and `PullRequests` streams are # supporting this. - is_sorted_descending = False + is_sorted = False def __init__(self, start_date: str = "", **kwargs): super().__init__(**kwargs) @@ -211,9 +218,8 @@ def convert_cursor_value(self, value): @property def state_checkpoint_interval(self) -> Optional[int]: - if not self.is_sorted_descending: + if self.is_sorted == "asc": return self.page_size - return None def get_updated_state(self, current_stream_state: MutableMapping[str, Any], latest_record: Mapping[str, Any]): """ @@ -237,9 +243,10 @@ def _get_starting_point(self, stream_state: Mapping[str, Any], stream_slice: Map return self._start_date def get_starting_point(self, stream_state: Mapping[str, Any], stream_slice: Mapping[str, Any]) -> str: - if self.__slice_key not in self._starting_point_cache: - self._starting_point_cache[self.__slice_key] = self._get_starting_point(stream_state, stream_slice) - return self._starting_point_cache[self.__slice_key] + slice_value = stream_slice[self.__slice_key] + if slice_value not in self._starting_point_cache: + self._starting_point_cache[slice_value] = self._get_starting_point(stream_state, stream_slice) + return self._starting_point_cache[slice_value] def read_records( self, @@ -255,7 +262,7 @@ def read_records( cursor_value = 
self.convert_cursor_value(record[self.cursor_field]) if cursor_value > start_point: yield record - elif self.is_sorted_descending and cursor_value < start_point: + elif self.is_sorted == "desc" and cursor_value < start_point: break def stream_slices(self, **kwargs) -> Iterable[Optional[Mapping[str, Any]]]: @@ -352,7 +359,7 @@ class Repositories(SemiIncrementalMixin, Organizations): API docs: https://docs.github.com/en/rest/reference/repos#list-organization-repositories """ - is_sorted_descending = True + is_sorted = "desc" stream_base_params = { "sort": "updated", "direction": "desc", @@ -436,12 +443,11 @@ class Events(SemiIncrementalMixin, GithubStream): class PullRequests(SemiIncrementalMixin, GithubStream): """ - API docs: https://docs.github.com/en/rest/reference/pulls#list-pull-requests + API docs: https://docs.github.com/en/rest/pulls/pulls#list-pull-requests """ use_cache = True large_stream = True - first_read_override_key = "first_read_override" def __init__(self, **kwargs): super().__init__(**kwargs) @@ -451,7 +457,7 @@ def read_records(self, stream_state: Mapping[str, Any] = None, **kwargs) -> Iter """ Decide if this a first read or not by the presence of the state object """ - self._first_read = not bool(stream_state) or stream_state.get(self.first_read_override_key, False) + self._first_read = not bool(stream_state) yield from super().read_records(stream_state=stream_state, **kwargs) def path(self, stream_slice: Mapping[str, Any] = None, **kwargs) -> str: @@ -470,16 +476,18 @@ def request_params(self, **kwargs) -> MutableMapping[str, Any]: base_params = super().request_params(**kwargs) # The very first time we read this stream we want to read ascending so we can save state in case of # a halfway failure. But if there is state, we read descending to allow incremental behavior. 
- params = {"state": "all", "sort": "updated", "direction": "desc" if self.is_sorted_descending else "asc"} + params = {"state": "all", "sort": "updated", "direction": self.is_sorted} return {**base_params, **params} @property - def is_sorted_descending(self) -> bool: + def is_sorted(self) -> str: """ Depending if there any state we read stream in ascending or descending order. """ - return not self._first_read + if self._first_read: + return "asc" + return "desc" class CommitComments(SemiIncrementalMixin, GithubStream): @@ -498,7 +506,7 @@ class IssueMilestones(SemiIncrementalMixin, GithubStream): API docs: https://docs.github.com/en/rest/reference/issues#list-milestones """ - is_sorted_descending = True + is_sorted = "desc" stream_base_params = { "state": "all", "sort": "updated", @@ -655,11 +663,12 @@ def get_starting_point(self, stream_state: Mapping[str, Any], stream_slice: Mapp class Issues(IncrementalMixin, GithubStream): """ - API docs: https://docs.github.com/en/rest/reference/issues#list-repository-issues + API docs: https://docs.github.com/en/rest/issues/issues#list-repository-issues """ use_cache = True large_stream = True + is_sorted = "asc" stream_base_params = { "state": "all", @@ -680,91 +689,163 @@ def path(self, stream_slice: Mapping[str, Any] = None, **kwargs) -> str: return f"repos/{stream_slice['repository']}/pulls/comments" -# Pull request substreams - +class PullRequestStats(SemiIncrementalMixin, GithubStream): + """ + API docs: https://docs.github.com/en/graphql/reference/objects#pullrequest + """ -class PullRequestSubstream(HttpSubStream, SemiIncrementalMixin, GithubStream, ABC): - def __init__(self, parent: PullRequests, **kwargs): - super().__init__(parent=parent, **kwargs) + is_sorted = "asc" + http_method = "POST" - def stream_slices( - self, sync_mode: SyncMode, cursor_field: List[str] = None, stream_state: Mapping[str, Any] = None - ) -> Iterable[Optional[Mapping[str, Any]]]: - """ - Override the parent PullRequests stream 
configuration to always fetch records in ascending order - """ - parent_state = deepcopy(stream_state) or {} - parent_state[PullRequests.first_read_override_key] = True - parent_stream_slices = super().stream_slices(sync_mode=sync_mode, cursor_field=cursor_field, stream_state=parent_state) - for parent_stream_slice in parent_stream_slices: - yield { - "pull_request_updated_at": parent_stream_slice["parent"]["updated_at"], - "pull_request_number": parent_stream_slice["parent"]["number"], - "repository": parent_stream_slice["parent"]["repository"], - } + def path( + self, *, stream_state: Mapping[str, Any] = None, stream_slice: Mapping[str, Any] = None, next_page_token: Mapping[str, Any] = None + ) -> str: + return "graphql" - def read_records( - self, - sync_mode: SyncMode, - cursor_field: List[str] = None, - stream_slice: Mapping[str, Any] = None, - stream_state: Mapping[str, Any] = None, - ) -> Iterable[Mapping[str, Any]]: - """ - We've already determined the list of pull requests to run the stream against. - Skip the start_point_map and cursor_field logic in SemiIncrementalMixin.read_records. 
- """ - yield from super(SemiIncrementalMixin, self).read_records( - sync_mode=sync_mode, cursor_field=cursor_field, stream_slice=stream_slice, stream_state=stream_state - ) + def raise_error_from_response(self, response_json): + if "errors" in response_json: + raise Exception(str(response_json["errors"])) + def _get_name(self, repository): + return repository["owner"]["login"] + "/" + repository["name"] -class PullRequestStats(PullRequestSubstream): - """ - API docs: https://docs.github.com/en/rest/reference/pulls#get-a-pull-request - """ + def parse_response(self, response: requests.Response, **kwargs) -> Iterable[Mapping]: + self.raise_error_from_response(response_json=response.json()) + repository = response.json()["data"]["repository"] + if repository: + nodes = repository["pullRequests"]["nodes"] + for record in nodes: + record["review_comments"] = sum([node["comments"]["totalCount"] for node in record["review_comments"]["nodes"]]) + record["comments"] = record["comments"]["totalCount"] + record["commits"] = record["commits"]["totalCount"] + record["repository"] = self._get_name(repository) + if record["merged_by"]: + record["merged_by"]["type"] = record["merged_by"].pop("__typename") + yield record - @property - def record_keys(self) -> List[str]: - return list(self.get_json_schema()["properties"].keys()) + def next_page_token(self, response: requests.Response) -> Optional[Mapping[str, Any]]: + repository = response.json()["data"]["repository"] + if repository: + pageInfo = repository["pullRequests"]["pageInfo"] + if pageInfo["hasNextPage"]: + return {"after": pageInfo["endCursor"]} - def path( - self, stream_state: Mapping[str, Any] = None, stream_slice: Mapping[str, Any] = None, next_page_token: Mapping[str, Any] = None - ) -> str: - return f"repos/{stream_slice['repository']}/pulls/{stream_slice['pull_request_number']}" + def request_params( + self, stream_state: Mapping[str, Any], stream_slice: Mapping[str, Any] = None, next_page_token: Mapping[str, Any] 
= None + ) -> MutableMapping[str, Any]: + return {} - def parse_response(self, response: requests.Response, stream_slice: Mapping[str, Any], **kwargs) -> Iterable[Mapping]: - yield self.transform(record=response.json(), stream_slice=stream_slice) + def request_body_json( + self, + stream_state: Mapping[str, Any], + stream_slice: Mapping[str, Any] = None, + next_page_token: Mapping[str, Any] = None, + ) -> Optional[Mapping]: + organization, name = stream_slice["repository"].split("/") + if next_page_token: + next_page_token = next_page_token["after"] + query = get_query_pull_requests( + owner=organization, name=name, first=self.page_size, after=next_page_token, direction=self.is_sorted.upper() + ) + return {"query": query} - def transform(self, record: MutableMapping[str, Any], stream_slice: Mapping[str, Any]) -> MutableMapping[str, Any]: - record = super().transform(record=record, stream_slice=stream_slice) - return {key: value for key, value in record.items() if key in self.record_keys} + def request_headers(self, **kwargs) -> Mapping[str, Any]: + base_headers = super().request_headers(**kwargs) + # https://docs.github.com/en/graphql/overview/schema-previews#merge-info-preview + headers = {"Accept": "application/vnd.github.merge-info-preview+json"} + return {**base_headers, **headers} -class Reviews(PullRequestSubstream): +class Reviews(SemiIncrementalMixin, GithubStream): """ - API docs: https://docs.github.com/en/rest/reference/pulls#list-reviews-for-a-pull-request + API docs: https://docs.github.com/en/graphql/reference/objects#pullrequestreview """ - cursor_field = "pull_request_updated_at" + is_sorted = False + http_method = "POST" + cursor_field = "updated_at" + + def __init__(self, **kwargs): + super().__init__(**kwargs) + self.pull_requests_cursor = {} + self.reviews_cursors = {} def path( - self, stream_state: Mapping[str, Any] = None, stream_slice: Mapping[str, Any] = None, next_page_token: Mapping[str, Any] = None + self, *, stream_state: Mapping[str, 
Any] = None, stream_slice: Mapping[str, Any] = None, next_page_token: Mapping[str, Any] = None ) -> str: - return f"repos/{stream_slice['repository']}/pulls/{stream_slice['pull_request_number']}/reviews" + return "graphql" - # Set the parent stream state's cursor field before fetching its records - def stream_slices(self, stream_state: Mapping[str, Any] = None, **kwargs) -> Iterable[Optional[Mapping[str, Any]]]: - parent_state = deepcopy(stream_state) or {} - for repository in self.repositories: - if repository in parent_state and self.cursor_field in parent_state[repository]: - parent_state[repository][self.parent.cursor_field] = parent_state[repository][self.cursor_field] - yield from super().stream_slices(stream_state=parent_state, **kwargs) + def request_params( + self, stream_state: Mapping[str, Any], stream_slice: Mapping[str, Any] = None, next_page_token: Mapping[str, Any] = None + ) -> MutableMapping[str, Any]: + return {} + + def raise_error_from_response(self, response_json): + if "errors" in response_json: + raise Exception(str(response_json["errors"])) + + def _get_records(self, pull_request, repository_name): + "yield review records from pull_request" + for record in pull_request["reviews"]["nodes"]: + record["repository"] = repository_name + record["pull_request_url"] = pull_request["url"] + if record["commit"]: + record["commit_id"] = record.pop("commit")["oid"] + record["user"]["type"] = record["user"].pop("__typename") + # for backward compatibility with REST API response + record["_links"] = { + "html": {"href": record["html_url"]}, + "pull_request": {"href": record["pull_request_url"]}, + } + yield record - def transform(self, record: MutableMapping[str, Any], stream_slice: Mapping[str, Any]) -> MutableMapping[str, Any]: - record = super().transform(record=record, stream_slice=stream_slice) - record[self.cursor_field] = stream_slice[self.cursor_field] - return record + def _get_name(self, repository): + return repository["owner"]["login"] + "/" + 
repository["name"] + + def parse_response(self, response: requests.Response, **kwargs) -> Iterable[Mapping]: + self.raise_error_from_response(response_json=response.json()) + repository = response.json()["data"]["repository"] + if repository: + repository_name = self._get_name(repository) + if "pullRequests" in repository: + for pull_request in repository["pullRequests"]["nodes"]: + yield from self._get_records(pull_request, repository_name) + elif "pullRequest" in repository: + yield from self._get_records(repository["pullRequest"], repository_name) + + def next_page_token(self, response: requests.Response) -> Optional[Mapping[str, Any]]: + repository = response.json()["data"]["repository"] + if repository: + repository_name = self._get_name(repository) + reviews_cursors = self.reviews_cursors.setdefault(repository_name, {}) + if "pullRequests" in repository: + if repository["pullRequests"]["pageInfo"]["hasNextPage"]: + self.pull_requests_cursor[repository_name] = repository["pullRequests"]["pageInfo"]["endCursor"] + for pull_request in repository["pullRequests"]["nodes"]: + if pull_request["reviews"]["pageInfo"]["hasNextPage"]: + pull_request_number = pull_request["number"] + reviews_cursors[pull_request_number] = pull_request["reviews"]["pageInfo"]["endCursor"] + elif "pullRequest" in repository: + if repository["pullRequest"]["reviews"]["pageInfo"]["hasNextPage"]: + pull_request_number = repository["pullRequest"]["number"] + reviews_cursors[pull_request_number] = repository["pullRequest"]["reviews"]["pageInfo"]["endCursor"] + if reviews_cursors: + number, after = reviews_cursors.popitem() + return {"after": after, "number": number} + if repository_name in self.pull_requests_cursor: + return {"after": self.pull_requests_cursor.pop(repository_name)} + + def request_body_json( + self, + stream_state: Mapping[str, Any], + stream_slice: Mapping[str, Any] = None, + next_page_token: Mapping[str, Any] = None, + ) -> Optional[Mapping]: + organization, name = 
stream_slice["repository"].split("/") + if not next_page_token: + next_page_token = {"after": None} + query = get_query_reviews(owner=organization, name=name, first=self.page_size, **next_page_token) + return {"query": query} class PullRequestCommits(GithubStream): @@ -906,7 +987,7 @@ class PullRequestCommentReactions(ReactionStream): class Deployments(SemiIncrementalMixin, GithubStream): """ - API docs: https://docs.github.com/en/rest/reference/deployments#list-deployments + API docs: https://docs.github.com/en/rest/deployments/deployments#list-deployments """ def path(self, stream_slice: Mapping[str, Any] = None, **kwargs) -> str: @@ -1056,7 +1137,7 @@ def transform(self, record: MutableMapping[str, Any], stream_slice: Mapping[str, class Workflows(SemiIncrementalMixin, GithubStream): """ Get all workflows of a GitHub repository - API documentation: https://docs.github.com/en/rest/reference/actions#workflows + API documentation: https://docs.github.com/en/rest/actions/workflows#list-repository-workflows """ def path(self, stream_slice: Mapping[str, Any] = None, **kwargs) -> str: @@ -1073,13 +1154,16 @@ def convert_cursor_value(self, value): class WorkflowRuns(SemiIncrementalMixin, GithubStream): """ - Get all workflows of a GitHub repository - API documentation: https://docs.github.com/en/rest/reference/actions#list-workflow-runs-for-a-repository + Get all workflow runs for a GitHub repository + API documentation: https://docs.github.com/en/rest/actions/workflow-runs#list-workflow-runs-for-a-repository """ # key for accessing slice value from record record_slice_key = ["repository", "full_name"] + # https://docs.github.com/en/actions/managing-workflow-runs/re-running-workflows-and-jobs + re_run_period = 32 # days + def path(self, stream_slice: Mapping[str, Any] = None, **kwargs) -> str: return f"repos/{stream_slice['repository']}/actions/runs" @@ -1088,6 +1172,31 @@ def parse_response(self, response: requests.Response, stream_slice: Mapping[str, for record in 
response: yield record + def read_records( + self, + sync_mode: SyncMode, + cursor_field: List[str] = None, + stream_slice: Mapping[str, Any] = None, + stream_state: Mapping[str, Any] = None, + ) -> Iterable[Mapping[str, Any]]: + # Records in the workflows_runs stream are naturally descending sorted by `created_at` field. + # On first sight this is not big deal because cursor_field is `updated_at`. + # But we still can use `created_at` as a breakpoint because after 30 days period + # https://docs.github.com/en/actions/managing-workflow-runs/re-running-workflows-and-jobs + # workflows_runs records cannot be updated. It means if we initially fully synced stream on subsequent incremental sync we need + # only to look behind on 30 days to find all records which were updated. + start_point = self.get_starting_point(stream_state=stream_state, stream_slice=stream_slice) + break_point = (pendulum.parse(start_point) - pendulum.duration(days=self.re_run_period)).to_iso8601_string() + for record in super(SemiIncrementalMixin, self).read_records( + sync_mode=sync_mode, cursor_field=cursor_field, stream_slice=stream_slice, stream_state=stream_state + ): + cursor_value = record[self.cursor_field] + created_at = record["created_at"] + if cursor_value > start_point: + yield record + if created_at < break_point: + break + class TeamMembers(GithubStream): """ diff --git a/airbyte-integrations/connectors/source-github/unit_tests/graphql_reviews_responses.json b/airbyte-integrations/connectors/source-github/unit_tests/graphql_reviews_responses.json new file mode 100644 index 000000000000..85706e461fdd --- /dev/null +++ b/airbyte-integrations/connectors/source-github/unit_tests/graphql_reviews_responses.json @@ -0,0 +1,405 @@ +[ + { + "data": { + "repository": { + "owner": { + "login": "airbytehq" + }, + "name": "airbyte", + "pullRequests": { + "nodes": [ + { + "number": 1, + "url": "https://github.com/airbytehq/airbyte/pull/1", + "reviews": { + "nodes": [ + { + "id": 1000, + 
"updated_at": "2000-01-01T00:00:01Z", + "html_url": "https://github.com/airbytehq/airbyte/pull/1#pullrequestreview-1000", + "commit": null, + "user": { + "__typename": "User" + } + }, + { + "id": 1001, + "updated_at": "2000-01-01T00:00:01Z", + "html_url": "https://github.com/airbytehq/airbyte/pull/1#pullrequestreview-1001", + "commit": null, + "user": { + "__typename": "User" + } + } + ], + "pageInfo": { + "endCursor": "cursor", + "hasNextPage": true + } + } + }, + { + "number": 2, + "url": "https://github.com/airbytehq/airbyte/pull/2", + "reviews": { + "nodes": [ + { + "id": 1002, + "updated_at": "2000-01-01T00:00:01Z", + "html_url": "https://github.com/airbytehq/airbyte/pull/2#pullrequestreview-1002", + "commit": null, + "user": { + "__typename": "User" + } + }, + { + "id": 1003, + "updated_at": "2000-01-01T00:00:01Z", + "html_url": "https://github.com/airbytehq/airbyte/pull/2#pullrequestreview-1003", + "commit": null, + "user": { + "__typename": "User" + } + } + ], + "pageInfo": { + "endCursor": "cursor", + "hasNextPage": true + } + } + } + ], + "pageInfo": { + "endCursor": "cursor", + "hasNextPage": true + } + } + } + } + }, + { + "data": { + "repository": { + "owner": { + "login": "airbytehq" + }, + "name": "airbyte", + "pullRequest": { + "number": 1, + "url": "https://github.com/airbytehq/airbyte/pull/1", + "reviews": { + "nodes": [ + { + "id": 1004, + "updated_at": "2000-01-01T00:00:01Z", + "html_url": "https://github.com/airbytehq/airbyte/pull/1#pullrequestreview-1004", + "commit": null, + "user": { + "__typename": "User" + } + }, + { + "id": 1005, + "updated_at": "2000-01-01T00:00:01Z", + "html_url": "https://github.com/airbytehq/airbyte/pull/1#pullrequestreview-1005", + "commit": null, + "user": { + "__typename": "User" + } + } + ], + "pageInfo": { + "endCursor": null, + "hasNextPage": false + } + } + } + } + } + }, + { + "data": { + "repository": { + "owner": { + "login": "airbytehq" + }, + "name": "airbyte", + "pullRequest": { + "number": 2, + "url": 
"https://github.com/airbytehq/airbyte/pull/2", + "reviews": { + "nodes": [ + { + "id": 1006, + "updated_at": "2000-01-01T00:00:01Z", + "html_url": "https://github.com/airbytehq/airbyte/pull/2#pullrequestreview-1006", + "commit": null, + "user": { + "__typename": "User" + } + }, + { + "id": 1007, + "updated_at": "2000-01-01T00:00:01Z", + "html_url": "https://github.com/airbytehq/airbyte/pull/2#pullrequestreview-1007", + "commit": null, + "user": { + "__typename": "User" + } + } + ], + "pageInfo": { + "endCursor": null, + "hasNextPage": false + } + } + } + } + } + }, + { + "data": { + "repository": { + "owner": { + "login": "airbytehq" + }, + "name": "airbyte", + "pullRequests": { + "nodes": [ + { + "number": 3, + "url": "https://github.com/airbytehq/airbyte/pull/3", + "reviews": { + "nodes": [ + { + "id": 1008, + "updated_at": "2000-01-01T00:00:01Z", + "html_url": "https://github.com/airbytehq/airbyte/pull/3#pullrequestreview-1008", + "commit": null, + "user": { + "__typename": "User" + } + } + ], + "pageInfo": { + "endCursor": null, + "hasNextPage": false + } + } + } + ], + "pageInfo": { + "endCursor": null, + "hasNextPage": false + } + } + } + } + }, + { + "data": { + "repository": { + "owner": { + "login": "airbytehq" + }, + "name": "airbyte", + "pullRequests": { + "nodes": [ + { + "number": 1, + "url": "https://github.com/airbytehq/airbyte/pull/1", + "reviews": { + "nodes": [ + { + "id": 1000, + "updated_at": "2000-01-01T00:00:02Z", + "html_url": "https://github.com/airbytehq/airbyte/pull/1#pullrequestreview-1000", + "commit": null, + "user": { + "__typename": "User" + } + }, + { + "id": 1001, + "updated_at": "2000-01-01T00:00:01Z", + "html_url": "https://github.com/airbytehq/airbyte/pull/1#pullrequestreview-1001", + "commit": null, + "user": { + "__typename": "User" + } + } + ], + "pageInfo": { + "endCursor": "cursor", + "hasNextPage": true + } + } + }, + { + "number": 2, + "url": "https://github.com/airbytehq/airbyte/pull/2", + "reviews": { + "nodes": [ + { + 
"id": 1002, + "updated_at": "2000-01-01T00:00:01Z", + "html_url": "https://github.com/airbytehq/airbyte/pull/2#pullrequestreview-1002", + "commit": null, + "user": { + "__typename": "User" + } + }, + { + "id": 1003, + "updated_at": "2000-01-01T00:00:01Z", + "html_url": "https://github.com/airbytehq/airbyte/pull/2#pullrequestreview-1003", + "commit": null, + "user": { + "__typename": "User" + } + } + ], + "pageInfo": { + "endCursor": "cursor", + "hasNextPage": true + } + } + } + ], + "pageInfo": { + "endCursor": "cursor", + "hasNextPage": true + } + } + } + } + }, + { + "data": { + "repository": { + "owner": { + "login": "airbytehq" + }, + "name": "airbyte", + "pullRequest": { + "number": 1, + "url": "https://github.com/airbytehq/airbyte/pull/1", + "reviews": { + "nodes": [ + { + "id": 1004, + "updated_at": "2000-01-01T00:00:01Z", + "html_url": "https://github.com/airbytehq/airbyte/pull/1#pullrequestreview-1004", + "commit": null, + "user": { + "__typename": "User" + } + }, + { + "id": 1005, + "updated_at": "2000-01-01T00:00:01Z", + "html_url": "https://github.com/airbytehq/airbyte/pull/1#pullrequestreview-1005", + "commit": null, + "user": { + "__typename": "User" + } + } + ], + "pageInfo": { + "endCursor": null, + "hasNextPage": false + } + } + } + } + } + }, + { + "data": { + "repository": { + "owner": { + "login": "airbytehq" + }, + "name": "airbyte", + "pullRequest": { + "number": 2, + "url": "https://github.com/airbytehq/airbyte/pull/2", + "reviews": { + "nodes": [ + { + "id": 1006, + "updated_at": "2000-01-01T00:00:01Z", + "html_url": "https://github.com/airbytehq/airbyte/pull/2#pullrequestreview-1006", + "commit": null, + "user": { + "__typename": "User" + } + }, + { + "id": 1007, + "updated_at": "2000-01-01T00:00:02Z", + "html_url": "https://github.com/airbytehq/airbyte/pull/2#pullrequestreview-1007", + "commit": null, + "user": { + "__typename": "User" + } + } + ], + "pageInfo": { + "endCursor": null, + "hasNextPage": false + } + } + } + } + } + }, + { + 
"data": { + "repository": { + "owner": { + "login": "airbytehq" + }, + "name": "airbyte", + "pullRequests": { + "nodes": [ + { + "number": 3, + "url": "https://github.com/airbytehq/airbyte/pull/3", + "reviews": { + "nodes": [ + { + "id": 1008, + "updated_at": "2000-01-01T00:00:01Z", + "html_url": "https://github.com/airbytehq/airbyte/pull/3#pullrequestreview-1008", + "commit": null, + "user": { + "__typename": "User" + } + }, + { + "id": 1009, + "updated_at": "2000-01-01T00:00:02Z", + "html_url": "https://github.com/airbytehq/airbyte/pull/3#pullrequestreview-1009", + "commit": null, + "user": { + "__typename": "User" + } + } + ], + "pageInfo": { + "endCursor": null, + "hasNextPage": false + } + } + } + ], + "pageInfo": { + "endCursor": null, + "hasNextPage": false + } + } + } + } + } +] diff --git a/airbyte-integrations/connectors/source-github/unit_tests/test_stream.py b/airbyte-integrations/connectors/source-github/unit_tests/test_stream.py index 3808f9df3f2e..926cc2a1b8f2 100644 --- a/airbyte-integrations/connectors/source-github/unit_tests/test_stream.py +++ b/airbyte-integrations/connectors/source-github/unit_tests/test_stream.py @@ -2,7 +2,9 @@ # Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
# +import json from http import HTTPStatus +from pathlib import Path from unittest.mock import MagicMock, patch import pytest @@ -10,6 +12,7 @@ import responses from airbyte_cdk.sources.streams.http.exceptions import BaseBackoffException from responses import matchers +from source_github import streams from source_github.streams import ( Branches, Collaborators, @@ -37,9 +40,10 @@ TeamMemberships, Teams, Users, + WorkflowRuns, ) -from .utils import ProjectsResponsesAPI, read_full_refresh, read_incremental, urlbase +from .utils import ProjectsResponsesAPI, read_full_refresh, read_incremental DEFAULT_BACKOFF_DELAYS = [5, 10, 20, 40, 80] @@ -66,15 +70,18 @@ def test_internal_server_error_retry(time_mock): @pytest.mark.parametrize( - ("http_status", "response_text", "expected_backoff_time"), + ("http_status", "response_headers", "expected_backoff_time"), [ - (HTTPStatus.BAD_GATEWAY, "", 60), + (HTTPStatus.BAD_GATEWAY, {}, 60), + (HTTPStatus.FORBIDDEN, {"Retry-After": 120}, 120), + (HTTPStatus.FORBIDDEN, {"X-RateLimit-Reset": 1655804724}, 300.0), ], ) -def test_backoff_time(http_status, response_text, expected_backoff_time): +@patch("time.time", return_value=1655804424.0) +def test_backoff_time(time_mock, http_status, response_headers, expected_backoff_time): response_mock = MagicMock() response_mock.status_code = http_status - response_mock.text = response_text + response_mock.headers = response_headers args = {"authenticator": None, "repositories": ["test_repo"], "start_date": "start_date", "page_size_for_large_streams": 30} stream = PullRequestCommentReactions(**args) assert stream.backoff_time(response_mock) == expected_backoff_time @@ -83,12 +90,27 @@ def test_backoff_time(http_status, response_text, expected_backoff_time): @responses.activate @patch("time.sleep") def test_retry_after(time_mock): + first_request = True + + def request_callback(request): + nonlocal first_request + if first_request: + first_request = False + return (HTTPStatus.FORBIDDEN, 
{"Retry-After": "60"}, "") + return (HTTPStatus.OK, {}, '{"login": "airbytehq"}') + + responses.add_callback( + responses.GET, + "https://api.github.com/orgs/airbytehq", + callback=request_callback, + content_type="application/json", + ) + stream = Organizations(organizations=["airbytehq"]) - responses.add("GET", "https://api.github.com/orgs/airbytehq", json={"login": "airbytehq"}, headers={"Retry-After": "10"}) read_full_refresh(stream) - assert time_mock.call_args[0][0] == 10 - assert len(responses.calls) == 1 + assert len(responses.calls) == 2 assert responses.calls[0].request.url == "https://api.github.com/orgs/airbytehq?per_page=100" + assert responses.calls[1].request.url == "https://api.github.com/orgs/airbytehq?per_page=100" @responses.activate @@ -568,9 +590,9 @@ def test_stream_project_cards(): def test_stream_comments(): repository_args_with_start_date = { - "repositories": ["organization/repository"], + "repositories": ["organization/repository", "airbytehq/airbyte"], "page_size_for_large_streams": 2, - "start_date": "2022-02-02T10:10:03Z", + "start_date": "2022-02-02T10:10:01Z", } stream = Comments(**repository_args_with_start_date) @@ -578,10 +600,10 @@ def test_stream_comments(): data = [ {"id": 1, "updated_at": "2022-02-02T10:10:02Z"}, {"id": 2, "updated_at": "2022-02-02T10:10:04Z"}, - {"id": 3, "updated_at": "2022-02-02T10:10:06Z"}, - {"id": 4, "updated_at": "2022-02-02T10:10:08Z"}, - {"id": 5, "updated_at": "2022-02-02T10:10:10Z"}, - {"id": 6, "updated_at": "2022-02-02T10:10:12Z"}, + {"id": 3, "updated_at": "2022-02-02T10:12:06Z"}, + {"id": 4, "updated_at": "2022-02-02T10:12:08Z"}, + {"id": 5, "updated_at": "2022-02-02T10:12:10Z"}, + {"id": 6, "updated_at": "2022-02-02T10:12:12Z"}, ] api_url = "https://api.github.com/repos/organization/repository/issues/comments" @@ -590,39 +612,110 @@ def test_stream_comments(): "GET", api_url, json=data[0:2], - match=[matchers.query_param_matcher({"since": "2022-02-02T10:10:03Z"}, strict_match=False)], + 
match=[matchers.query_param_matcher({"since": "2022-02-02T10:10:01Z", "per_page": "2"})], ) responses.add( "GET", api_url, - json=data[2:4], + json=data[1:3], headers={ "Link": '; rel="next"' }, - match=[matchers.query_param_matcher({"since": "2022-02-02T10:10:04Z"}, strict_match=False)], + match=[matchers.query_param_matcher({"since": "2022-02-02T10:10:04Z", "per_page": "2"})], + ) + + responses.add( + "GET", + api_url, + json=data[3:5], + headers={ + "Link": '; rel="next"' + }, + match=[matchers.query_param_matcher({"since": "2022-02-02T10:10:04Z", "page": "2", "per_page": "2"})], ) responses.add( "GET", api_url, - json=data[4:6], - match=[matchers.query_param_matcher({"since": "2022-02-02T10:10:04Z", "page": "2", "per_page": "2"}, strict_match=False)], + json=data[5:], + match=[matchers.query_param_matcher({"since": "2022-02-02T10:10:04Z", "page": "3", "per_page": "2"})], + ) + + data = [ + {"id": 1, "updated_at": "2022-02-02T10:11:02Z"}, + {"id": 2, "updated_at": "2022-02-02T10:11:04Z"}, + {"id": 3, "updated_at": "2022-02-02T10:13:06Z"}, + {"id": 4, "updated_at": "2022-02-02T10:13:08Z"}, + {"id": 5, "updated_at": "2022-02-02T10:13:10Z"}, + {"id": 6, "updated_at": "2022-02-02T10:13:12Z"}, + ] + + api_url = "https://api.github.com/repos/airbytehq/airbyte/issues/comments" + + responses.add( + "GET", + api_url, + json=data[0:2], + match=[matchers.query_param_matcher({"since": "2022-02-02T10:10:01Z", "per_page": "2"})], + ) + + responses.add( + "GET", + api_url, + json=data[1:3], + headers={ + "Link": '; rel="next"' + }, + match=[matchers.query_param_matcher({"since": "2022-02-02T10:11:04Z", "per_page": "2"})], + ) + + responses.add( + "GET", + api_url, + json=data[3:5], + headers={ + "Link": '; rel="next"' + }, + match=[matchers.query_param_matcher({"since": "2022-02-02T10:11:04Z", "page": "2", "per_page": "2"})], + ) + + responses.add( + "GET", + api_url, + json=data[5:], + match=[matchers.query_param_matcher({"since": "2022-02-02T10:11:04Z", "page": "3", 
"per_page": "2"})], ) stream_state = {} records = read_incremental(stream, stream_state) - assert records == [{"id": 2, "repository": "organization/repository", "updated_at": "2022-02-02T10:10:04Z"}] - assert stream_state == {"organization/repository": {"updated_at": "2022-02-02T10:10:04Z"}} + assert records == [ + {"id": 1, "repository": "organization/repository", "updated_at": "2022-02-02T10:10:02Z"}, + {"id": 2, "repository": "organization/repository", "updated_at": "2022-02-02T10:10:04Z"}, + {"id": 1, "repository": "airbytehq/airbyte", "updated_at": "2022-02-02T10:11:02Z"}, + {"id": 2, "repository": "airbytehq/airbyte", "updated_at": "2022-02-02T10:11:04Z"}, + ] + + assert stream_state == { + "airbytehq/airbyte": {"updated_at": "2022-02-02T10:11:04Z"}, + "organization/repository": {"updated_at": "2022-02-02T10:10:04Z"}, + } records = read_incremental(stream, stream_state) assert records == [ - {"id": 3, "repository": "organization/repository", "updated_at": "2022-02-02T10:10:06Z"}, - {"id": 4, "repository": "organization/repository", "updated_at": "2022-02-02T10:10:08Z"}, - {"id": 5, "repository": "organization/repository", "updated_at": "2022-02-02T10:10:10Z"}, - {"id": 6, "repository": "organization/repository", "updated_at": "2022-02-02T10:10:12Z"}, + {"id": 3, "repository": "organization/repository", "updated_at": "2022-02-02T10:12:06Z"}, + {"id": 4, "repository": "organization/repository", "updated_at": "2022-02-02T10:12:08Z"}, + {"id": 5, "repository": "organization/repository", "updated_at": "2022-02-02T10:12:10Z"}, + {"id": 6, "repository": "organization/repository", "updated_at": "2022-02-02T10:12:12Z"}, + {"id": 3, "repository": "airbytehq/airbyte", "updated_at": "2022-02-02T10:13:06Z"}, + {"id": 4, "repository": "airbytehq/airbyte", "updated_at": "2022-02-02T10:13:08Z"}, + {"id": 5, "repository": "airbytehq/airbyte", "updated_at": "2022-02-02T10:13:10Z"}, + {"id": 6, "repository": "airbytehq/airbyte", "updated_at": "2022-02-02T10:13:12Z"}, ] - assert 
stream_state == {"organization/repository": {"updated_at": "2022-02-02T10:10:12Z"}} + assert stream_state == { + "airbytehq/airbyte": {"updated_at": "2022-02-02T10:13:12Z"}, + "organization/repository": {"updated_at": "2022-02-02T10:12:12Z"}, + } @responses.activate @@ -689,85 +782,38 @@ def get_records(cursor_field): @responses.activate def test_stream_reviews_incremental_read(): - url_pulls = "https://api.github.com/repos/organization/repository/pulls" - repository_args_with_start_date = { "start_date": "2000-01-01T00:00:00Z", "page_size_for_large_streams": 30, - "repositories": ["organization/repository"], + "repositories": ["airbytehq/airbyte"], } - stream = Reviews(parent=PullRequests(**repository_args_with_start_date), **repository_args_with_start_date) + stream = Reviews(**repository_args_with_start_date) + stream.page_size = 2 - responses.add( - "GET", - url_pulls, - json=[ - {"updated_at": "2022-01-01T00:00:00Z", "number": 1}, - {"updated_at": "2022-01-02T00:00:00Z", "number": 2}, - ], - ) + f = Path(__file__).parent / "graphql_reviews_responses.json" + response_objects = json.load(open(f)) - responses.add( - "GET", - "https://api.github.com/repos/organization/repository/pulls/1/reviews", - json=[{"id": 1000, "body": "commit1"}, {"id": 1001, "body": "commit1"}], - ) + def request_callback(request): + return (HTTPStatus.OK, {}, json.dumps(response_objects.pop(0))) - responses.add( - "GET", - "https://api.github.com/repos/organization/repository/pulls/2/reviews", - json=[{"id": 1002, "body": "commit1"}], + responses.add_callback( + responses.POST, + "https://api.github.com/graphql", + callback=request_callback, + content_type="application/json", ) stream_state = {} records = read_incremental(stream, stream_state) + assert [r["id"] for r in records] == [1000, 1001, 1002, 1003, 1004, 1005, 1006, 1007, 1008] + assert stream_state == {"airbytehq/airbyte": {"updated_at": "2000-01-01T00:00:01Z"}} + assert len(responses.calls) == 4 - assert records == [ - {"body": 
"commit1", "id": 1000, "pull_request_updated_at": "2022-01-01T00:00:00Z", "repository": "organization/repository"}, - {"body": "commit1", "id": 1001, "pull_request_updated_at": "2022-01-01T00:00:00Z", "repository": "organization/repository"}, - {"body": "commit1", "id": 1002, "pull_request_updated_at": "2022-01-02T00:00:00Z", "repository": "organization/repository"}, - ] - - assert stream_state == {"organization/repository": {"pull_request_updated_at": "2022-01-02T00:00:00Z"}} - - responses.add( - "GET", - url_pulls, - json=[ - {"updated_at": "2022-01-03T00:00:00Z", "number": 1}, - {"updated_at": "2022-01-02T00:00:00Z", "number": 2}, - {"updated_at": "2022-01-04T00:00:00Z", "number": 3}, - ], - ) - - responses.add( - "GET", - "https://api.github.com/repos/organization/repository/pulls/1/reviews", - json=[{"id": 1000, "body": "commit1"}, {"id": 1001, "body": "commit2"}], - ) - - responses.add( - "GET", - "https://api.github.com/repos/organization/repository/pulls/3/reviews", - json=[{"id": 1003, "body": "commit1"}], - ) - + responses.calls.reset() records = read_incremental(stream, stream_state) - - assert records == [ - {"body": "commit1", "id": 1000, "pull_request_updated_at": "2022-01-03T00:00:00Z", "repository": "organization/repository"}, - {"body": "commit2", "id": 1001, "pull_request_updated_at": "2022-01-03T00:00:00Z", "repository": "organization/repository"}, - {"body": "commit1", "id": 1003, "pull_request_updated_at": "2022-01-04T00:00:00Z", "repository": "organization/repository"}, - ] - - assert stream_state == {"organization/repository": {"pull_request_updated_at": "2022-01-04T00:00:00Z"}} - - assert len(responses.calls) == 6 - assert urlbase(responses.calls[0].request.url) == url_pulls - # make sure parent stream PullRequests used ascending sorting for both HTTP requests - assert responses.calls[0].request.params["direction"] == "asc" - assert urlbase(responses.calls[3].request.url) == url_pulls - assert responses.calls[3].request.params["direction"] 
== "asc" + assert [r["id"] for r in records] == [1000, 1007, 1009] + assert stream_state == {"airbytehq/airbyte": {"updated_at": "2000-01-01T00:00:02Z"}} + assert len(responses.calls) == 4 @responses.activate @@ -878,3 +924,122 @@ def test_stream_commit_comment_reactions_incremental_read(): {"id": 154935432, "comment_id": 55538826, "created_at": "2022-02-01T16:00:00Z", "repository": "airbytehq/integration-test"}, {"id": 154935433, "comment_id": 55538827, "created_at": "2022-02-01T17:00:00Z", "repository": "airbytehq/integration-test"}, ] + + +@responses.activate +def test_stream_workflow_runs_read_incremental(monkeypatch): + + repository_args_with_start_date = { + "repositories": ["org/repos"], + "page_size_for_large_streams": 30, + "start_date": "2022-01-01T00:00:00Z", + } + + monkeypatch.setattr(streams, "DEFAULT_PAGE_SIZE", 1) + stream = WorkflowRuns(**repository_args_with_start_date) + + data = [ + {"id": 4, "created_at": "2022-02-05T00:00:00Z", "updated_at": "2022-02-05T00:00:00Z", "repository": {"full_name": "org/repos"}}, + {"id": 3, "created_at": "2022-01-15T00:00:00Z", "updated_at": "2022-01-15T00:00:00Z", "repository": {"full_name": "org/repos"}}, + {"id": 2, "created_at": "2022-01-03T00:00:00Z", "updated_at": "2022-01-03T00:00:00Z", "repository": {"full_name": "org/repos"}}, + {"id": 1, "created_at": "2022-01-02T00:00:00Z", "updated_at": "2022-01-02T00:00:00Z", "repository": {"full_name": "org/repos"}}, + ] + + responses.add( + "GET", + "https://api.github.com/repos/org/repos/actions/runs", + json={"total_count": len(data), "workflow_runs": data[0:1]}, + headers={"Link": '; rel="next"'}, + match=[matchers.query_param_matcher({"per_page": "1"}, strict_match=True)], + ) + + responses.add( + "GET", + "https://api.github.com/repos/org/repos/actions/runs", + json={"total_count": len(data), "workflow_runs": data[1:2]}, + headers={"Link": '; rel="next"'}, + match=[matchers.query_param_matcher({"per_page": "1", "page": "2"}, strict_match=True)], + ) + + 
responses.add( + "GET", + "https://api.github.com/repos/org/repos/actions/runs", + json={"total_count": len(data), "workflow_runs": data[2:3]}, + headers={"Link": '; rel="next"'}, + match=[matchers.query_param_matcher({"per_page": "1", "page": "3"}, strict_match=True)], + ) + + responses.add( + "GET", + "https://api.github.com/repos/org/repos/actions/runs", + json={"total_count": len(data), "workflow_runs": data[3:4]}, + match=[matchers.query_param_matcher({"per_page": "1", "page": "4"}, strict_match=True)], + ) + + state = {} + records = read_incremental(stream, state) + assert state == {"org/repos": {"updated_at": "2022-02-05T00:00:00Z"}} + + assert records == [ + {"id": 4, "repository": {"full_name": "org/repos"}, "created_at": "2022-02-05T00:00:00Z", "updated_at": "2022-02-05T00:00:00Z"}, + {"id": 3, "repository": {"full_name": "org/repos"}, "created_at": "2022-01-15T00:00:00Z", "updated_at": "2022-01-15T00:00:00Z"}, + {"id": 2, "repository": {"full_name": "org/repos"}, "created_at": "2022-01-03T00:00:00Z", "updated_at": "2022-01-03T00:00:00Z"}, + {"id": 1, "repository": {"full_name": "org/repos"}, "created_at": "2022-01-02T00:00:00Z", "updated_at": "2022-01-02T00:00:00Z"}, + ] + + assert len(responses.calls) == 4 + + data.insert( + 0, + { + "id": 5, + "created_at": "2022-02-07T00:00:00Z", + "updated_at": "2022-02-07T00:00:00Z", + "repository": {"full_name": "org/repos"}, + }, + ) + + data[2]["updated_at"] = "2022-02-08T00:00:00Z" + + responses.add( + "GET", + "https://api.github.com/repos/org/repos/actions/runs", + json={"total_count": len(data), "workflow_runs": data[0:1]}, + headers={"Link": '; rel="next"'}, + match=[matchers.query_param_matcher({"per_page": "1"}, strict_match=True)], + ) + + responses.add( + "GET", + "https://api.github.com/repos/org/repos/actions/runs", + json={"total_count": len(data), "workflow_runs": data[1:2]}, + headers={"Link": '; rel="next"'}, + match=[matchers.query_param_matcher({"per_page": "1", "page": "2"}, strict_match=True)], 
+ ) + + responses.add( + "GET", + "https://api.github.com/repos/org/repos/actions/runs", + json={"total_count": len(data), "workflow_runs": data[2:3]}, + headers={"Link": '; rel="next"'}, + match=[matchers.query_param_matcher({"per_page": "1", "page": "3"}, strict_match=True)], + ) + + responses.add( + "GET", + "https://api.github.com/repos/org/repos/actions/runs", + json={"total_count": len(data), "workflow_runs": data[3:4]}, + headers={"Link": '; rel="next"'}, + match=[matchers.query_param_matcher({"per_page": "1", "page": "4"}, strict_match=True)], + ) + + responses.calls.reset() + records = read_incremental(stream, state) + + assert state == {"org/repos": {"updated_at": "2022-02-08T00:00:00Z"}} + assert records == [ + {"id": 5, "repository": {"full_name": "org/repos"}, "created_at": "2022-02-07T00:00:00Z", "updated_at": "2022-02-07T00:00:00Z"}, + {"id": 3, "repository": {"full_name": "org/repos"}, "created_at": "2022-01-15T00:00:00Z", "updated_at": "2022-02-08T00:00:00Z"}, + ] + + assert len(responses.calls) == 4 diff --git a/airbyte-integrations/connectors/source-github/unit_tests/utils.py b/airbyte-integrations/connectors/source-github/unit_tests/utils.py index 472fe907a89f..b93f134f944d 100644 --- a/airbyte-integrations/connectors/source-github/unit_tests/utils.py +++ b/airbyte-integrations/connectors/source-github/unit_tests/utils.py @@ -3,7 +3,6 @@ # from typing import Any, MutableMapping -from urllib.parse import urlparse import responses from airbyte_cdk.models import SyncMode @@ -29,10 +28,6 @@ def read_incremental(stream_instance: Stream, stream_state: MutableMapping[str, return res -def urlbase(url): - return urlparse(url)._replace(params="", query="", fragment="").geturl() - - class ProjectsResponsesAPI: """ Fake Responses API for github projects, columns, cards diff --git a/airbyte-integrations/connectors/source-gitlab/Dockerfile b/airbyte-integrations/connectors/source-gitlab/Dockerfile index f831f3760d4c..68047e54ba2d 100644 --- 
a/airbyte-integrations/connectors/source-gitlab/Dockerfile +++ b/airbyte-integrations/connectors/source-gitlab/Dockerfile @@ -13,5 +13,5 @@ RUN pip install . ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] -LABEL io.airbyte.version=0.1.5 +LABEL io.airbyte.version=0.1.6 LABEL io.airbyte.name=airbyte/source-gitlab diff --git a/airbyte-integrations/connectors/source-gitlab/acceptance-test-config.yml b/airbyte-integrations/connectors/source-gitlab/acceptance-test-config.yml index e0be76c660f7..ff6869c4e821 100644 --- a/airbyte-integrations/connectors/source-gitlab/acceptance-test-config.yml +++ b/airbyte-integrations/connectors/source-gitlab/acceptance-test-config.yml @@ -12,6 +12,7 @@ tests: basic_read: - config_path: "secrets/config.json" configured_catalog_path: "integration_tests/configured_catalog.json" + empty_streams: ["group_issue_boards"] # We cannot use these tests for testing Incremental, since for Gitlab the State is saved for each Project separately, # and the Acceptance Tests at this stage do not support this functionality. # Therefore, we hardcode the cursor_paths for our config. 
diff --git a/airbyte-integrations/connectors/source-gitlab/integration_tests/configured_catalog.json b/airbyte-integrations/connectors/source-gitlab/integration_tests/configured_catalog.json index 5cbafa2a2dd2..5c0d4dc8c29a 100644 --- a/airbyte-integrations/connectors/source-gitlab/integration_tests/configured_catalog.json +++ b/airbyte-integrations/connectors/source-gitlab/integration_tests/configured_catalog.json @@ -43,6 +43,16 @@ "destination_sync_mode": "append", "cursor_field": ["created_at"] }, + { + "stream": { + "name": "group_issue_boards", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"], + "source_defined_primary_key": [["id"]] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, { "stream": { "name": "issues", diff --git a/airbyte-integrations/connectors/source-gitlab/source_gitlab/schemas/group_issue_boards.json b/airbyte-integrations/connectors/source-gitlab/source_gitlab/schemas/group_issue_boards.json new file mode 100644 index 000000000000..4d50781a45cc --- /dev/null +++ b/airbyte-integrations/connectors/source-gitlab/source_gitlab/schemas/group_issue_boards.json @@ -0,0 +1,61 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "id": { + "type": ["null", "integer"] + }, + "name": { + "type": ["null", "string"] + }, + "hide_backlog_list": { + "type": ["null", "boolean"] + }, + "hide_closed_list": { + "type": ["null", "boolean"] + }, + "project": { + "type": ["null", "integer"] + }, + "lists": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "id": { + "type": ["null", "integer"] + }, + "label": { + "type": ["null", "object"], + "properties": { + "id": { + "type": ["null", "integer"] + }, + "name": { + "type": ["null", "string"] + }, + "description": { + "type": ["null", "string"] + }, + "description_html": { + "type": ["null", "string"] + }, + "text_color": { + "type": ["null", "string"] + }, + "color": { + "type": 
["null", "string"] + } + } + }, + "position": { + "type": ["null", "integer"] + } + } + } + }, + "group_id": { + "type": ["null", "integer"] + } + } +} diff --git a/airbyte-integrations/connectors/source-gitlab/source_gitlab/source.py b/airbyte-integrations/connectors/source-gitlab/source_gitlab/source.py index 12db84130748..ca162994301a 100644 --- a/airbyte-integrations/connectors/source-gitlab/source_gitlab/source.py +++ b/airbyte-integrations/connectors/source-gitlab/source_gitlab/source.py @@ -17,6 +17,7 @@ EpicIssues, Epics, GitlabStream, + GroupIssueBoards, GroupLabels, GroupMembers, GroupMilestones, @@ -105,6 +106,7 @@ def streams(self, config: Mapping[str, Any]) -> List[Stream]: Commits(parent_stream=projects, repository_part=True, start_date=config["start_date"], **auth_params), epics, EpicIssues(parent_stream=epics, **auth_params), + GroupIssueBoards(parent_stream=groups, **auth_params), Issues(parent_stream=projects, start_date=config["start_date"], **auth_params), Jobs(parent_stream=pipelines, **auth_params), ProjectMilestones(parent_stream=projects, **auth_params), diff --git a/airbyte-integrations/connectors/source-gitlab/source_gitlab/streams.py b/airbyte-integrations/connectors/source-gitlab/source_gitlab/streams.py index e134a2075acb..114d56716ea1 100644 --- a/airbyte-integrations/connectors/source-gitlab/source_gitlab/streams.py +++ b/airbyte-integrations/connectors/source-gitlab/source_gitlab/streams.py @@ -306,6 +306,11 @@ def transform(self, record, stream_slice: Mapping[str, Any] = None, **kwargs): return record +class GroupIssueBoards(GitlabChildStream): + path_template = "groups/{id}/boards" + flatten_parent_id = True + + class Users(GitlabChildStream): pass diff --git a/airbyte-integrations/connectors/source-google-ads/Dockerfile b/airbyte-integrations/connectors/source-google-ads/Dockerfile index 3ff82c8e08f0..32322371644a 100644 --- a/airbyte-integrations/connectors/source-google-ads/Dockerfile +++ 
b/airbyte-integrations/connectors/source-google-ads/Dockerfile @@ -13,5 +13,5 @@ COPY main.py ./ ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] -LABEL io.airbyte.version=0.1.41 +LABEL io.airbyte.version=0.1.42 LABEL io.airbyte.name=airbyte/source-google-ads diff --git a/airbyte-integrations/connectors/source-google-ads/setup.py b/airbyte-integrations/connectors/source-google-ads/setup.py index 5323ff07478a..4f0bf491da5f 100644 --- a/airbyte-integrations/connectors/source-google-ads/setup.py +++ b/airbyte-integrations/connectors/source-google-ads/setup.py @@ -5,9 +5,9 @@ from setuptools import find_packages, setup -# grpcio-status is required by google ads but is not listed in its dependencies. -# this package must be of the same version range that grpcio is. -MAIN_REQUIREMENTS = ["airbyte-cdk~=0.1", "google-ads==14.1.0", "grpcio-status >= 1.38.1, < 2.0.0", "pendulum"] +# pin protobuf==3.20.0 as other versions may cause problems on different architectures +# (see https://github.com/airbytehq/airbyte/issues/13580) +MAIN_REQUIREMENTS = ["airbyte-cdk~=0.1", "google-ads==15.1.1", "protobuf==3.20.0", "pendulum"] TEST_REQUIREMENTS = ["pytest~=6.1", "pytest-mock", "freezegun", "requests-mock"] diff --git a/airbyte-integrations/connectors/source-google-ads/source_google_ads/google_ads.py b/airbyte-integrations/connectors/source-google-ads/source_google_ads/google_ads.py index 4950f820f3c0..b39d282a7924 100644 --- a/airbyte-integrations/connectors/source-google-ads/source_google_ads/google_ads.py +++ b/airbyte-integrations/connectors/source-google-ads/source_google_ads/google_ads.py @@ -30,6 +30,7 @@ "geographic_report": "geographic_view", "keyword_report": "keyword_view", } +API_VERSION = "v9" class GoogleAds: @@ -39,7 +40,7 @@ def __init__(self, credentials: MutableMapping[str, Any]): # `google-ads` library version `14.0.0` and higher requires an additional required parameter `use_proto_plus`. 
# More details can be found here: https://developers.google.com/google-ads/api/docs/client-libs/python/protobuf-messages credentials["use_proto_plus"] = True - self.client = GoogleAdsClient.load_from_dict(credentials) + self.client = GoogleAdsClient.load_from_dict(credentials, version=API_VERSION) self.ga_service = self.client.get_service("GoogleAdsService") def send_request(self, query: str, customer_id: str) -> Iterator[SearchGoogleAdsResponse]: diff --git a/airbyte-integrations/connectors/source-google-ads/unit_tests/common.py b/airbyte-integrations/connectors/source-google-ads/unit_tests/common.py index eb396a16ef00..0038abf2f2dc 100644 --- a/airbyte-integrations/connectors/source-google-ads/unit_tests/common.py +++ b/airbyte-integrations/connectors/source-google-ads/unit_tests/common.py @@ -5,7 +5,7 @@ import json from google.ads.googleads.errors import GoogleAdsException -from google.ads.googleads.v8 import GoogleAdsFailure +from google.ads.googleads.v9 import GoogleAdsFailure class MockSearchRequest: diff --git a/airbyte-integrations/connectors/source-google-ads/unit_tests/test_streams.py b/airbyte-integrations/connectors/source-google-ads/unit_tests/test_streams.py index acb65d2d75cd..5973871bc234 100644 --- a/airbyte-integrations/connectors/source-google-ads/unit_tests/test_streams.py +++ b/airbyte-integrations/connectors/source-google-ads/unit_tests/test_streams.py @@ -7,8 +7,8 @@ import pytest from airbyte_cdk.models import SyncMode from google.ads.googleads.errors import GoogleAdsException -from google.ads.googleads.v8.errors.types.errors import ErrorCode, GoogleAdsError, GoogleAdsFailure -from google.ads.googleads.v8.errors.types.request_error import RequestErrorEnum +from google.ads.googleads.v9.errors.types.errors import ErrorCode, GoogleAdsError, GoogleAdsFailure +from google.ads.googleads.v9.errors.types.request_error import RequestErrorEnum from grpc import RpcError from source_google_ads.google_ads import GoogleAds from source_google_ads.streams 
import ClickView diff --git a/airbyte-integrations/connectors/source-google-analytics-data-api/.dockerignore b/airbyte-integrations/connectors/source-google-analytics-data-api/.dockerignore new file mode 100644 index 000000000000..7f4116453dc7 --- /dev/null +++ b/airbyte-integrations/connectors/source-google-analytics-data-api/.dockerignore @@ -0,0 +1,6 @@ +* +!Dockerfile +!main.py +!source_google_analytics_data_api +!setup.py +!secrets diff --git a/airbyte-integrations/connectors/source-google-analytics-data-api/Dockerfile b/airbyte-integrations/connectors/source-google-analytics-data-api/Dockerfile new file mode 100644 index 000000000000..fa3b8025c9f2 --- /dev/null +++ b/airbyte-integrations/connectors/source-google-analytics-data-api/Dockerfile @@ -0,0 +1,32 @@ +FROM python:3.9.11-slim as base + +# build and load all requirements +FROM base as builder +WORKDIR /airbyte/integration_code + +# upgrade pip to the latest version +RUN apt update -y && apt upgrade -y + +COPY setup.py ./ +# install necessary packages to a temporary folder +RUN pip install --prefix=/install . 
+ +# build a clean environment +FROM base +WORKDIR /airbyte/integration_code + +# copy all loaded and built libraries to a pure basic image +COPY --from=builder /install /usr/local +# add default timezone settings +COPY --from=builder /usr/share/zoneinfo/Etc/UTC /etc/localtime +RUN echo "Etc/UTC" > /etc/timezone + +# copy payload code only +COPY main.py ./ +COPY source_google_analytics_data_api ./source_google_analytics_data_api + +ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" +ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] + +LABEL io.airbyte.version=0.0.1 +LABEL io.airbyte.name=airbyte/source-google-analytics-data-api diff --git a/airbyte-integrations/connectors/source-google-analytics-data-api/README.md b/airbyte-integrations/connectors/source-google-analytics-data-api/README.md new file mode 100644 index 000000000000..4d44636be217 --- /dev/null +++ b/airbyte-integrations/connectors/source-google-analytics-data-api/README.md @@ -0,0 +1,129 @@ +# Google Analytics Data Api Source + +This is the repository for the Google Analytics Data Api source connector, written in Python. +For information about how to use this connector within Airbyte, see [the documentation](https://docs.airbyte.io/integrations/sources/google-analytics-data-api). + +## Local development + +### Prerequisites +**To iterate on this connector, make sure to complete this prerequisites section.** + +#### Minimum Python version required `= 3.7.0` + +#### Build & Activate Virtual Environment and install dependencies +From this connector directory, create a virtual environment: +``` +python -m venv .venv +``` + +This will generate a virtualenv for this module in `.venv/`. Make sure this venv is active in your +development environment of choice. To activate it from the terminal, run: +``` +source .venv/bin/activate +pip install -r requirements.txt +``` +If you are in an IDE, follow your IDE's instructions to activate the virtualenv. 
+ +Note that while we are installing dependencies from `requirements.txt`, you should only edit `setup.py` for your dependencies. `requirements.txt` is +used for editable installs (`pip install -e`) to pull in Python dependencies from the monorepo and will call `setup.py`. +If this is mumbo jumbo to you, don't worry about it, just put your deps in `setup.py` but install using `pip install -r requirements.txt` and everything +should work as you expect. + +#### Building via Gradle +From the Airbyte repository root, run: +``` +./gradlew :airbyte-integrations:connectors:source-google-analytics-data-api:build +``` + +#### Create credentials +**If you are a community contributor**, follow the instructions in the [documentation](https://docs.airbyte.io/integrations/sources/google-analytics-data-api) +to generate the necessary credentials. Then create a file `secrets/config.json` conforming to the `source_google_analytics_data_api/spec.{yaml,json}` file. +Note that the `secrets` directory is gitignored by default, so there is no danger of accidentally checking in sensitive information. +See `integration_tests/sample_config.json` for a sample config file. + +**If you are an Airbyte core member**, copy the credentials in Lastpass under the secret name `source google-analytics-data-api test creds` +and place them into `secrets/config.json`. + +### Locally running the connector +``` +python main.py spec +python main.py check --config secrets/config.json +python main.py discover --config secrets/config.json +python main.py read --config secrets/config.json --catalog integration_tests/configured_catalog.json +``` + +### Locally running the connector docker image + +#### Build +First, make sure you build the latest Docker image: +``` +docker build . 
-t airbyte/source-google-analytics-data-api:dev +``` + +You can also build the connector image via Gradle: +``` +./gradlew :airbyte-integrations:connectors:source-google-analytics-data-api:airbyteDocker +``` +When building via Gradle, the docker image name and tag, respectively, are the values of the `io.airbyte.name` and `io.airbyte.version` `LABEL`s in +the Dockerfile. + +#### Run +Then run any of the connector commands as follows: +``` +docker run --rm airbyte/source-google-analytics-data-api:dev spec +docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-google-analytics-data-api:dev check --config /secrets/config.json +docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-google-analytics-data-api:dev discover --config /secrets/config.json +docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/source-google-analytics-data-api:dev read --config /secrets/config.json --catalog /integration_tests/configured_catalog.json +``` +## Testing + Make sure to familiarize yourself with [pytest test discovery](https://docs.pytest.org/en/latest/goodpractices.html#test-discovery) to know how your test files and methods should be named. +First install test dependencies into your virtual environment: +``` +pip install .[tests] +``` +### Unit Tests +To run unit tests locally, from the connector directory run: +``` +python -m pytest unit_tests +``` + +### Integration Tests +There are two types of integration tests: Acceptance Tests (Airbyte's test suite for all source connectors) and custom integration tests (which are specific to this connector). +#### Custom Integration tests +Place custom tests inside `integration_tests/` folder, then, from the connector root, run +``` +python -m pytest integration_tests +``` +#### Acceptance Tests +Customize `acceptance-test-config.yml` file to configure tests. 
See [Source Acceptance Tests](https://docs.airbyte.io/connector-development/testing-connectors/source-acceptance-tests-reference) for more information. +If your connector requires to create or destroy resources for use during acceptance tests create fixtures for it and place them inside integration_tests/acceptance.py. +To run your integration tests with acceptance tests, from the connector root, run +``` +python -m pytest integration_tests -p integration_tests.acceptance +``` +To run your integration tests with docker + +### Using gradle to run tests +All commands should be run from airbyte project root. +To run unit tests: +``` +./gradlew :airbyte-integrations:connectors:source-google-analytics-data-api:unitTest +``` +To run acceptance and custom integration tests: +``` +./gradlew :airbyte-integrations:connectors:source-google-analytics-data-api:integrationTest +``` + +## Dependency Management +All of your dependencies should go in `setup.py`, NOT `requirements.txt`. The requirements file is only used to connect internal Airbyte dependencies in the monorepo for local development. +We split dependencies between two groups, dependencies that are: +* required for your connector to work need to go to `MAIN_REQUIREMENTS` list. +* required for the testing need to go to `TEST_REQUIREMENTS` list + +### Publishing a new version of the connector +You've checked out the repo, implemented a million dollar feature, and you're ready to share your changes with the world. Now what? +1. Make sure your changes are passing unit and integration tests. +1. Bump the connector version in `Dockerfile` -- just increment the value of the `LABEL io.airbyte.version` appropriately (we use [SemVer](https://semver.org/)). +1. Create a Pull Request. +1. Pat yourself on the back for being an awesome contributor. +1. Someone from Airbyte will take a look at your PR and iterate with you to merge it into master. 
diff --git a/airbyte-integrations/connectors/source-google-analytics-data-api/acceptance-test-config.yml b/airbyte-integrations/connectors/source-google-analytics-data-api/acceptance-test-config.yml new file mode 100644 index 000000000000..c08884b79567 --- /dev/null +++ b/airbyte-integrations/connectors/source-google-analytics-data-api/acceptance-test-config.yml @@ -0,0 +1,20 @@ +# See [Source Acceptance Tests](https://docs.airbyte.io/connector-development/testing-connectors/source-acceptance-tests-reference) +# for more information about how to configure these tests +connector_image: airbyte/source-google-analytics-data-api:dev +tests: + spec: + - spec_path: "source_google_analytics_data_api/spec.json" + connection: + - config_path: "secrets/config.json" + status: "succeed" + - config_path: "integration_tests/invalid_config.json" + status: "failed" + discovery: + - config_path: "secrets/config.json" + basic_read: + - config_path: "secrets/config.json" + configured_catalog_path: "integration_tests/configured_catalog.json" + empty_streams: [] + full_refresh: + - config_path: "secrets/config.json" + configured_catalog_path: "integration_tests/configured_catalog.json" diff --git a/airbyte-integrations/connectors/source-google-analytics-data-api/acceptance-test-docker.sh b/airbyte-integrations/connectors/source-google-analytics-data-api/acceptance-test-docker.sh new file mode 100755 index 000000000000..c51577d10690 --- /dev/null +++ b/airbyte-integrations/connectors/source-google-analytics-data-api/acceptance-test-docker.sh @@ -0,0 +1,16 @@ +#!/usr/bin/env sh + +# Build latest connector image +docker build . 
-t $(cat acceptance-test-config.yml | grep "connector_image" | head -n 1 | cut -d: -f2-) + +# Pull latest acctest image +docker pull airbyte/source-acceptance-test:latest + +# Run +docker run --rm -it \ + -v /var/run/docker.sock:/var/run/docker.sock \ + -v /tmp:/tmp \ + -v $(pwd):/test_input \ + airbyte/source-acceptance-test \ + --acceptance-test-config /test_input + diff --git a/airbyte-integrations/connectors/source-google-analytics-data-api/build.gradle b/airbyte-integrations/connectors/source-google-analytics-data-api/build.gradle new file mode 100644 index 000000000000..46fbe7b6e812 --- /dev/null +++ b/airbyte-integrations/connectors/source-google-analytics-data-api/build.gradle @@ -0,0 +1,9 @@ +plugins { + id 'airbyte-python' + id 'airbyte-docker' + id 'airbyte-source-acceptance-test' +} + +airbytePython { + moduleDirectory 'source_google_analytics_data_api_singer' +} diff --git a/airbyte-integrations/connectors/source-google-analytics-data-api/integration_tests/__init__.py b/airbyte-integrations/connectors/source-google-analytics-data-api/integration_tests/__init__.py new file mode 100644 index 000000000000..1100c1c58cf5 --- /dev/null +++ b/airbyte-integrations/connectors/source-google-analytics-data-api/integration_tests/__init__.py @@ -0,0 +1,3 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# diff --git a/airbyte-integrations/connectors/source-google-analytics-data-api/integration_tests/acceptance.py b/airbyte-integrations/connectors/source-google-analytics-data-api/integration_tests/acceptance.py new file mode 100644 index 000000000000..1d66fbf1a331 --- /dev/null +++ b/airbyte-integrations/connectors/source-google-analytics-data-api/integration_tests/acceptance.py @@ -0,0 +1,13 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+# + + +import pytest + +pytest_plugins = ("source_acceptance_test.plugin",) + + +@pytest.fixture(scope="session", autouse=True) +def connector_setup(): + yield diff --git a/airbyte-integrations/connectors/source-google-analytics-data-api/integration_tests/configured_catalog.json b/airbyte-integrations/connectors/source-google-analytics-data-api/integration_tests/configured_catalog.json new file mode 100644 index 000000000000..b6849522598b --- /dev/null +++ b/airbyte-integrations/connectors/source-google-analytics-data-api/integration_tests/configured_catalog.json @@ -0,0 +1,15 @@ +{ + "streams": [ + { + "stream": { + "name": "Analytics Report", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"], + "source_defined_cursor": false, + "default_cursor_field": ["column_name"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + } + ] +} diff --git a/airbyte-integrations/connectors/source-google-analytics-data-api/integration_tests/input_state.json b/airbyte-integrations/connectors/source-google-analytics-data-api/integration_tests/input_state.json new file mode 100644 index 000000000000..62adee15c75d --- /dev/null +++ b/airbyte-integrations/connectors/source-google-analytics-data-api/integration_tests/input_state.json @@ -0,0 +1 @@ +{"crash_report":{"date":"20220429"}} diff --git a/airbyte-integrations/connectors/source-google-analytics-data-api/integration_tests/invalid_config.json b/airbyte-integrations/connectors/source-google-analytics-data-api/integration_tests/invalid_config.json new file mode 100644 index 000000000000..10e4173e92e8 --- /dev/null +++ b/airbyte-integrations/connectors/source-google-analytics-data-api/integration_tests/invalid_config.json @@ -0,0 +1,9 @@ +{ + "property_id": "1", + "json_credentials": "wrong", + "report_name": "crash_report", + "dimensions": "date, operatingSystem, streamId", + "metrics": "crashAffectedUsers, crashFreeUsersRate, totalUsers", + "date_ranges_start_date": "30daysAgo", + 
"date_ranges_end_date": "yesterday" +} diff --git a/airbyte-integrations/connectors/source-google-analytics-data-api/main.py b/airbyte-integrations/connectors/source-google-analytics-data-api/main.py new file mode 100644 index 000000000000..ba61cf98fe18 --- /dev/null +++ b/airbyte-integrations/connectors/source-google-analytics-data-api/main.py @@ -0,0 +1,13 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +import sys + +from airbyte_cdk.entrypoint import launch +from source_google_analytics_data_api import SourceGoogleAnalyticsDataApi + +if __name__ == "__main__": + source = SourceGoogleAnalyticsDataApi() + launch(source, sys.argv[1:]) diff --git a/airbyte-integrations/connectors/source-google-analytics-data-api/requirements.txt b/airbyte-integrations/connectors/source-google-analytics-data-api/requirements.txt new file mode 100644 index 000000000000..7be17a56d745 --- /dev/null +++ b/airbyte-integrations/connectors/source-google-analytics-data-api/requirements.txt @@ -0,0 +1,3 @@ +# This file is autogenerated -- only edit if you know what you are doing. Use setup.py for declaring dependencies. +-e ../../bases/source-acceptance-test +-e . diff --git a/airbyte-integrations/connectors/source-google-analytics-data-api/setup.py b/airbyte-integrations/connectors/source-google-analytics-data-api/setup.py new file mode 100644 index 000000000000..60b3728cf709 --- /dev/null +++ b/airbyte-integrations/connectors/source-google-analytics-data-api/setup.py @@ -0,0 +1,27 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+# + + +from setuptools import find_packages, setup + +MAIN_REQUIREMENTS = ["airbyte-cdk~=0.1", "google-analytics-data==0.11.2"] + +TEST_REQUIREMENTS = [ + "pytest~=6.1", + "pytest-mock~=3.6.1", + "source-acceptance-test", +] + +setup( + name="source_google_analytics_data_api", + description="Source implementation for Google Analytics Data Api.", + author="Airbyte", + author_email="contact@airbyte.io", + packages=find_packages(), + install_requires=MAIN_REQUIREMENTS, + package_data={"": ["*.json", "schemas/*.json"]}, + extras_require={ + "tests": TEST_REQUIREMENTS, + }, +) diff --git a/airbyte-integrations/connectors/source-google-analytics-data-api/source_google_analytics_data_api/__init__.py b/airbyte-integrations/connectors/source-google-analytics-data-api/source_google_analytics_data_api/__init__.py new file mode 100644 index 000000000000..0a1fac435046 --- /dev/null +++ b/airbyte-integrations/connectors/source-google-analytics-data-api/source_google_analytics_data_api/__init__.py @@ -0,0 +1,8 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +from .source import SourceGoogleAnalyticsDataApi + +__all__ = ["SourceGoogleAnalyticsDataApi"] diff --git a/airbyte-integrations/connectors/source-google-analytics-data-api/source_google_analytics_data_api/client.py b/airbyte-integrations/connectors/source-google-analytics-data-api/source_google_analytics_data_api/client.py new file mode 100644 index 000000000000..3554b72a0b4a --- /dev/null +++ b/airbyte-integrations/connectors/source-google-analytics-data-api/source_google_analytics_data_api/client.py @@ -0,0 +1,63 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+# + + +from collections import Mapping +from typing import Any, Dict, List + +from google.analytics.data_v1beta import BetaAnalyticsDataClient, DateRange, Dimension, Metric, OrderBy, RunReportRequest, RunReportResponse +from google.oauth2 import service_account + +DEFAULT_CURSOR_FIELD = "date" + + +class Client: + def __init__(self, json_credentials: Mapping[str, str]): + self.json_credentials = json_credentials + + def run_report(self, property_id: str, dimensions: List[str], metrics: List[str], start_date: str, end_date: str) -> RunReportResponse: + dimensions = [Dimension(name=dim) for dim in dimensions if dim != DEFAULT_CURSOR_FIELD] + dimensions.append(Dimension(name=DEFAULT_CURSOR_FIELD)) + + metrics = [Metric(name=metric) for metric in metrics] + + credentials = service_account.Credentials.from_service_account_info(self.json_credentials) + client = BetaAnalyticsDataClient(credentials=credentials) + + request = RunReportRequest( + property=f"properties/{property_id}", + dimensions=dimensions, + metrics=metrics, + date_ranges=[DateRange(start_date=start_date, end_date=end_date)], + order_bys=[ + OrderBy( + dimension=OrderBy.DimensionOrderBy( + dimension_name=DEFAULT_CURSOR_FIELD, order_type=OrderBy.DimensionOrderBy.OrderType.ALPHANUMERIC + ) + ) + ], + ) + + return client.run_report(request) + + @staticmethod + def response_to_list(response: RunReportResponse) -> List[Dict[str, Any]]: + """ + Returns the report response as a list of dictionaries + + :param response: The run report response + + :return: A list of dictionaries, the key is either dimension name or metric name and the value is the dimension or the metric value + """ + dimensions = list(map(lambda h: h.name, response.dimension_headers)) + metrics = list(map(lambda h: h.name, response.metric_headers)) + + rows = [] + + for row in response.rows: + data = dict(zip(dimensions, list(map(lambda v: v.value, row.dimension_values)))) + data.update(dict(zip(metrics, list(map(lambda v: float(v.value), 
#
# Copyright (c) 2022 Airbyte, Inc., all rights reserved.
#


import json
import logging
from datetime import datetime
from typing import Any, Generator, Mapping, MutableMapping

from airbyte_cdk.logger import AirbyteLogger
from airbyte_cdk.models import (
    AirbyteCatalog,
    AirbyteConnectionStatus,
    AirbyteMessage,
    AirbyteRecordMessage,
    AirbyteStateMessage,
    AirbyteStream,
    ConfiguredAirbyteCatalog,
    Status,
    SyncMode,
    Type,
)
from airbyte_cdk.sources import Source
from google.analytics.data_v1beta import RunReportResponse
from source_google_analytics_data_api.client import DEFAULT_CURSOR_FIELD, Client


class SourceGoogleAnalyticsDataApi(Source):
    """Airbyte source exposing one configurable GA4 report as a single stream."""

    def check(self, logger: AirbyteLogger, config: Mapping[str, Any]) -> AirbyteConnectionStatus:
        """
        Tests if the input configuration can be used to successfully connect to the integration
        e.g: if a provided Stripe API token can be used to connect to the Stripe API.

        :param logger: Logging object to display debug/info/error to the logs
            (logs will not be accessible via airbyte UI if they are not passed to this logger)
        :param config: Json object containing the configuration of this source, content of this json is as specified in
            the properties of the spec.json/spec.yaml file

        :return: AirbyteConnectionStatus indicating a Success or Failure
        """
        try:
            # Running the configured report end-to-end doubles as the
            # connectivity probe (credentials, property id, dimensions, metrics).
            self._run_report(config)

            return AirbyteConnectionStatus(status=Status.SUCCEEDED)
        except Exception as e:
            return AirbyteConnectionStatus(status=Status.FAILED, message=f"An exception occurred: {str(e)}")

    def discover(self, logger: AirbyteLogger, config: Mapping[str, Any]) -> AirbyteCatalog:
        """
        Returns an AirbyteCatalog representing the available streams and fields in this integration.

        The schema is derived dynamically by running the configured report once
        and reading its dimension/metric headers: dimensions map to strings,
        metrics to numbers, and the cursor field is always present.

        :param logger: Logging object to display debug/info/error to the logs
            (logs will not be accessible via airbyte UI if they are not passed to this logger)
        :param config: Json object containing the configuration of this source, content of this json is as specified in
            the properties of the spec.json/spec.yaml file

        :return: AirbyteCatalog describing the single report stream of this source.
        """
        report_name = config.get("report_name")

        response = self._run_report(config)

        properties = {DEFAULT_CURSOR_FIELD: {"type": "string"}}

        for dimension in response.dimension_headers:
            properties[dimension.name] = {"type": "string"}

        for metric in response.metric_headers:
            properties[metric.name] = {"type": "number"}

        json_schema = {
            "$schema": "http://json-schema.org/draft-07/schema#",
            "type": "object",
            "properties": properties,
        }

        # All dimensions together form a composite primary key (a metric alone
        # cannot identify a row).
        primary_key = [[h.name] for h in response.dimension_headers]

        stream = AirbyteStream(
            name=report_name,
            json_schema=json_schema,
            supported_sync_modes=[SyncMode.full_refresh, SyncMode.incremental],
            source_defined_primary_key=primary_key,
            default_cursor_field=[DEFAULT_CURSOR_FIELD],
        )
        return AirbyteCatalog(streams=[stream])

    def read(
        self, logger: logging.Logger, config: Mapping[str, Any], catalog: ConfiguredAirbyteCatalog, state: MutableMapping[str, Any] = None
    ) -> Generator[AirbyteMessage, None, None]:
        """
        Returns a generator of the AirbyteMessages generated by reading the source with the given configuration,
        catalog, and state.

        :param logger: Logging object to display debug/info/error to the logs
            (logs will not be accessible via airbyte UI if they are not passed to this logger)
        :param config: Json object containing the configuration of this source, content of this json is as specified in
            the properties of the spec.json/spec.yaml file
        :param catalog: The configured catalog (currently unused: the whole report is always read)
        :param state: Checkpoint from a previous run, mapping report name to the last seen cursor value

        :return: A generator that produces a stream of AirbyteRecordMessage contained in AirbyteMessage object,
            followed by one AirbyteStateMessage carrying the new cursor value.
        """
        report_name = config.get("report_name")

        response = self._run_report(config)
        rows = Client.response_to_list(response)

        # BUGFIX: "state" defaults to None on the very first sync; normalize it
        # so the .get() lookups below do not raise AttributeError.
        state = state or {}
        last_cursor_value = state.get(report_name, {}).get(DEFAULT_CURSOR_FIELD, "")

        for row in rows:
            # Rows are ordered ascending by the cursor, so anything below the
            # checkpoint was already emitted in a previous run.
            if last_cursor_value <= row[DEFAULT_CURSOR_FIELD]:
                yield AirbyteMessage(
                    type=Type.RECORD,
                    # emitted_at is milliseconds since epoch; multiply before
                    # truncating so sub-second precision is preserved.
                    record=AirbyteRecordMessage(stream=report_name, data=row, emitted_at=int(datetime.now().timestamp() * 1000)),
                )

            last_cursor_value = row[DEFAULT_CURSOR_FIELD]

        yield AirbyteMessage(type=Type.STATE, state=AirbyteStateMessage(data={report_name: {DEFAULT_CURSOR_FIELD: last_cursor_value}}))

    @staticmethod
    def _run_report(config: Mapping[str, Any]) -> RunReportResponse:
        """Build a Client from the config and execute the configured report."""
        property_id = config.get("property_id")
        # Tolerate whitespace around commas and trailing commas in the
        # user-supplied comma-separated lists.
        dimensions = [d.strip() for d in config.get("dimensions", "").split(",") if d.strip()]
        metrics = [m.strip() for m in config.get("metrics", "").split(",") if m.strip()]
        start_date = config.get("date_ranges_start_date")
        end_date = config.get("date_ranges_end_date")
        json_credentials = config.get("json_credentials")

        return Client(json.loads(json_credentials)).run_report(property_id, dimensions, metrics, start_date, end_date)
{
  "documentationUrl": "https://docs.airbyte.com/integrations/sources/google-analytics-data-api",
  "connectionSpecification": {
    "$schema": "http://json-schema.org/draft-07/schema#",
    "title": "Google Analytics Data API Spec",
    "type": "object",
    "required": [
      "property_id",
      "json_credentials",
      "report_name",
      "dimensions",
      "metrics",
      "date_ranges_start_date",
      "date_ranges_end_date"
    ],
    "additionalProperties": false,
    "properties": {
      "property_id": {
        "type": "string",
        "title": "Property ID",
        "description": "A Google Analytics GA4 property identifier whose events are tracked. Specified in the URL path and not the body",
        "order": 1
      },
      "json_credentials": {
        "type": "string",
        "title": "JSON Credentials",
        "description": "The JSON key of the Service Account to use for authorization",
        "airbyte_secret": true,
        "order": 2
      },
      "report_name": {
        "type": "string",
        "title": "Report Name",
        "description": "The report name",
        "order": 3
      },
      "dimensions": {
        "type": "string",
        "title": "Dimensions",
        "description": "Comma separated report dimensions https://developers.google.com/analytics/devguides/reporting/data/v1/api-schema#dimensions",
        "order": 4
      },
      "metrics": {
        "type": "string",
        "title": "Metrics",
        "description": "Comma separated report metrics https://developers.google.com/analytics/devguides/reporting/data/v1/api-schema#metrics",
        "order": 5
      },
      "date_ranges_start_date": {
        "type": "string",
        "title": "Date Range Start Date",
        "description": "The start date. One of the values NdaysAgo, yesterday, today or in the format YYYY-MM-DD",
        "order": 6
      },
      "date_ranges_end_date": {
        "type": "string",
        "title": "Date Range End Date",
        "description": "The end date. One of the values NdaysAgo, yesterday, today or in the format YYYY-MM-DD",
        "order": 7
      }
    }
  }
}
#
# Copyright (c) 2022 Airbyte, Inc., all rights reserved.
#

from unittest.mock import MagicMock

from airbyte_cdk.models import AirbyteConnectionStatus, Status
from source_google_analytics_data_api import SourceGoogleAnalyticsDataApi


def test_check_connection(mocker):
    """check() reports SUCCEEDED when the probe report runs without raising."""
    source = SourceGoogleAnalyticsDataApi()

    # Stub out the network call so check() sees a successful report run.
    mocker.patch.object(SourceGoogleAnalyticsDataApi, "_run_report", return_value=MagicMock())

    result = source.check(MagicMock(), MagicMock())

    assert result == AirbyteConnectionStatus(status=Status.SUCCEEDED)


def test_discover(mocker):
    """discover() exposes exactly one stream built from the report headers."""
    source = SourceGoogleAnalyticsDataApi()

    dim_header = MagicMock()
    dim_header.name = "dimensions"

    metric_header = MagicMock()
    metric_header.name = "metrics"

    fake_report = MagicMock(dimension_headers=[dim_header], metric_headers=[metric_header])
    mocker.patch.object(SourceGoogleAnalyticsDataApi, "_run_report", return_value=fake_report)

    catalog = source.discover(MagicMock(), {"report_name": "test"})

    assert len(catalog.streams) == 1
a/airbyte-integrations/connectors/source-google-search-console/Dockerfile b/airbyte-integrations/connectors/source-google-search-console/Dockerfile index 49d3c71348d8..12f0a4d5639c 100755 --- a/airbyte-integrations/connectors/source-google-search-console/Dockerfile +++ b/airbyte-integrations/connectors/source-google-search-console/Dockerfile @@ -10,7 +10,6 @@ COPY setup.py ./ RUN pip install . ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" -ENV SENTRY_DSN "https://d4b03de0c4574c78999b8d58e55243dc@o1009025.ingest.sentry.io/6102835" ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] LABEL io.airbyte.version=0.1.12 diff --git a/airbyte-integrations/connectors/source-hubspot/Dockerfile b/airbyte-integrations/connectors/source-hubspot/Dockerfile index 3609df412a82..14de9703e161 100644 --- a/airbyte-integrations/connectors/source-hubspot/Dockerfile +++ b/airbyte-integrations/connectors/source-hubspot/Dockerfile @@ -34,5 +34,5 @@ COPY source_hubspot ./source_hubspot ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] -LABEL io.airbyte.version=0.1.68 +LABEL io.airbyte.version=0.1.72 LABEL io.airbyte.name=airbyte/source-hubspot diff --git a/airbyte-integrations/connectors/source-hubspot/acceptance-test-config.yml b/airbyte-integrations/connectors/source-hubspot/acceptance-test-config.yml index 5be369de136d..e9c423e52a15 100644 --- a/airbyte-integrations/connectors/source-hubspot/acceptance-test-config.yml +++ b/airbyte-integrations/connectors/source-hubspot/acceptance-test-config.yml @@ -19,31 +19,89 @@ tests: - config_path: "secrets/config.json" timeout_seconds: 600 configured_catalog_path: "sample_files/basic_read_catalog.json" - empty_streams: ["workflows", "form_submissions", "ticket_pipelines"] - expect_records: - path: "integration_tests/expected_records.txt" + empty_streams: ["form_submissions", "ticket_pipelines", "engagements_meetings", "engagements_emails", 
"engagements", "feedback_submissions", "engagements_calls", "quotes"] + # This test commented out, since it produces errors during active testing + # expect_records: + # path: "integration_tests/expected_records.txt" - config_path: "secrets/config_oauth.json" timeout_seconds: 600 configured_catalog_path: "sample_files/basic_read_oauth_catalog.json" - empty_streams: ["workflows", "contacts_list_memberships", "form_submissions", "ticket_pipelines"] - expect_records: - path: "integration_tests/expected_records.txt" + empty_streams: ["form_submissions", "ticket_pipelines", "engagements_meetings", "engagements_emails", "engagements", "feedback_submissions", "engagements_calls", "quotes"] + # This test commented out, since it produces errors during active testing + # expect_records: + # path: "integration_tests/expected_records.txt" incremental: - config_path: "secrets/config.json" configured_catalog_path: "sample_files/incremental_catalog.json" future_state_path: "integration_tests/abnormal_state.json" full_refresh: - config_path: "secrets/config.json" - # This config is the same as for basic read except for missing `property_history` stream. - # This stream emits records with dynamic timestamps under some conditions - # (properties like `hs_time_in_lead`, `hs_time_in_subscriber` etc.) which makes it impossible to compare records. - # Ignoring these fields makes testing this stream senseless because those valuable values of other property types - # will also be ignored. Instead, we test this stream against expected records in basic read test. 
configured_catalog_path: "sample_files/full_refresh_catalog.json" + ignored_fields: + "companies": [ "properties", "hs_time_in_customer" ] + "companies": [ "properties", "hs_time_in_evangelist" ] + "companies": [ "properties", "hs_time_in_lead" ] + "companies": [ "properties", "hs_time_in_marketingqualifiedlead" ] + "companies": [ "properties", "hs_time_in_opportunity" ] + "companies": [ "properties", "hs_time_in_other" ] + "companies": [ "properties", "hs_time_in_salesqualifiedlead" ] + "companies": [ "properties", "hs_time_in_subscriber" ] + "contacts": [ "properties", "hs_time_in_customer" ] + "contacts": [ "properties", "hs_time_in_evangelist" ] + "contacts": [ "properties", "hs_time_in_lead" ] + "contacts": [ "properties", "hs_time_in_marketingqualifiedlead" ] + "contacts": [ "properties", "hs_time_in_opportunity" ] + "contacts": [ "properties", "hs_time_in_other" ] + "contacts": [ "properties", "hs_time_in_salesqualifiedlead" ] + "contacts": [ "properties", "hs_time_in_subscriber" ] + "deals": [ "properties", "hs_time_in_9567448" ] + "deals": [ "properties", "hs_time_in_9567449" ] + "deals": [ "properties", "hs_time_in_appointmentscheduled" ] + "deals": [ "properties", "hs_time_in_closedlost" ] + "deals": [ "properties", "hs_time_in_closedwon" ] + "deals": [ "properties", "hs_time_in_contractsent" ] + "deals": [ "properties", "hs_time_in_customclosedwonstage" ] + "deals": [ "properties", "hs_time_in_decisionmakerboughtin" ] + "deals": [ "properties", "hs_time_in_presentationscheduled" ] + "deals": [ "properties", "hs_time_in_qualifiedtobuy" ] + "tickets": [ "properties", "hs_time_in_1" ] + "tickets": [ "properties", "hs_time_in_2" ] + "tickets": [ "properties", "hs_time_in_3" ] + "tickets": [ "properties", "hs_time_in_4" ] + "property_history": [ "property", "hs_time_in_lead" ] + "property_history": [ "property", "hs_time_in_subscriber" ] - config_path: "secrets/config_oauth.json" - # This config is the same as for basic read except for missing 
`property_history` stream. - # This stream emits records with dynamic timestamps under some conditions - # (properties like `hs_time_in_lead`, `hs_time_in_subscriber` etc.) which makes it impossible to compare records. - # Ignoring these fields makes testing this stream senseless because those valuable values of other property types - # will also be ignored. Instead, we test this stream against expected records in basic read test. - configured_catalog_path: "sample_files/full_refresh_oauth_catalog.json" \ No newline at end of file + configured_catalog_path: "sample_files/full_refresh_oauth_catalog.json" + ignored_fields: + "companies": [ "properties", "hs_time_in_customer" ] + "companies": [ "properties", "hs_time_in_evangelist" ] + "companies": [ "properties", "hs_time_in_lead" ] + "companies": [ "properties", "hs_time_in_marketingqualifiedlead" ] + "companies": [ "properties", "hs_time_in_opportunity" ] + "companies": [ "properties", "hs_time_in_other" ] + "companies": [ "properties", "hs_time_in_salesqualifiedlead" ] + "companies": [ "properties", "hs_time_in_subscriber" ] + "contacts": [ "properties", "hs_time_in_customer" ] + "contacts": [ "properties", "hs_time_in_evangelist" ] + "contacts": [ "properties", "hs_time_in_lead" ] + "contacts": [ "properties", "hs_time_in_marketingqualifiedlead" ] + "contacts": [ "properties", "hs_time_in_opportunity" ] + "contacts": [ "properties", "hs_time_in_other" ] + "contacts": [ "properties", "hs_time_in_salesqualifiedlead" ] + "contacts": [ "properties", "hs_time_in_subscriber" ] + "deals": [ "properties", "hs_time_in_9567448" ] + "deals": [ "properties", "hs_time_in_9567449" ] + "deals": [ "properties", "hs_time_in_appointmentscheduled" ] + "deals": [ "properties", "hs_time_in_closedlost" ] + "deals": [ "properties", "hs_time_in_closedwon" ] + "deals": [ "properties", "hs_time_in_contractsent" ] + "deals": [ "properties", "hs_time_in_customclosedwonstage" ] + "deals": [ "properties", "hs_time_in_decisionmakerboughtin" ] 
+ "deals": [ "properties", "hs_time_in_presentationscheduled" ] + "deals": [ "properties", "hs_time_in_qualifiedtobuy" ] + "tickets": [ "properties", "hs_time_in_1" ] + "tickets": [ "properties", "hs_time_in_2" ] + "tickets": [ "properties", "hs_time_in_3" ] + "tickets": [ "properties", "hs_time_in_4" ] + "property_history": [ "property", "hs_time_in_lead" ] + "property_history": [ "property", "hs_time_in_subscriber" ] diff --git a/airbyte-integrations/connectors/source-hubspot/integration_tests/expected_records.txt b/airbyte-integrations/connectors/source-hubspot/integration_tests/expected_records.txt index e8586787f8eb..79b75c148d0e 100644 --- a/airbyte-integrations/connectors/source-hubspot/integration_tests/expected_records.txt +++ b/airbyte-integrations/connectors/source-hubspot/integration_tests/expected_records.txt @@ -1,148 +1,640 @@ -{"stream": "property_history", "data": {"value": "0", "source-type": "MIGRATION", "source-id": "BackfillReadtimeCalculatedPropertiesJob", "source-label": null, "updated-by-user-id": null, "timestamp": 1628718463905, "selected": false, "property": "num_unique_conversion_events", "vid": 501}, "emitted_at": 1654677520163} -{"stream": "property_history", "data": {"value": "1616173106539", "source-type": "CONTACTS", "source-id": "CRM_UI", "source-label": null, "updated-by-user-id": 12282590, "timestamp": 1616173106539, "selected": false, "property": "createdate", "vid": 501}, "emitted_at": 1654677520165} -{"stream": "property_history", "data": {"value": "52550153", "source-type": "CRM_UI", "source-id": "userId:12282590", "source-label": null, "updated-by-user-id": 12282590, "timestamp": 1616173106523, "selected": false, "property": "hubspot_owner_id", "vid": 501}, "emitted_at": 1654677520167} -{"stream": "property_history", "data": {"value": "1621592488593", "source-type": "DEALS", "source-id": "DealRollupProperties", "source-label": null, "updated-by-user-id": null, "timestamp": 1621592560527, "selected": false, "property": 
"first_deal_created_date", "vid": 501}, "emitted_at": 1654677520168} -{"stream": "property_history", "data": {"value": "1", "source-type": "DEALS", "source-id": "DealRollupProperties", "source-label": null, "updated-by-user-id": null, "timestamp": 1621592560527, "selected": false, "property": "num_associated_deals", "vid": 501}, "emitted_at": 1654677520169} -{"stream": "property_history", "data": {"value": "Kulak", "source-type": "CRM_UI", "source-id": "userId:12282590", "source-label": null, "updated-by-user-id": 12282590, "timestamp": 1616173106523, "selected": false, "property": "lastname", "vid": 501}, "emitted_at": 1654677520175} -{"stream": "property_history", "data": {"value": "1111111111", "source-type": "CRM_UI", "source-id": "userId:12282590", "source-label": null, "updated-by-user-id": 12282590, "timestamp": 1616173106523, "selected": false, "property": "phone", "vid": 501}, "emitted_at": 1654677520178} -{"stream": "property_history", "data": {"value": "0", "source-type": "MIGRATION", "source-id": "BackfillReadtimeCalculatedPropertiesJob", "source-label": null, "updated-by-user-id": null, "timestamp": 1628718463905, "selected": false, "property": "num_conversion_events", "vid": 501}, "emitted_at": 1654677520181} -{"stream": "property_history", "data": {"value": "Eugene", "source-type": "CRM_UI", "source-id": "userId:12282590", "source-label": null, "updated-by-user-id": 12282590, "timestamp": 1616173106523, "selected": false, "property": "firstname", "vid": 501}, "emitted_at": 1654677520188} -{"stream": "property_history", "data": {"value": "california", "source-type": "HEISENBERG", "source-id": "EMAIL_OPEN (9ff40077-4b32-3ec5-9aea-d69fd12683b9)", "source-label": null, "updated-by-user-id": null, "timestamp": 1616173245743, "selected": false, "property": "ip_state", "vid": 501}, "emitted_at": 1654677520189} -{"stream": "property_history", "data": {"value": "united states", "source-type": "HEISENBERG", "source-id": "EMAIL_OPEN 
(9ff40077-4b32-3ec5-9aea-d69fd12683b9)", "source-label": null, "updated-by-user-id": null, "timestamp": 1616173245743, "selected": false, "property": "ip_country", "vid": 501}, "emitted_at": 1654677520190} -{"stream": "property_history", "data": {"value": "ca", "source-type": "HEISENBERG", "source-id": "EMAIL_OPEN (9ff40077-4b32-3ec5-9aea-d69fd12683b9)", "source-label": null, "updated-by-user-id": null, "timestamp": 1616173245743, "selected": false, "property": "ip_state_code", "vid": 501}, "emitted_at": 1654677520193} -{"stream": "property_history", "data": {"value": "kulak.eugene@gmail.com", "source-type": "CONTACTS", "source-id": "CRM_UI", "source-label": null, "updated-by-user-id": 12282590, "timestamp": 1616173106539, "selected": false, "property": "email", "vid": 501}, "emitted_at": 1654677520197} -{"stream": "property_history", "data": {"value": "us", "source-type": "HEISENBERG", "source-id": "EMAIL_OPEN (9ff40077-4b32-3ec5-9aea-d69fd12683b9)", "source-label": null, "updated-by-user-id": null, "timestamp": 1616173245743, "selected": false, "property": "ip_country_code", "vid": 501}, "emitted_at": 1654677520198} -{"stream": "property_history", "data": {"value": "mountain view", "source-type": "HEISENBERG", "source-id": "EMAIL_OPEN (9ff40077-4b32-3ec5-9aea-d69fd12683b9)", "source-label": null, "updated-by-user-id": null, "timestamp": 1616173245743, "selected": false, "property": "ip_city", "vid": 501}, "emitted_at": 1654677520201} -{"stream": "property_history", "data": {"value": "1616173106523", "source-type": "CRM_UI", "source-id": "userId:12282590", "source-label": null, "updated-by-user-id": 12282590, "timestamp": 1616173106523, "selected": false, "property": "hubspot_owner_assigneddate", "vid": 501}, "emitted_at": 1654677520201} -{"stream": "property_history", "data": {"value": "opportunity", "source-type": "DEALS", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1621592560659, "selected": false, "property": 
"lifecyclestage", "vid": 501}, "emitted_at": 1654677520202} -{"stream": "property_history", "data": {"value": "subscriber", "source-type": "CRM_UI", "source-id": "userId:12282590", "source-label": null, "updated-by-user-id": 12282590, "timestamp": 1616173106523, "selected": false, "property": "lifecyclestage", "vid": 501}, "emitted_at": 1654677520203} -{"stream": "property_history", "data": {"value": "test contact 0", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044970920, "selected": false, "property": "firstname", "vid": 601}, "emitted_at": 1654677520205} -{"stream": "property_history", "data": {"value": "Cambridge", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044970920, "selected": false, "property": "city", "vid": 601}, "emitted_at": 1654677520205} -{"stream": "property_history", "data": {"value": "0", "source-type": "CALCULATED", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044971482, "selected": false, "property": "num_unique_conversion_events", "vid": 601}, "emitted_at": 1654677520205} -{"stream": "property_history", "data": {"value": "1634044970930", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044970930, "selected": false, "property": "createdate", "vid": 601}, "emitted_at": 1654677520207} -{"stream": "property_history", "data": {"value": "HubSpot Test", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044970920, "selected": false, "property": "company", "vid": 601}, "emitted_at": 1654677520211} -{"stream": "property_history", "data": {"value": "MA", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044970920, "selected": false, "property": "state", "vid": 601}, "emitted_at": 1654677520211} -{"stream": 
"property_history", "data": {"value": "testingapicontact_0@hubspot.com", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044970930, "selected": false, "property": "email", "vid": 601}, "emitted_at": 1654677520212} -{"stream": "property_history", "data": {"value": "02139", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044970920, "selected": false, "property": "zip", "vid": 601}, "emitted_at": 1654677520212} -{"stream": "property_history", "data": {"value": "http://hubspot.com", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044970920, "selected": false, "property": "website", "vid": 601}, "emitted_at": 1654677520212} -{"stream": "property_history", "data": {"value": "0 First Street", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044970920, "selected": false, "property": "address", "vid": 601}, "emitted_at": 1654677520213} -{"stream": "property_history", "data": {"value": "testerson number 0", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044970920, "selected": false, "property": "lastname", "vid": 601}, "emitted_at": 1654677520214} -{"stream": "property_history", "data": {"value": "555-122-2323", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044970920, "selected": false, "property": "phone", "vid": 601}, "emitted_at": 1654677520215} -{"stream": "property_history", "data": {"value": "0", "source-type": "CALCULATED", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044971482, "selected": false, "property": "num_conversion_events", "vid": 601}, "emitted_at": 1654677520215} -{"stream": "property_history", "data": {"value": "subscriber", "source-type": "API", "source-id": 
null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044970930, "selected": false, "property": "lifecyclestage", "vid": 601}, "emitted_at": 1654677520216} -{"stream": "property_history", "data": {"value": "test contact 10", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044988081, "selected": false, "property": "firstname", "vid": 1101}, "emitted_at": 1654677520217} -{"stream": "property_history", "data": {"value": "Cambridge", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044988081, "selected": false, "property": "city", "vid": 1101}, "emitted_at": 1654677520217} -{"stream": "property_history", "data": {"value": "0", "source-type": "CALCULATED", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044988327, "selected": false, "property": "num_unique_conversion_events", "vid": 1101}, "emitted_at": 1654677520217} -{"stream": "property_history", "data": {"value": "1634044988089", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044988089, "selected": false, "property": "createdate", "vid": 1101}, "emitted_at": 1654677520218} -{"stream": "property_history", "data": {"value": "HubSpot Test", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044988081, "selected": false, "property": "company", "vid": 1101}, "emitted_at": 1654677520219} -{"stream": "property_history", "data": {"value": "MA", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044988081, "selected": false, "property": "state", "vid": 1101}, "emitted_at": 1654677520219} -{"stream": "property_history", "data": {"value": "testingapicontact_10@hubspot.com", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 
1634044988089, "selected": false, "property": "email", "vid": 1101}, "emitted_at": 1654677520220} -{"stream": "property_history", "data": {"value": "02139", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044988081, "selected": false, "property": "zip", "vid": 1101}, "emitted_at": 1654677520220} -{"stream": "property_history", "data": {"value": "http://hubspot.com", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044988081, "selected": false, "property": "website", "vid": 1101}, "emitted_at": 1654677520220} -{"stream": "property_history", "data": {"value": "10 First Street", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044988081, "selected": false, "property": "address", "vid": 1101}, "emitted_at": 1654677520220} -{"stream": "property_history", "data": {"value": "testerson number 10", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044988081, "selected": false, "property": "lastname", "vid": 1101}, "emitted_at": 1654677520221} -{"stream": "property_history", "data": {"value": "555-122-2323", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044988081, "selected": false, "property": "phone", "vid": 1101}, "emitted_at": 1654677520222} -{"stream": "property_history", "data": {"value": "0", "source-type": "CALCULATED", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044988327, "selected": false, "property": "num_conversion_events", "vid": 1101}, "emitted_at": 1654677520223} -{"stream": "property_history", "data": {"value": "subscriber", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044988089, "selected": false, "property": "lifecyclestage", "vid": 1101}, "emitted_at": 
1654677520224} -{"stream": "property_history", "data": {"value": "test contact 1", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044981822, "selected": false, "property": "firstname", "vid": 651}, "emitted_at": 1654677520225} -{"stream": "property_history", "data": {"value": "Cambridge", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044981822, "selected": false, "property": "city", "vid": 651}, "emitted_at": 1654677520225} -{"stream": "property_history", "data": {"value": "0", "source-type": "CALCULATED", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044982142, "selected": false, "property": "num_unique_conversion_events", "vid": 651}, "emitted_at": 1654677520225} -{"stream": "property_history", "data": {"value": "1634044981830", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044981830, "selected": false, "property": "createdate", "vid": 651}, "emitted_at": 1654677520226} -{"stream": "property_history", "data": {"value": "HubSpot Test", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044981822, "selected": false, "property": "company", "vid": 651}, "emitted_at": 1654677520228} -{"stream": "property_history", "data": {"value": "MA", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044981822, "selected": false, "property": "state", "vid": 651}, "emitted_at": 1654677520229} -{"stream": "property_history", "data": {"value": "testingapicontact_1@hubspot.com", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044981830, "selected": false, "property": "email", "vid": 651}, "emitted_at": 1654677520229} -{"stream": "property_history", "data": {"value": "02139", "source-type": 
"API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044981822, "selected": false, "property": "zip", "vid": 651}, "emitted_at": 1654677520229} -{"stream": "property_history", "data": {"value": "http://hubspot.com", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044981822, "selected": false, "property": "website", "vid": 651}, "emitted_at": 1654677520229} -{"stream": "property_history", "data": {"value": "1 First Street", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044981822, "selected": false, "property": "address", "vid": 651}, "emitted_at": 1654677520229} -{"stream": "property_history", "data": {"value": "testerson number 1", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044981822, "selected": false, "property": "lastname", "vid": 651}, "emitted_at": 1654677520230} -{"stream": "property_history", "data": {"value": "555-122-2323", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044981822, "selected": false, "property": "phone", "vid": 651}, "emitted_at": 1654677520230} -{"stream": "property_history", "data": {"value": "0", "source-type": "CALCULATED", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044982142, "selected": false, "property": "num_conversion_events", "vid": 651}, "emitted_at": 1654677520231} -{"stream": "property_history", "data": {"value": "subscriber", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044981830, "selected": false, "property": "lifecyclestage", "vid": 651}, "emitted_at": 1654677520231} -{"stream": "property_history", "data": {"value": "test contact 8", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 
1634044986968, "selected": false, "property": "firstname", "vid": 1001}, "emitted_at": 1654677520232} -{"stream": "property_history", "data": {"value": "Cambridge", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044986968, "selected": false, "property": "city", "vid": 1001}, "emitted_at": 1654677520232} -{"stream": "property_history", "data": {"value": "0", "source-type": "CALCULATED", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044987352, "selected": false, "property": "num_unique_conversion_events", "vid": 1001}, "emitted_at": 1654677520232} -{"stream": "property_history", "data": {"value": "1634044986974", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044986974, "selected": false, "property": "createdate", "vid": 1001}, "emitted_at": 1654677520233} -{"stream": "property_history", "data": {"value": "HubSpot Test", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044986968, "selected": false, "property": "company", "vid": 1001}, "emitted_at": 1654677520234} -{"stream": "property_history", "data": {"value": "MA", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044986968, "selected": false, "property": "state", "vid": 1001}, "emitted_at": 1654677520234} -{"stream": "property_history", "data": {"value": "testingapicontact_8@hubspot.com", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044986974, "selected": false, "property": "email", "vid": 1001}, "emitted_at": 1654677520234} -{"stream": "property_history", "data": {"value": "02139", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044986968, "selected": false, "property": "zip", "vid": 1001}, "emitted_at": 
1654677520234} -{"stream": "property_history", "data": {"value": "http://hubspot.com", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044986968, "selected": false, "property": "website", "vid": 1001}, "emitted_at": 1654677520235} -{"stream": "property_history", "data": {"value": "8 First Street", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044986968, "selected": false, "property": "address", "vid": 1001}, "emitted_at": 1654677520235} -{"stream": "property_history", "data": {"value": "testerson number 8", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044986968, "selected": false, "property": "lastname", "vid": 1001}, "emitted_at": 1654677520235} -{"stream": "property_history", "data": {"value": "555-122-2323", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044986968, "selected": false, "property": "phone", "vid": 1001}, "emitted_at": 1654677520236} -{"stream": "property_history", "data": {"value": "0", "source-type": "CALCULATED", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044987352, "selected": false, "property": "num_conversion_events", "vid": 1001}, "emitted_at": 1654677520236} -{"stream": "property_history", "data": {"value": "subscriber", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044986974, "selected": false, "property": "lifecyclestage", "vid": 1001}, "emitted_at": 1654677520237} -{"stream": "property_history", "data": {"value": "0", "source-type": "MIGRATION", "source-id": "BackfillReadtimeCalculatedPropertiesJob", "source-label": null, "updated-by-user-id": null, "timestamp": 1628718463905, "selected": false, "property": "num_unique_conversion_events", "vid": 501}, "emitted_at": 1654677520163} -{"stream": 
"property_history", "data": {"value": "1616173106539", "source-type": "CONTACTS", "source-id": "CRM_UI", "source-label": null, "updated-by-user-id": 12282590, "timestamp": 1616173106539, "selected": false, "property": "createdate", "vid": 501}, "emitted_at": 1654677520165} -{"stream": "property_history", "data": {"value": "52550153", "source-type": "CRM_UI", "source-id": "userId:12282590", "source-label": null, "updated-by-user-id": 12282590, "timestamp": 1616173106523, "selected": false, "property": "hubspot_owner_id", "vid": 501}, "emitted_at": 1654677520167} -{"stream": "property_history", "data": {"value": "1621592488593", "source-type": "DEALS", "source-id": "DealRollupProperties", "source-label": null, "updated-by-user-id": null, "timestamp": 1621592560527, "selected": false, "property": "first_deal_created_date", "vid": 501}, "emitted_at": 1654677520168} -{"stream": "property_history", "data": {"value": "1", "source-type": "DEALS", "source-id": "DealRollupProperties", "source-label": null, "updated-by-user-id": null, "timestamp": 1621592560527, "selected": false, "property": "num_associated_deals", "vid": 501}, "emitted_at": 1654677520169} -{"stream": "property_history", "data": {"value": "Kulak", "source-type": "CRM_UI", "source-id": "userId:12282590", "source-label": null, "updated-by-user-id": 12282590, "timestamp": 1616173106523, "selected": false, "property": "lastname", "vid": 501}, "emitted_at": 1654677520175} -{"stream": "property_history", "data": {"value": "1111111111", "source-type": "CRM_UI", "source-id": "userId:12282590", "source-label": null, "updated-by-user-id": 12282590, "timestamp": 1616173106523, "selected": false, "property": "phone", "vid": 501}, "emitted_at": 1654677520178} -{"stream": "property_history", "data": {"value": "0", "source-type": "MIGRATION", "source-id": "BackfillReadtimeCalculatedPropertiesJob", "source-label": null, "updated-by-user-id": null, "timestamp": 1628718463905, "selected": false, "property": 
"num_conversion_events", "vid": 501}, "emitted_at": 1654677520181} -{"stream": "property_history", "data": {"value": "Eugene", "source-type": "CRM_UI", "source-id": "userId:12282590", "source-label": null, "updated-by-user-id": 12282590, "timestamp": 1616173106523, "selected": false, "property": "firstname", "vid": 501}, "emitted_at": 1654677520188} -{"stream": "property_history", "data": {"value": "california", "source-type": "HEISENBERG", "source-id": "EMAIL_OPEN (9ff40077-4b32-3ec5-9aea-d69fd12683b9)", "source-label": null, "updated-by-user-id": null, "timestamp": 1616173245743, "selected": false, "property": "ip_state", "vid": 501}, "emitted_at": 1654677520189} -{"stream": "property_history", "data": {"value": "united states", "source-type": "HEISENBERG", "source-id": "EMAIL_OPEN (9ff40077-4b32-3ec5-9aea-d69fd12683b9)", "source-label": null, "updated-by-user-id": null, "timestamp": 1616173245743, "selected": false, "property": "ip_country", "vid": 501}, "emitted_at": 1654677520190} -{"stream": "property_history", "data": {"value": "ca", "source-type": "HEISENBERG", "source-id": "EMAIL_OPEN (9ff40077-4b32-3ec5-9aea-d69fd12683b9)", "source-label": null, "updated-by-user-id": null, "timestamp": 1616173245743, "selected": false, "property": "ip_state_code", "vid": 501}, "emitted_at": 1654677520193} -{"stream": "property_history", "data": {"value": "kulak.eugene@gmail.com", "source-type": "CONTACTS", "source-id": "CRM_UI", "source-label": null, "updated-by-user-id": 12282590, "timestamp": 1616173106539, "selected": false, "property": "email", "vid": 501}, "emitted_at": 1654677520197} -{"stream": "property_history", "data": {"value": "us", "source-type": "HEISENBERG", "source-id": "EMAIL_OPEN (9ff40077-4b32-3ec5-9aea-d69fd12683b9)", "source-label": null, "updated-by-user-id": null, "timestamp": 1616173245743, "selected": false, "property": "ip_country_code", "vid": 501}, "emitted_at": 1654677520198} -{"stream": "property_history", "data": {"value": "mountain view", 
"source-type": "HEISENBERG", "source-id": "EMAIL_OPEN (9ff40077-4b32-3ec5-9aea-d69fd12683b9)", "source-label": null, "updated-by-user-id": null, "timestamp": 1616173245743, "selected": false, "property": "ip_city", "vid": 501}, "emitted_at": 1654677520201} -{"stream": "property_history", "data": {"value": "1616173106523", "source-type": "CRM_UI", "source-id": "userId:12282590", "source-label": null, "updated-by-user-id": 12282590, "timestamp": 1616173106523, "selected": false, "property": "hubspot_owner_assigneddate", "vid": 501}, "emitted_at": 1654677520201} -{"stream": "property_history", "data": {"value": "opportunity", "source-type": "DEALS", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1621592560659, "selected": false, "property": "lifecyclestage", "vid": 501}, "emitted_at": 1654677520202} -{"stream": "property_history", "data": {"value": "subscriber", "source-type": "CRM_UI", "source-id": "userId:12282590", "source-label": null, "updated-by-user-id": 12282590, "timestamp": 1616173106523, "selected": false, "property": "lifecyclestage", "vid": 501}, "emitted_at": 1654677520203} -{"stream": "property_history", "data": {"value": "test contact 0", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044970920, "selected": false, "property": "firstname", "vid": 601}, "emitted_at": 1654677520205} -{"stream": "property_history", "data": {"value": "Cambridge", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044970920, "selected": false, "property": "city", "vid": 601}, "emitted_at": 1654677520205} -{"stream": "property_history", "data": {"value": "0", "source-type": "CALCULATED", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044971482, "selected": false, "property": "num_unique_conversion_events", "vid": 601}, "emitted_at": 1654677520205} -{"stream": "property_history", "data": 
{"value": "1634044970930", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044970930, "selected": false, "property": "createdate", "vid": 601}, "emitted_at": 1654677520207} -{"stream": "property_history", "data": {"value": "HubSpot Test", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044970920, "selected": false, "property": "company", "vid": 601}, "emitted_at": 1654677520211} -{"stream": "property_history", "data": {"value": "MA", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044970920, "selected": false, "property": "state", "vid": 601}, "emitted_at": 1654677520211} -{"stream": "property_history", "data": {"value": "testingapicontact_0@hubspot.com", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044970930, "selected": false, "property": "email", "vid": 601}, "emitted_at": 1654677520212} -{"stream": "property_history", "data": {"value": "02139", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044970920, "selected": false, "property": "zip", "vid": 601}, "emitted_at": 1654677520212} -{"stream": "property_history", "data": {"value": "http://hubspot.com", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044970920, "selected": false, "property": "website", "vid": 601}, "emitted_at": 1654677520212} -{"stream": "property_history", "data": {"value": "0 First Street", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044970920, "selected": false, "property": "address", "vid": 601}, "emitted_at": 1654677520213} -{"stream": "property_history", "data": {"value": "testerson number 0", "source-type": "API", "source-id": null, "source-label": null, 
"updated-by-user-id": null, "timestamp": 1634044970920, "selected": false, "property": "lastname", "vid": 601}, "emitted_at": 1654677520214} -{"stream": "property_history", "data": {"value": "555-122-2323", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044970920, "selected": false, "property": "phone", "vid": 601}, "emitted_at": 1654677520215} -{"stream": "property_history", "data": {"value": "0", "source-type": "CALCULATED", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044971482, "selected": false, "property": "num_conversion_events", "vid": 601}, "emitted_at": 1654677520215} -{"stream": "property_history", "data": {"value": "subscriber", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044970930, "selected": false, "property": "lifecyclestage", "vid": 601}, "emitted_at": 1654677520216} -{"stream": "property_history", "data": {"value": "test contact 10", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044988081, "selected": false, "property": "firstname", "vid": 1101}, "emitted_at": 1654677520217} -{"stream": "property_history", "data": {"value": "Cambridge", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044988081, "selected": false, "property": "city", "vid": 1101}, "emitted_at": 1654677520217} -{"stream": "property_history", "data": {"value": "0", "source-type": "CALCULATED", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044988327, "selected": false, "property": "num_unique_conversion_events", "vid": 1101}, "emitted_at": 1654677520217} -{"stream": "property_history", "data": {"value": "1634044988089", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044988089, "selected": false, 
"property": "createdate", "vid": 1101}, "emitted_at": 1654677520218} -{"stream": "property_history", "data": {"value": "HubSpot Test", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044988081, "selected": false, "property": "company", "vid": 1101}, "emitted_at": 1654677520219} -{"stream": "property_history", "data": {"value": "MA", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044988081, "selected": false, "property": "state", "vid": 1101}, "emitted_at": 1654677520219} -{"stream": "property_history", "data": {"value": "testingapicontact_10@hubspot.com", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044988089, "selected": false, "property": "email", "vid": 1101}, "emitted_at": 1654677520220} -{"stream": "property_history", "data": {"value": "02139", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044988081, "selected": false, "property": "zip", "vid": 1101}, "emitted_at": 1654677520220} -{"stream": "property_history", "data": {"value": "http://hubspot.com", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044988081, "selected": false, "property": "website", "vid": 1101}, "emitted_at": 1654677520220} -{"stream": "property_history", "data": {"value": "10 First Street", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044988081, "selected": false, "property": "address", "vid": 1101}, "emitted_at": 1654677520220} -{"stream": "property_history", "data": {"value": "testerson number 10", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044988081, "selected": false, "property": "lastname", "vid": 1101}, "emitted_at": 1654677520221} -{"stream": 
"property_history", "data": {"value": "555-122-2323", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044988081, "selected": false, "property": "phone", "vid": 1101}, "emitted_at": 1654677520222} -{"stream": "property_history", "data": {"value": "0", "source-type": "CALCULATED", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044988327, "selected": false, "property": "num_conversion_events", "vid": 1101}, "emitted_at": 1654677520223} -{"stream": "property_history", "data": {"value": "subscriber", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044988089, "selected": false, "property": "lifecyclestage", "vid": 1101}, "emitted_at": 1654677520224} -{"stream": "property_history", "data": {"value": "test contact 1", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044981822, "selected": false, "property": "firstname", "vid": 651}, "emitted_at": 1654677520225} -{"stream": "property_history", "data": {"value": "Cambridge", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044981822, "selected": false, "property": "city", "vid": 651}, "emitted_at": 1654677520225} -{"stream": "property_history", "data": {"value": "0", "source-type": "CALCULATED", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044982142, "selected": false, "property": "num_unique_conversion_events", "vid": 651}, "emitted_at": 1654677520225} -{"stream": "property_history", "data": {"value": "1634044981830", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044981830, "selected": false, "property": "createdate", "vid": 651}, "emitted_at": 1654677520226} -{"stream": "property_history", "data": {"value": "HubSpot Test", "source-type": "API", 
"source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044981822, "selected": false, "property": "company", "vid": 651}, "emitted_at": 1654677520228} -{"stream": "property_history", "data": {"value": "MA", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044981822, "selected": false, "property": "state", "vid": 651}, "emitted_at": 1654677520229} -{"stream": "property_history", "data": {"value": "testingapicontact_1@hubspot.com", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044981830, "selected": false, "property": "email", "vid": 651}, "emitted_at": 1654677520229} -{"stream": "property_history", "data": {"value": "02139", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044981822, "selected": false, "property": "zip", "vid": 651}, "emitted_at": 1654677520229} -{"stream": "property_history", "data": {"value": "http://hubspot.com", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044981822, "selected": false, "property": "website", "vid": 651}, "emitted_at": 1654677520229} -{"stream": "property_history", "data": {"value": "1 First Street", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044981822, "selected": false, "property": "address", "vid": 651}, "emitted_at": 1654677520229} -{"stream": "property_history", "data": {"value": "testerson number 1", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044981822, "selected": false, "property": "lastname", "vid": 651}, "emitted_at": 1654677520230} -{"stream": "property_history", "data": {"value": "555-122-2323", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044981822, 
"selected": false, "property": "phone", "vid": 651}, "emitted_at": 1654677520230} -{"stream": "property_history", "data": {"value": "0", "source-type": "CALCULATED", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044982142, "selected": false, "property": "num_conversion_events", "vid": 651}, "emitted_at": 1654677520231} -{"stream": "property_history", "data": {"value": "subscriber", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044981830, "selected": false, "property": "lifecyclestage", "vid": 651}, "emitted_at": 1654677520231} -{"stream": "property_history", "data": {"value": "test contact 8", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044986968, "selected": false, "property": "firstname", "vid": 1001}, "emitted_at": 1654677520232} -{"stream": "property_history", "data": {"value": "Cambridge", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044986968, "selected": false, "property": "city", "vid": 1001}, "emitted_at": 1654677520232} -{"stream": "property_history", "data": {"value": "0", "source-type": "CALCULATED", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044987352, "selected": false, "property": "num_unique_conversion_events", "vid": 1001}, "emitted_at": 1654677520232} -{"stream": "property_history", "data": {"value": "1634044986974", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044986974, "selected": false, "property": "createdate", "vid": 1001}, "emitted_at": 1654677520233} -{"stream": "property_history", "data": {"value": "HubSpot Test", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044986968, "selected": false, "property": "company", "vid": 1001}, "emitted_at": 
1654677520234} -{"stream": "property_history", "data": {"value": "MA", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044986968, "selected": false, "property": "state", "vid": 1001}, "emitted_at": 1654677520234} -{"stream": "property_history", "data": {"value": "testingapicontact_8@hubspot.com", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044986974, "selected": false, "property": "email", "vid": 1001}, "emitted_at": 1654677520234} -{"stream": "property_history", "data": {"value": "02139", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044986968, "selected": false, "property": "zip", "vid": 1001}, "emitted_at": 1654677520234} -{"stream": "property_history", "data": {"value": "http://hubspot.com", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044986968, "selected": false, "property": "website", "vid": 1001}, "emitted_at": 1654677520235} -{"stream": "property_history", "data": {"value": "8 First Street", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044986968, "selected": false, "property": "address", "vid": 1001}, "emitted_at": 1654677520235} -{"stream": "property_history", "data": {"value": "testerson number 8", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044986968, "selected": false, "property": "lastname", "vid": 1001}, "emitted_at": 1654677520235} -{"stream": "property_history", "data": {"value": "555-122-2323", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044986968, "selected": false, "property": "phone", "vid": 1001}, "emitted_at": 1654677520236} -{"stream": "property_history", "data": {"value": "0", "source-type": "CALCULATED", 
"source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044987352, "selected": false, "property": "num_conversion_events", "vid": 1001}, "emitted_at": 1654677520236} -{"stream": "property_history", "data": {"value": "subscriber", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044986974, "selected": false, "property": "lifecyclestage", "vid": 1001}, "emitted_at": 1654677520237} \ No newline at end of file +{"stream": "property_history", "data": {"value": "API", "source-type": "ANALYTICS", "source-id": "ContactAnalyticsDetailsUpdateWorker", "source-label": null, "updated-by-user-id": null, "timestamp": 1634044984836, "selected": false, "property": "hs_latest_source_data_1", "vid": 701}, "emitted_at": 1655361780761} +{"stream": "property_history", "data": {"value": "true", "source-type": "CALCULATED", "source-id": "CalculatedPropertyComputer", "source-label": null, "updated-by-user-id": null, "timestamp": 1634044982387, "selected": false, "property": "hs_is_unworked", "vid": 701}, "emitted_at": 1655361780763} +{"stream": "property_history", "data": {"value": "test contact 2", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044982379, "selected": false, "property": "firstname", "vid": 701}, "emitted_at": 1655361780764} +{"stream": "property_history", "data": {"value": "Cambridge", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044982379, "selected": false, "property": "city", "vid": 701}, "emitted_at": 1655361780764} +{"stream": "property_history", "data": {"value": "0", "source-type": "CALCULATED", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044982735, "selected": false, "property": "num_unique_conversion_events", "vid": 701}, "emitted_at": 1655361780765} +{"stream": "property_history", "data": {"value": "OFFLINE", 
"source-type": "ANALYTICS", "source-id": "ContactAnalyticsDetailsUpdateWorker", "source-label": null, "updated-by-user-id": null, "timestamp": 1634044984836, "selected": false, "property": "hs_latest_source", "vid": 701}, "emitted_at": 1655361780766} +{"stream": "property_history", "data": {"value": "contacts-lifecycle-pipeline", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044982387, "selected": false, "property": "hs_pipeline", "vid": 701}, "emitted_at": 1655361780766} +{"stream": "property_history", "data": {"value": "0.0", "source-type": "ANALYTICS", "source-id": "ContactAnalyticsDetailsUpdateWorker", "source-label": null, "updated-by-user-id": null, "timestamp": 1634044984836, "selected": false, "property": "hs_analytics_revenue", "vid": 701}, "emitted_at": 1655361780767} +{"stream": "property_history", "data": {"value": "1634044982387", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044982387, "selected": false, "property": "createdate", "vid": 701}, "emitted_at": 1655361780768} +{"stream": "property_history", "data": {"value": "0", "source-type": "ANALYTICS", "source-id": "ContactAnalyticsDetailsUpdateWorker", "source-label": null, "updated-by-user-id": null, "timestamp": 1634044984836, "selected": false, "property": "hs_analytics_num_visits", "vid": 701}, "emitted_at": 1655361780769} +{"stream": "property_history", "data": {"value": "0", "source-type": "CALCULATED", "source-id": "RollupProperties", "source-label": null, "updated-by-user-id": null, "timestamp": 1634044983225, "selected": false, "property": "hs_sequences_actively_enrolled_count", "vid": 701}, "emitted_at": 1655361780770} +{"stream": "property_history", "data": {"value": "false", "source-type": "MIGRATION", "source-id": "BulkSetMarketableStatusProcessor", "source-label": null, "updated-by-user-id": null, "timestamp": 1635860523515, "selected": false, "property": 
"hs_marketable_until_renewal", "vid": 701}, "emitted_at": 1655361780770} +{"stream": "property_history", "data": {"value": "false", "source-type": "MIGRATION", "source-id": "BulkSetMarketableStatusProcessor", "source-label": null, "updated-by-user-id": null, "timestamp": 1635860523515, "selected": false, "property": "hs_marketable_status", "vid": 701}, "emitted_at": 1655361780771} +{"stream": "property_history", "data": {"value": "OFFLINE", "source-type": "ANALYTICS", "source-id": "ContactAnalyticsDetailsUpdateWorker", "source-label": null, "updated-by-user-id": null, "timestamp": 1634044984836, "selected": false, "property": "hs_analytics_source", "vid": 701}, "emitted_at": 1655361780771} +{"stream": "property_history", "data": {"value": "1634044982387", "source-type": "CALCULATED", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044982387, "selected": false, "property": "hs_date_entered_subscriber", "vid": 701}, "emitted_at": 1655361780772} +{"stream": "property_history", "data": {"value": "5551222323", "source-type": "CALCULATED", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044982387, "selected": false, "property": "hs_searchable_calculated_phone_number", "vid": 701}, "emitted_at": 1655361780773} +{"stream": "property_history", "data": {"value": "hubspot.com", "source-type": "CALCULATED", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044982387, "selected": false, "property": "hs_email_domain", "vid": 701}, "emitted_at": 1655361780773} +{"stream": "property_history", "data": {"value": "0", "source-type": "ANALYTICS", "source-id": "ContactAnalyticsDetailsUpdateWorker", "source-label": null, "updated-by-user-id": null, "timestamp": 1634044984836, "selected": false, "property": "hs_analytics_num_page_views", "vid": 701}, "emitted_at": 1655361780774} +{"stream": "property_history", "data": {"value": "HubSpot Test", "source-type": "API", "source-id": 
null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044982379, "selected": false, "property": "company", "vid": 701}, "emitted_at": 1655361780775} +{"stream": "property_history", "data": {"value": "MA", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044982379, "selected": false, "property": "state", "vid": 701}, "emitted_at": 1655361780775} +{"stream": "property_history", "data": {"value": "testingapicontact_2@hubspot.com", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044982387, "selected": false, "property": "email", "vid": 701}, "emitted_at": 1655361780776} +{"stream": "property_history", "data": {"value": "02139", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044982379, "selected": false, "property": "zip", "vid": 701}, "emitted_at": 1655361780776} +{"stream": "property_history", "data": {"value": "http://hubspot.com", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044982379, "selected": false, "property": "website", "vid": 701}, "emitted_at": 1655361780777} +{"stream": "property_history", "data": {"value": "2 First Street", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044982379, "selected": false, "property": "address", "vid": 701}, "emitted_at": 1655361780778} +{"stream": "property_history", "data": {"value": "1634044982387", "source-type": "ANALYTICS", "source-id": "ContactAnalyticsDetailsUpdateWorker", "source-label": null, "updated-by-user-id": null, "timestamp": 1634044984836, "selected": false, "property": "hs_analytics_first_timestamp", "vid": 701}, "emitted_at": 1655361780778} +{"stream": "property_history", "data": {"value": "1634044982387", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": 
null, "timestamp": 1634044982387, "selected": false, "property": "hs_lifecyclestage_subscriber_date", "vid": 701}, "emitted_at": 1655361780779} +{"stream": "property_history", "data": {"value": "0", "source-type": "ANALYTICS", "source-id": "ContactAnalyticsDetailsUpdateWorker", "source-label": null, "updated-by-user-id": null, "timestamp": 1634044984836, "selected": false, "property": "hs_analytics_average_page_views", "vid": 701}, "emitted_at": 1655361780780} +{"stream": "property_history", "data": {"value": "testerson number 2", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044982379, "selected": false, "property": "lastname", "vid": 701}, "emitted_at": 1655361780780} +{"stream": "property_history", "data": {"value": "701", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044982387, "selected": false, "property": "hs_all_contact_vids", "vid": 701}, "emitted_at": 1655361780781} +{"stream": "property_history", "data": {"value": "tier_3", "source-type": "CALCULATED", "source-id": null, "source-label": "HubSpot Predictive Contact Scoring Model", "updated-by-user-id": null, "timestamp": 1638773601495, "selected": false, "property": "hs_predictivescoringtier", "vid": 701}, "emitted_at": 1655361780782} +{"stream": "property_history", "data": {"value": "tier_1", "source-type": "CALCULATED", "source-id": null, "source-label": "HubSpot Predictive Contact Scoring Model", "updated-by-user-id": null, "timestamp": 1637664426900, "selected": false, "property": "hs_predictivescoringtier", "vid": 701}, "emitted_at": 1655361780782} +{"stream": "property_history", "data": {"value": "555-122-2323", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044982379, "selected": false, "property": "phone", "vid": 701}, "emitted_at": 1655361780783} +{"stream": "property_history", "data": {"value": "3.79", 
"source-type": "CALCULATED", "source-id": null, "source-label": "HubSpot Predictive Contact Scoring Model", "updated-by-user-id": null, "timestamp": 1637664426900, "selected": false, "property": "hs_predictivecontactscore_v2", "vid": 701}, "emitted_at": 1655361780783} +{"stream": "property_history", "data": {"value": "true", "source-type": "CALCULATED", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044982387, "selected": false, "property": "hs_is_contact", "vid": 701}, "emitted_at": 1655361780784} +{"stream": "property_history", "data": {"value": "0", "source-type": "CALCULATED", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044982735, "selected": false, "property": "num_conversion_events", "vid": 701}, "emitted_at": 1655361780785} +{"stream": "property_history", "data": {"value": "701", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044982387, "selected": false, "property": "hs_object_id", "vid": 701}, "emitted_at": 1655361780785} +{"stream": "property_history", "data": {"value": "0", "source-type": "ANALYTICS", "source-id": "ContactAnalyticsDetailsUpdateWorker", "source-label": null, "updated-by-user-id": null, "timestamp": 1634044984836, "selected": false, "property": "hs_analytics_num_event_completions", "vid": 701}, "emitted_at": 1655361780786} +{"stream": "property_history", "data": {"value": "API", "source-type": "ANALYTICS", "source-id": "ContactAnalyticsDetailsUpdateWorker", "source-label": null, "updated-by-user-id": null, "timestamp": 1634044984836, "selected": false, "property": "hs_analytics_source_data_1", "vid": 701}, "emitted_at": 1655361780786} +{"stream": "property_history", "data": {"value": "subscriber", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044982387, "selected": false, "property": "lifecyclestage", "vid": 701}, "emitted_at": 
1655361780787} +{"stream": "property_history", "data": {"value": "API", "source-type": "ANALYTICS", "source-id": "ContactAnalyticsDetailsUpdateWorker", "source-label": null, "updated-by-user-id": null, "timestamp": 1634044992314, "selected": false, "property": "hs_latest_source_data_1", "vid": 1251}, "emitted_at": 1655361780787} +{"stream": "property_history", "data": {"value": "true", "source-type": "CALCULATED", "source-id": "CalculatedPropertyComputer", "source-label": null, "updated-by-user-id": null, "timestamp": 1634044989572, "selected": false, "property": "hs_is_unworked", "vid": 1251}, "emitted_at": 1655361780787} +{"stream": "property_history", "data": {"value": "test contact 13", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044989563, "selected": false, "property": "firstname", "vid": 1251}, "emitted_at": 1655361780788} +{"stream": "property_history", "data": {"value": "Cambridge", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044989563, "selected": false, "property": "city", "vid": 1251}, "emitted_at": 1655361780788} +{"stream": "property_history", "data": {"value": "0", "source-type": "CALCULATED", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044989827, "selected": false, "property": "num_unique_conversion_events", "vid": 1251}, "emitted_at": 1655361780789} +{"stream": "property_history", "data": {"value": "OFFLINE", "source-type": "ANALYTICS", "source-id": "ContactAnalyticsDetailsUpdateWorker", "source-label": null, "updated-by-user-id": null, "timestamp": 1634044992314, "selected": false, "property": "hs_latest_source", "vid": 1251}, "emitted_at": 1655361780789} +{"stream": "property_history", "data": {"value": "contacts-lifecycle-pipeline", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044989572, "selected": false, 
"property": "hs_pipeline", "vid": 1251}, "emitted_at": 1655361780789} +{"stream": "property_history", "data": {"value": "0.0", "source-type": "ANALYTICS", "source-id": "ContactAnalyticsDetailsUpdateWorker", "source-label": null, "updated-by-user-id": null, "timestamp": 1634044992314, "selected": false, "property": "hs_analytics_revenue", "vid": 1251}, "emitted_at": 1655361780790} +{"stream": "property_history", "data": {"value": "1634044989572", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044989572, "selected": false, "property": "createdate", "vid": 1251}, "emitted_at": 1655361780790} +{"stream": "property_history", "data": {"value": "0", "source-type": "ANALYTICS", "source-id": "ContactAnalyticsDetailsUpdateWorker", "source-label": null, "updated-by-user-id": null, "timestamp": 1634044992314, "selected": false, "property": "hs_analytics_num_visits", "vid": 1251}, "emitted_at": 1655361780791} +{"stream": "property_history", "data": {"value": "0", "source-type": "CALCULATED", "source-id": "RollupProperties", "source-label": null, "updated-by-user-id": null, "timestamp": 1634044990383, "selected": false, "property": "hs_sequences_actively_enrolled_count", "vid": 1251}, "emitted_at": 1655361780791} +{"stream": "property_history", "data": {"value": "false", "source-type": "MIGRATION", "source-id": "BulkSetMarketableStatusProcessor", "source-label": null, "updated-by-user-id": null, "timestamp": 1635860523515, "selected": false, "property": "hs_marketable_until_renewal", "vid": 1251}, "emitted_at": 1655361780792} +{"stream": "property_history", "data": {"value": "false", "source-type": "MIGRATION", "source-id": "BulkSetMarketableStatusProcessor", "source-label": null, "updated-by-user-id": null, "timestamp": 1635860523515, "selected": false, "property": "hs_marketable_status", "vid": 1251}, "emitted_at": 1655361780792} +{"stream": "property_history", "data": {"value": "OFFLINE", "source-type": "ANALYTICS", 
"source-id": "ContactAnalyticsDetailsUpdateWorker", "source-label": null, "updated-by-user-id": null, "timestamp": 1634044992314, "selected": false, "property": "hs_analytics_source", "vid": 1251}, "emitted_at": 1655361780793} +{"stream": "property_history", "data": {"value": "1634044989572", "source-type": "CALCULATED", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044989572, "selected": false, "property": "hs_date_entered_subscriber", "vid": 1251}, "emitted_at": 1655361780793} +{"stream": "property_history", "data": {"value": "5551222323", "source-type": "CALCULATED", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044989572, "selected": false, "property": "hs_searchable_calculated_phone_number", "vid": 1251}, "emitted_at": 1655361780794} +{"stream": "property_history", "data": {"value": "hubspot.com", "source-type": "CALCULATED", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044989572, "selected": false, "property": "hs_email_domain", "vid": 1251}, "emitted_at": 1655361780794} +{"stream": "property_history", "data": {"value": "0", "source-type": "ANALYTICS", "source-id": "ContactAnalyticsDetailsUpdateWorker", "source-label": null, "updated-by-user-id": null, "timestamp": 1634044992314, "selected": false, "property": "hs_analytics_num_page_views", "vid": 1251}, "emitted_at": 1655361780794} +{"stream": "property_history", "data": {"value": "HubSpot Test", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044989563, "selected": false, "property": "company", "vid": 1251}, "emitted_at": 1655361780795} +{"stream": "property_history", "data": {"value": "MA", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044989563, "selected": false, "property": "state", "vid": 1251}, "emitted_at": 1655361780795} +{"stream": "property_history", "data": 
{"value": "testingapicontact_13@hubspot.com", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044989572, "selected": false, "property": "email", "vid": 1251}, "emitted_at": 1655361780795} +{"stream": "property_history", "data": {"value": "02139", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044989563, "selected": false, "property": "zip", "vid": 1251}, "emitted_at": 1655361780796} +{"stream": "property_history", "data": {"value": "http://hubspot.com", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044989563, "selected": false, "property": "website", "vid": 1251}, "emitted_at": 1655361780796} +{"stream": "property_history", "data": {"value": "13 First Street", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044989563, "selected": false, "property": "address", "vid": 1251}, "emitted_at": 1655361780796} +{"stream": "property_history", "data": {"value": "1634044989572", "source-type": "ANALYTICS", "source-id": "ContactAnalyticsDetailsUpdateWorker", "source-label": null, "updated-by-user-id": null, "timestamp": 1634044992314, "selected": false, "property": "hs_analytics_first_timestamp", "vid": 1251}, "emitted_at": 1655361780796} +{"stream": "property_history", "data": {"value": "1634044989572", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044989572, "selected": false, "property": "hs_lifecyclestage_subscriber_date", "vid": 1251}, "emitted_at": 1655361780797} +{"stream": "property_history", "data": {"value": "0", "source-type": "ANALYTICS", "source-id": "ContactAnalyticsDetailsUpdateWorker", "source-label": null, "updated-by-user-id": null, "timestamp": 1634044992314, "selected": false, "property": "hs_analytics_average_page_views", "vid": 1251}, "emitted_at": 
1655361780797} +{"stream": "property_history", "data": {"value": "testerson number 13", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044989563, "selected": false, "property": "lastname", "vid": 1251}, "emitted_at": 1655361780797} +{"stream": "property_history", "data": {"value": "1251", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044989572, "selected": false, "property": "hs_all_contact_vids", "vid": 1251}, "emitted_at": 1655361780797} +{"stream": "property_history", "data": {"value": "tier_3", "source-type": "CALCULATED", "source-id": null, "source-label": "HubSpot Predictive Contact Scoring Model", "updated-by-user-id": null, "timestamp": 1638773612582, "selected": false, "property": "hs_predictivescoringtier", "vid": 1251}, "emitted_at": 1655361780798} +{"stream": "property_history", "data": {"value": "tier_1", "source-type": "CALCULATED", "source-id": null, "source-label": "HubSpot Predictive Contact Scoring Model", "updated-by-user-id": null, "timestamp": 1637664483985, "selected": false, "property": "hs_predictivescoringtier", "vid": 1251}, "emitted_at": 1655361780798} +{"stream": "property_history", "data": {"value": "555-122-2323", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044989563, "selected": false, "property": "phone", "vid": 1251}, "emitted_at": 1655361780798} +{"stream": "property_history", "data": {"value": "3.79", "source-type": "CALCULATED", "source-id": null, "source-label": "HubSpot Predictive Contact Scoring Model", "updated-by-user-id": null, "timestamp": 1637664483985, "selected": false, "property": "hs_predictivecontactscore_v2", "vid": 1251}, "emitted_at": 1655361780798} +{"stream": "property_history", "data": {"value": "true", "source-type": "CALCULATED", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044989572, 
"selected": false, "property": "hs_is_contact", "vid": 1251}, "emitted_at": 1655361780799} +{"stream": "property_history", "data": {"value": "0", "source-type": "CALCULATED", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044989827, "selected": false, "property": "num_conversion_events", "vid": 1251}, "emitted_at": 1655361780799} +{"stream": "property_history", "data": {"value": "1251", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044989572, "selected": false, "property": "hs_object_id", "vid": 1251}, "emitted_at": 1655361780799} +{"stream": "property_history", "data": {"value": "0", "source-type": "ANALYTICS", "source-id": "ContactAnalyticsDetailsUpdateWorker", "source-label": null, "updated-by-user-id": null, "timestamp": 1634044992314, "selected": false, "property": "hs_analytics_num_event_completions", "vid": 1251}, "emitted_at": 1655361780800} +{"stream": "property_history", "data": {"value": "API", "source-type": "ANALYTICS", "source-id": "ContactAnalyticsDetailsUpdateWorker", "source-label": null, "updated-by-user-id": null, "timestamp": 1634044992314, "selected": false, "property": "hs_analytics_source_data_1", "vid": 1251}, "emitted_at": 1655361780800} +{"stream": "property_history", "data": {"value": "subscriber", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044989572, "selected": false, "property": "lifecyclestage", "vid": 1251}, "emitted_at": 1655361780800} +{"stream": "property_history", "data": {"value": "API", "source-type": "ANALYTICS", "source-id": "ContactAnalyticsDetailsUpdateWorker", "source-label": null, "updated-by-user-id": null, "timestamp": 1634044993567, "selected": false, "property": "hs_latest_source_data_1", "vid": 1451}, "emitted_at": 1655361780800} +{"stream": "property_history", "data": {"value": "true", "source-type": "CALCULATED", "source-id": 
"CalculatedPropertyComputer", "source-label": null, "updated-by-user-id": null, "timestamp": 1634044992168, "selected": false, "property": "hs_is_unworked", "vid": 1451}, "emitted_at": 1655361780801} +{"stream": "property_history", "data": {"value": "test contact 17", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044992160, "selected": false, "property": "firstname", "vid": 1451}, "emitted_at": 1655361780801} +{"stream": "property_history", "data": {"value": "Cambridge", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044992160, "selected": false, "property": "city", "vid": 1451}, "emitted_at": 1655361780801} +{"stream": "property_history", "data": {"value": "0", "source-type": "CALCULATED", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044992586, "selected": false, "property": "num_unique_conversion_events", "vid": 1451}, "emitted_at": 1655361780801} +{"stream": "property_history", "data": {"value": "OFFLINE", "source-type": "ANALYTICS", "source-id": "ContactAnalyticsDetailsUpdateWorker", "source-label": null, "updated-by-user-id": null, "timestamp": 1634044993567, "selected": false, "property": "hs_latest_source", "vid": 1451}, "emitted_at": 1655361780801} +{"stream": "property_history", "data": {"value": "contacts-lifecycle-pipeline", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044992168, "selected": false, "property": "hs_pipeline", "vid": 1451}, "emitted_at": 1655361780802} +{"stream": "property_history", "data": {"value": "0.0", "source-type": "ANALYTICS", "source-id": "ContactAnalyticsDetailsUpdateWorker", "source-label": null, "updated-by-user-id": null, "timestamp": 1634044993567, "selected": false, "property": "hs_analytics_revenue", "vid": 1451}, "emitted_at": 1655361780802} +{"stream": "property_history", "data": {"value": 
"1634044992168", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044992168, "selected": false, "property": "createdate", "vid": 1451}, "emitted_at": 1655361780802} +{"stream": "property_history", "data": {"value": "0", "source-type": "ANALYTICS", "source-id": "ContactAnalyticsDetailsUpdateWorker", "source-label": null, "updated-by-user-id": null, "timestamp": 1634044993567, "selected": false, "property": "hs_analytics_num_visits", "vid": 1451}, "emitted_at": 1655361780802} +{"stream": "property_history", "data": {"value": "0", "source-type": "CALCULATED", "source-id": "RollupProperties", "source-label": null, "updated-by-user-id": null, "timestamp": 1634044992806, "selected": false, "property": "hs_sequences_actively_enrolled_count", "vid": 1451}, "emitted_at": 1655361780803} +{"stream": "property_history", "data": {"value": "false", "source-type": "MIGRATION", "source-id": "BulkSetMarketableStatusProcessor", "source-label": null, "updated-by-user-id": null, "timestamp": 1635860523515, "selected": false, "property": "hs_marketable_until_renewal", "vid": 1451}, "emitted_at": 1655361780803} +{"stream": "property_history", "data": {"value": "false", "source-type": "MIGRATION", "source-id": "BulkSetMarketableStatusProcessor", "source-label": null, "updated-by-user-id": null, "timestamp": 1635860523515, "selected": false, "property": "hs_marketable_status", "vid": 1451}, "emitted_at": 1655361780803} +{"stream": "property_history", "data": {"value": "OFFLINE", "source-type": "ANALYTICS", "source-id": "ContactAnalyticsDetailsUpdateWorker", "source-label": null, "updated-by-user-id": null, "timestamp": 1634044993567, "selected": false, "property": "hs_analytics_source", "vid": 1451}, "emitted_at": 1655361780803} +{"stream": "property_history", "data": {"value": "1634044992168", "source-type": "CALCULATED", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044992168, "selected": 
false, "property": "hs_date_entered_subscriber", "vid": 1451}, "emitted_at": 1655361780804} +{"stream": "property_history", "data": {"value": "5551222323", "source-type": "CALCULATED", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044992168, "selected": false, "property": "hs_searchable_calculated_phone_number", "vid": 1451}, "emitted_at": 1655361780804} +{"stream": "property_history", "data": {"value": "hubspot.com", "source-type": "CALCULATED", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044992168, "selected": false, "property": "hs_email_domain", "vid": 1451}, "emitted_at": 1655361780804} +{"stream": "property_history", "data": {"value": "0", "source-type": "ANALYTICS", "source-id": "ContactAnalyticsDetailsUpdateWorker", "source-label": null, "updated-by-user-id": null, "timestamp": 1634044993567, "selected": false, "property": "hs_analytics_num_page_views", "vid": 1451}, "emitted_at": 1655361780804} +{"stream": "property_history", "data": {"value": "HubSpot Test", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044992160, "selected": false, "property": "company", "vid": 1451}, "emitted_at": 1655361780804} +{"stream": "property_history", "data": {"value": "MA", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044992160, "selected": false, "property": "state", "vid": 1451}, "emitted_at": 1655361780805} +{"stream": "property_history", "data": {"value": "testingapicontact_17@hubspot.com", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044992168, "selected": false, "property": "email", "vid": 1451}, "emitted_at": 1655361780805} +{"stream": "property_history", "data": {"value": "02139", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044992160, 
"selected": false, "property": "zip", "vid": 1451}, "emitted_at": 1655361780805} +{"stream": "property_history", "data": {"value": "http://hubspot.com", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044992160, "selected": false, "property": "website", "vid": 1451}, "emitted_at": 1655361780805} +{"stream": "property_history", "data": {"value": "17 First Street", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044992160, "selected": false, "property": "address", "vid": 1451}, "emitted_at": 1655361780805} +{"stream": "property_history", "data": {"value": "1634044992168", "source-type": "ANALYTICS", "source-id": "ContactAnalyticsDetailsUpdateWorker", "source-label": null, "updated-by-user-id": null, "timestamp": 1634044993567, "selected": false, "property": "hs_analytics_first_timestamp", "vid": 1451}, "emitted_at": 1655361780806} +{"stream": "property_history", "data": {"value": "1634044992168", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044992168, "selected": false, "property": "hs_lifecyclestage_subscriber_date", "vid": 1451}, "emitted_at": 1655361780806} +{"stream": "property_history", "data": {"value": "0", "source-type": "ANALYTICS", "source-id": "ContactAnalyticsDetailsUpdateWorker", "source-label": null, "updated-by-user-id": null, "timestamp": 1634044993567, "selected": false, "property": "hs_analytics_average_page_views", "vid": 1451}, "emitted_at": 1655361780806} +{"stream": "property_history", "data": {"value": "testerson number 17", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044992160, "selected": false, "property": "lastname", "vid": 1451}, "emitted_at": 1655361780806} +{"stream": "property_history", "data": {"value": "1451", "source-type": "API", "source-id": null, "source-label": null, 
"updated-by-user-id": null, "timestamp": 1634044992168, "selected": false, "property": "hs_all_contact_vids", "vid": 1451}, "emitted_at": 1655361780806} +{"stream": "property_history", "data": {"value": "tier_3", "source-type": "CALCULATED", "source-id": null, "source-label": "HubSpot Predictive Contact Scoring Model", "updated-by-user-id": null, "timestamp": 1638773546271, "selected": false, "property": "hs_predictivescoringtier", "vid": 1451}, "emitted_at": 1655361780807} +{"stream": "property_history", "data": {"value": "tier_1", "source-type": "CALCULATED", "source-id": null, "source-label": "HubSpot Predictive Contact Scoring Model", "updated-by-user-id": null, "timestamp": 1637664413718, "selected": false, "property": "hs_predictivescoringtier", "vid": 1451}, "emitted_at": 1655361780807} +{"stream": "property_history", "data": {"value": "555-122-2323", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044992160, "selected": false, "property": "phone", "vid": 1451}, "emitted_at": 1655361780807} +{"stream": "property_history", "data": {"value": "3.79", "source-type": "CALCULATED", "source-id": null, "source-label": "HubSpot Predictive Contact Scoring Model", "updated-by-user-id": null, "timestamp": 1637664413718, "selected": false, "property": "hs_predictivecontactscore_v2", "vid": 1451}, "emitted_at": 1655361780807} +{"stream": "property_history", "data": {"value": "true", "source-type": "CALCULATED", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044992168, "selected": false, "property": "hs_is_contact", "vid": 1451}, "emitted_at": 1655361780807} +{"stream": "property_history", "data": {"value": "0", "source-type": "CALCULATED", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044992586, "selected": false, "property": "num_conversion_events", "vid": 1451}, "emitted_at": 1655361780808} +{"stream": "property_history", "data": 
{"value": "1451", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044992168, "selected": false, "property": "hs_object_id", "vid": 1451}, "emitted_at": 1655361780808} +{"stream": "property_history", "data": {"value": "0", "source-type": "ANALYTICS", "source-id": "ContactAnalyticsDetailsUpdateWorker", "source-label": null, "updated-by-user-id": null, "timestamp": 1634044993567, "selected": false, "property": "hs_analytics_num_event_completions", "vid": 1451}, "emitted_at": 1655361780808} +{"stream": "property_history", "data": {"value": "API", "source-type": "ANALYTICS", "source-id": "ContactAnalyticsDetailsUpdateWorker", "source-label": null, "updated-by-user-id": null, "timestamp": 1634044993567, "selected": false, "property": "hs_analytics_source_data_1", "vid": 1451}, "emitted_at": 1655361780808} +{"stream": "property_history", "data": {"value": "subscriber", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044992168, "selected": false, "property": "lifecyclestage", "vid": 1451}, "emitted_at": 1655361780808} +{"stream": "property_history", "data": {"value": "API", "source-type": "ANALYTICS", "source-id": "ContactAnalyticsDetailsUpdateWorker", "source-label": null, "updated-by-user-id": null, "timestamp": 1634044991654, "selected": false, "property": "hs_latest_source_data_1", "vid": 1351}, "emitted_at": 1655361780808} +{"stream": "property_history", "data": {"value": "true", "source-type": "CALCULATED", "source-id": "CalculatedPropertyComputer", "source-label": null, "updated-by-user-id": null, "timestamp": 1634044990617, "selected": false, "property": "hs_is_unworked", "vid": 1351}, "emitted_at": 1655361780809} +{"stream": "property_history", "data": {"value": "test contact 15", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044990514, "selected": false, "property": "firstname", 
"vid": 1351}, "emitted_at": 1655361780809} +{"stream": "property_history", "data": {"value": "Cambridge", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044990514, "selected": false, "property": "city", "vid": 1351}, "emitted_at": 1655361780809} +{"stream": "property_history", "data": {"value": "0", "source-type": "CALCULATED", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044990943, "selected": false, "property": "num_unique_conversion_events", "vid": 1351}, "emitted_at": 1655361780809} +{"stream": "property_history", "data": {"value": "OFFLINE", "source-type": "ANALYTICS", "source-id": "ContactAnalyticsDetailsUpdateWorker", "source-label": null, "updated-by-user-id": null, "timestamp": 1634044991654, "selected": false, "property": "hs_latest_source", "vid": 1351}, "emitted_at": 1655361780809} +{"stream": "property_history", "data": {"value": "contacts-lifecycle-pipeline", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044990617, "selected": false, "property": "hs_pipeline", "vid": 1351}, "emitted_at": 1655361780810} +{"stream": "property_history", "data": {"value": "0.0", "source-type": "ANALYTICS", "source-id": "ContactAnalyticsDetailsUpdateWorker", "source-label": null, "updated-by-user-id": null, "timestamp": 1634044991654, "selected": false, "property": "hs_analytics_revenue", "vid": 1351}, "emitted_at": 1655361780810} +{"stream": "property_history", "data": {"value": "1634044990617", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044990617, "selected": false, "property": "createdate", "vid": 1351}, "emitted_at": 1655361780810} +{"stream": "property_history", "data": {"value": "0", "source-type": "ANALYTICS", "source-id": "ContactAnalyticsDetailsUpdateWorker", "source-label": null, "updated-by-user-id": null, "timestamp": 1634044991654, 
"selected": false, "property": "hs_analytics_num_visits", "vid": 1351}, "emitted_at": 1655361780810} +{"stream": "property_history", "data": {"value": "0", "source-type": "CALCULATED", "source-id": "RollupProperties", "source-label": null, "updated-by-user-id": null, "timestamp": 1634045002341, "selected": false, "property": "hs_sequences_actively_enrolled_count", "vid": 1351}, "emitted_at": 1655361780811} +{"stream": "property_history", "data": {"value": "false", "source-type": "MIGRATION", "source-id": "BulkSetMarketableStatusProcessor", "source-label": null, "updated-by-user-id": null, "timestamp": 1635860523515, "selected": false, "property": "hs_marketable_until_renewal", "vid": 1351}, "emitted_at": 1655361780811} +{"stream": "property_history", "data": {"value": "false", "source-type": "MIGRATION", "source-id": "BulkSetMarketableStatusProcessor", "source-label": null, "updated-by-user-id": null, "timestamp": 1635860523515, "selected": false, "property": "hs_marketable_status", "vid": 1351}, "emitted_at": 1655361780811} +{"stream": "property_history", "data": {"value": "OFFLINE", "source-type": "ANALYTICS", "source-id": "ContactAnalyticsDetailsUpdateWorker", "source-label": null, "updated-by-user-id": null, "timestamp": 1634044991654, "selected": false, "property": "hs_analytics_source", "vid": 1351}, "emitted_at": 1655361780811} +{"stream": "property_history", "data": {"value": "1634044990617", "source-type": "CALCULATED", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044990617, "selected": false, "property": "hs_date_entered_subscriber", "vid": 1351}, "emitted_at": 1655361780811} +{"stream": "property_history", "data": {"value": "5551222323", "source-type": "CALCULATED", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044990617, "selected": false, "property": "hs_searchable_calculated_phone_number", "vid": 1351}, "emitted_at": 1655361780811} +{"stream": "property_history", "data": 
{"value": "hubspot.com", "source-type": "CALCULATED", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044990617, "selected": false, "property": "hs_email_domain", "vid": 1351}, "emitted_at": 1655361780812} +{"stream": "property_history", "data": {"value": "0", "source-type": "ANALYTICS", "source-id": "ContactAnalyticsDetailsUpdateWorker", "source-label": null, "updated-by-user-id": null, "timestamp": 1634044991654, "selected": false, "property": "hs_analytics_num_page_views", "vid": 1351}, "emitted_at": 1655361780812} +{"stream": "property_history", "data": {"value": "HubSpot Test", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044990514, "selected": false, "property": "company", "vid": 1351}, "emitted_at": 1655361780812} +{"stream": "property_history", "data": {"value": "MA", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044990514, "selected": false, "property": "state", "vid": 1351}, "emitted_at": 1655361780812} +{"stream": "property_history", "data": {"value": "testingapicontact_15@hubspot.com", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044990617, "selected": false, "property": "email", "vid": 1351}, "emitted_at": 1655361780812} +{"stream": "property_history", "data": {"value": "02139", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044990514, "selected": false, "property": "zip", "vid": 1351}, "emitted_at": 1655361780812} +{"stream": "property_history", "data": {"value": "http://hubspot.com", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044990514, "selected": false, "property": "website", "vid": 1351}, "emitted_at": 1655361780813} +{"stream": "property_history", "data": {"value": "15 First Street", "source-type": 
"API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044990514, "selected": false, "property": "address", "vid": 1351}, "emitted_at": 1655361780813} +{"stream": "property_history", "data": {"value": "1634044990617", "source-type": "ANALYTICS", "source-id": "ContactAnalyticsDetailsUpdateWorker", "source-label": null, "updated-by-user-id": null, "timestamp": 1634044993193, "selected": false, "property": "hs_analytics_first_timestamp", "vid": 1351}, "emitted_at": 1655361780813} +{"stream": "property_history", "data": {"value": "1634044990651", "source-type": "ANALYTICS", "source-id": "ContactAnalyticsDetailsUpdateWorker", "source-label": null, "updated-by-user-id": null, "timestamp": 1634044991654, "selected": false, "property": "hs_analytics_first_timestamp", "vid": 1351}, "emitted_at": 1655361780813} +{"stream": "property_history", "data": {"value": "1634044990617", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044990617, "selected": false, "property": "hs_lifecyclestage_subscriber_date", "vid": 1351}, "emitted_at": 1655361780813} +{"stream": "property_history", "data": {"value": "0", "source-type": "ANALYTICS", "source-id": "ContactAnalyticsDetailsUpdateWorker", "source-label": null, "updated-by-user-id": null, "timestamp": 1634044991654, "selected": false, "property": "hs_analytics_average_page_views", "vid": 1351}, "emitted_at": 1655361780813} +{"stream": "property_history", "data": {"value": "testerson number 15", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044990514, "selected": false, "property": "lastname", "vid": 1351}, "emitted_at": 1655361780813} +{"stream": "property_history", "data": {"value": "1351", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044990617, "selected": false, "property": "hs_all_contact_vids", "vid": 1351}, 
"emitted_at": 1655361780814} +{"stream": "property_history", "data": {"value": "tier_3", "source-type": "CALCULATED", "source-id": null, "source-label": "HubSpot Predictive Contact Scoring Model", "updated-by-user-id": null, "timestamp": 1638773576603, "selected": false, "property": "hs_predictivescoringtier", "vid": 1351}, "emitted_at": 1655361780814} +{"stream": "property_history", "data": {"value": "tier_1", "source-type": "CALCULATED", "source-id": null, "source-label": "HubSpot Predictive Contact Scoring Model", "updated-by-user-id": null, "timestamp": 1637664425658, "selected": false, "property": "hs_predictivescoringtier", "vid": 1351}, "emitted_at": 1655361780814} +{"stream": "property_history", "data": {"value": "555-122-2323", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044990514, "selected": false, "property": "phone", "vid": 1351}, "emitted_at": 1655361780814} +{"stream": "property_history", "data": {"value": "3.79", "source-type": "CALCULATED", "source-id": null, "source-label": "HubSpot Predictive Contact Scoring Model", "updated-by-user-id": null, "timestamp": 1637664425658, "selected": false, "property": "hs_predictivecontactscore_v2", "vid": 1351}, "emitted_at": 1655361780814} +{"stream": "property_history", "data": {"value": "true", "source-type": "CALCULATED", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044990617, "selected": false, "property": "hs_is_contact", "vid": 1351}, "emitted_at": 1655361780814} +{"stream": "property_history", "data": {"value": "0", "source-type": "CALCULATED", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044990943, "selected": false, "property": "num_conversion_events", "vid": 1351}, "emitted_at": 1655361780815} +{"stream": "property_history", "data": {"value": "1351", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 
1634044990617, "selected": false, "property": "hs_object_id", "vid": 1351}, "emitted_at": 1655361780815} +{"stream": "property_history", "data": {"value": "0", "source-type": "ANALYTICS", "source-id": "ContactAnalyticsDetailsUpdateWorker", "source-label": null, "updated-by-user-id": null, "timestamp": 1634044991654, "selected": false, "property": "hs_analytics_num_event_completions", "vid": 1351}, "emitted_at": 1655361780815} +{"stream": "property_history", "data": {"value": "API", "source-type": "ANALYTICS", "source-id": "ContactAnalyticsDetailsUpdateWorker", "source-label": null, "updated-by-user-id": null, "timestamp": 1634044991654, "selected": false, "property": "hs_analytics_source_data_1", "vid": 1351}, "emitted_at": 1655361780815} +{"stream": "property_history", "data": {"value": "subscriber", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044990617, "selected": false, "property": "lifecyclestage", "vid": 1351}, "emitted_at": 1655361780815} +{"stream": "property_history", "data": {"value": "Freely given consent from contact;Legitimate interest ā€“ existing customer;Legitimate interest ā€“ prospect/lead;Legitimate interest - other;Performance of a contract;Not applicable", "source-type": "CRM_UI", "source-id": "userId:12282590", "source-label": null, "updated-by-user-id": 12282590, "timestamp": 1616173106523, "selected": false, "property": "hs_legal_basis", "vid": 501}, "emitted_at": 1655361780815} +{"stream": "property_history", "data": {"value": "0", "source-type": "MIGRATION", "source-id": "BackfillReadtimeCalculatedPropertiesJob", "source-label": null, "updated-by-user-id": null, "timestamp": 1628718463905, "selected": false, "property": "num_unique_conversion_events", "vid": 501}, "emitted_at": 1655361780816} +{"stream": "property_history", "data": {"value": "OFFLINE", "source-type": "MIGRATION", "source-id": "BackfillContactUpdatesKafka", "source-label": null, "updated-by-user-id": null, 
"timestamp": 1641216341562, "selected": false, "property": "hs_latest_source", "vid": 501}, "emitted_at": 1655361780816} +{"stream": "property_history", "data": {"value": "0.0", "source-type": "ANALYTICS", "source-id": "ContactAnalyticsDetailsUpdateWorker", "source-label": null, "updated-by-user-id": null, "timestamp": 1616173119734, "selected": false, "property": "hs_analytics_revenue", "vid": 501}, "emitted_at": 1655361780816} +{"stream": "property_history", "data": {"value": "1616173106539", "source-type": "CONTACTS", "source-id": "CRM_UI", "source-label": null, "updated-by-user-id": 12282590, "timestamp": 1616173106539, "selected": false, "property": "createdate", "vid": 501}, "emitted_at": 1655361780816} +{"stream": "property_history", "data": {"value": "52550153", "source-type": "CRM_UI", "source-id": "userId:12282590", "source-label": null, "updated-by-user-id": 12282590, "timestamp": 1616173106523, "selected": false, "property": "hubspot_owner_id", "vid": 501}, "emitted_at": 1655361780816} +{"stream": "property_history", "data": {"value": "1621592488593", "source-type": "DEALS", "source-id": "DealRollupProperties", "source-label": null, "updated-by-user-id": null, "timestamp": 1621592560527, "selected": false, "property": "first_deal_created_date", "vid": 501}, "emitted_at": 1655361780816} +{"stream": "property_history", "data": {"value": "1616173106523", "source-type": "CALCULATED", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1616173106523, "selected": false, "property": "hs_date_entered_subscriber", "vid": 501}, "emitted_at": 1655361780817} +{"stream": "property_history", "data": {"value": "1", "source-type": "DEALS", "source-id": "DealRollupProperties", "source-label": null, "updated-by-user-id": null, "timestamp": 1621592560527, "selected": false, "property": "num_associated_deals", "vid": 501}, "emitted_at": 1655361780817} +{"stream": "property_history", "data": {"value": "0", "source-type": "ANALYTICS", 
"source-id": "ContactAnalyticsDetailsUpdateWorker", "source-label": null, "updated-by-user-id": null, "timestamp": 1616173119734, "selected": false, "property": "hs_analytics_num_page_views", "vid": 501}, "emitted_at": 1655361780817} +{"stream": "property_history", "data": {"value": "0", "source-type": "CALCULATED", "source-id": "CalculatedPropertyComputer", "source-label": null, "updated-by-user-id": null, "timestamp": 1616173106539, "selected": false, "property": "hs_count_is_worked", "vid": 501}, "emitted_at": 1655361780817} +{"stream": "property_history", "data": {"value": "5419382054", "source-type": "CALCULATED", "source-id": "CalculatedPropertyComputer", "source-label": null, "updated-by-user-id": null, "timestamp": 1621592560683, "selected": false, "property": "hs_time_between_contact_creation_and_deal_creation", "vid": 501}, "emitted_at": 1655361780817} +{"stream": "property_history", "data": {"value": "america_slash_los_angeles", "source-type": "MIGRATION", "source-id": "BackfillContactTimeZone", "source-label": null, "updated-by-user-id": null, "timestamp": 1637146781014, "selected": false, "property": "hs_timezone", "vid": 501}, "emitted_at": 1655361780817} +{"stream": "property_history", "data": {"value": "america_slash_los_angeles", "source-type": "HEISENBERG", "source-id": "EMAIL_OPEN (9ff40077-4b32-3ec5-9aea-d69fd12683b9)", "source-label": null, "updated-by-user-id": null, "timestamp": 1616173245743, "selected": false, "property": "hs_ip_timezone", "vid": 501}, "emitted_at": 1655361780818} +{"stream": "property_history", "data": {"value": "1616173106523", "source-type": "CRM_UI", "source-id": "userId:12282590", "source-label": null, "updated-by-user-id": 12282590, "timestamp": 1616173106523, "selected": false, "property": "hs_lifecyclestage_subscriber_date", "vid": 501}, "emitted_at": 1655361780818} +{"stream": "property_history", "data": {"value": "0", "source-type": "ANALYTICS", "source-id": "ContactAnalyticsDetailsUpdateWorker", "source-label": 
null, "updated-by-user-id": null, "timestamp": 1616173119734, "selected": false, "property": "hs_analytics_average_page_views", "vid": 501}, "emitted_at": 1655361780818} +{"stream": "property_history", "data": {"value": "Kulak", "source-type": "CRM_UI", "source-id": "userId:12282590", "source-label": null, "updated-by-user-id": 12282590, "timestamp": 1616173106523, "selected": false, "property": "lastname", "vid": 501}, "emitted_at": 1655361780818} +{"stream": "property_history", "data": {"value": "501", "source-type": "MIGRATION", "source-id": "BackfillReadtimeCalculatedPropertiesJob", "source-label": null, "updated-by-user-id": null, "timestamp": 1628718463905, "selected": false, "property": "hs_all_contact_vids", "vid": 501}, "emitted_at": 1655361780818} +{"stream": "property_history", "data": {"value": "1621592560659", "source-type": "CALCULATED", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1621592560659, "selected": false, "property": "hs_date_exited_subscriber", "vid": 501}, "emitted_at": 1655361780818} +{"stream": "property_history", "data": {"value": "1621592560659", "source-type": "CALCULATED", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1621592560659, "selected": false, "property": "hs_date_entered_opportunity", "vid": 501}, "emitted_at": 1655361780819} +{"stream": "property_history", "data": {"value": "1111111111", "source-type": "CRM_UI", "source-id": "userId:12282590", "source-label": null, "updated-by-user-id": 12282590, "timestamp": 1616173106523, "selected": false, "property": "phone", "vid": 501}, "emitted_at": 1655361780819} +{"stream": "property_history", "data": {"value": "1.22", "source-type": "CALCULATED", "source-id": null, "source-label": "HubSpot Predictive Contact Scoring Model", "updated-by-user-id": null, "timestamp": 1637664418422, "selected": false, "property": "hs_predictivecontactscore_v2", "vid": 501}, "emitted_at": 1655361780819} +{"stream": 
"property_history", "data": {"value": "2.45", "source-type": "CALCULATED", "source-id": null, "source-label": "HubSpot Predictive Contact Scoring Model", "updated-by-user-id": null, "timestamp": 1621592598371, "selected": false, "property": "hs_predictivecontactscore_v2", "vid": 501}, "emitted_at": 1655361780819} +{"stream": "property_history", "data": {"value": "3.6", "source-type": "CALCULATED", "source-id": null, "source-label": "HubSpot Predictive Contact Scoring Model", "updated-by-user-id": null, "timestamp": 1616173139169, "selected": false, "property": "hs_predictivecontactscore_v2", "vid": 501}, "emitted_at": 1655361780819} +{"stream": "property_history", "data": {"value": "true", "source-type": "CALCULATED", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1616173106539, "selected": false, "property": "hs_is_contact", "vid": 501}, "emitted_at": 1655361780819} +{"stream": "property_history", "data": {"value": "0", "source-type": "MIGRATION", "source-id": "BackfillReadtimeCalculatedPropertiesJob", "source-label": null, "updated-by-user-id": null, "timestamp": 1628718463905, "selected": false, "property": "num_conversion_events", "vid": 501}, "emitted_at": 1655361780819} +{"stream": "property_history", "data": {"value": "501", "source-type": "MIGRATION", "source-id": "BackfillReadtimeCalculatedPropertiesJob", "source-label": null, "updated-by-user-id": null, "timestamp": 1628718463905, "selected": false, "property": "hs_object_id", "vid": 501}, "emitted_at": 1655361780820} +{"stream": "property_history", "data": {"value": "0", "source-type": "ANALYTICS", "source-id": "ContactAnalyticsDetailsUpdateWorker", "source-label": null, "updated-by-user-id": null, "timestamp": 1616173119734, "selected": false, "property": "hs_analytics_num_event_completions", "vid": 501}, "emitted_at": 1655361780820} +{"stream": "property_history", "data": {"value": "CRM_UI", "source-type": "MIGRATION", "source-id": "BackfillContactUpdatesKafka", 
"source-label": null, "updated-by-user-id": null, "timestamp": 1641216341562, "selected": false, "property": "hs_latest_source_data_2", "vid": 501}, "emitted_at": 1655361780820} +{"stream": "property_history", "data": {"value": "CONTACTS", "source-type": "MIGRATION", "source-id": "BackfillContactUpdatesKafka", "source-label": null, "updated-by-user-id": null, "timestamp": 1641216341562, "selected": false, "property": "hs_latest_source_data_1", "vid": 501}, "emitted_at": 1655361780820} +{"stream": "property_history", "data": {"value": "true", "source-type": "CALCULATED", "source-id": "CalculatedPropertyComputer", "source-label": null, "updated-by-user-id": null, "timestamp": 1616173106539, "selected": false, "property": "hs_is_unworked", "vid": 501}, "emitted_at": 1655361780820} +{"stream": "property_history", "data": {"value": "Eugene", "source-type": "CRM_UI", "source-id": "userId:12282590", "source-label": null, "updated-by-user-id": 12282590, "timestamp": 1616173106523, "selected": false, "property": "firstname", "vid": 501}, "emitted_at": 1655361780820} +{"stream": "property_history", "data": {"value": "california", "source-type": "HEISENBERG", "source-id": "EMAIL_OPEN (9ff40077-4b32-3ec5-9aea-d69fd12683b9)", "source-label": null, "updated-by-user-id": null, "timestamp": 1616173245743, "selected": false, "property": "ip_state", "vid": 501}, "emitted_at": 1655361780820} +{"stream": "property_history", "data": {"value": "united states", "source-type": "HEISENBERG", "source-id": "EMAIL_OPEN (9ff40077-4b32-3ec5-9aea-d69fd12683b9)", "source-label": null, "updated-by-user-id": null, "timestamp": 1616173245743, "selected": false, "property": "ip_country", "vid": 501}, "emitted_at": 1655361780821} +{"stream": "property_history", "data": {"value": "contacts-lifecycle-pipeline", "source-type": "MIGRATION", "source-id": "BackfillHsPipelineForContacts", "source-label": null, "updated-by-user-id": null, "timestamp": 1627942377946, "selected": false, "property": 
"hs_pipeline", "vid": 501}, "emitted_at": 1655361780821} +{"stream": "property_history", "data": {"value": "1621592560659", "source-type": "DEALS", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1621592560659, "selected": false, "property": "hs_lifecyclestage_opportunity_date", "vid": 501}, "emitted_at": 1655361780821} +{"stream": "property_history", "data": {"value": "0", "source-type": "ANALYTICS", "source-id": "ContactAnalyticsDetailsUpdateWorker", "source-label": null, "updated-by-user-id": null, "timestamp": 1616173119734, "selected": false, "property": "hs_analytics_num_visits", "vid": 501}, "emitted_at": 1655361780821} +{"stream": "property_history", "data": {"value": "ca", "source-type": "HEISENBERG", "source-id": "EMAIL_OPEN (9ff40077-4b32-3ec5-9aea-d69fd12683b9)", "source-label": null, "updated-by-user-id": null, "timestamp": 1616173245743, "selected": false, "property": "ip_state_code", "vid": 501}, "emitted_at": 1655361780821} +{"stream": "property_history", "data": {"value": "false", "source-type": "MIGRATION", "source-id": "MarketableContactsAuditJob", "source-label": null, "updated-by-user-id": null, "timestamp": 1616735774440, "selected": false, "property": "hs_marketable_until_renewal", "vid": 501}, "emitted_at": 1655361780821} +{"stream": "property_history", "data": {"value": "false", "source-type": "MIGRATION", "source-id": "MarketableContactsAuditJob", "source-label": null, "updated-by-user-id": null, "timestamp": 1616735774440, "selected": false, "property": "hs_marketable_status", "vid": 501}, "emitted_at": 1655361780821} +{"stream": "property_history", "data": {"value": "OFFLINE", "source-type": "ANALYTICS", "source-id": "ContactAnalyticsDetailsUpdateWorker", "source-label": null, "updated-by-user-id": null, "timestamp": 1616173119734, "selected": false, "property": "hs_analytics_source", "vid": 501}, "emitted_at": 1655361780822} +{"stream": "property_history", "data": {"value": "1111111111", "source-type": 
"MIGRATION", "source-id": "BackfillReadtimeCalculatedPropertiesJob", "source-label": null, "updated-by-user-id": null, "timestamp": 1628718463905, "selected": false, "property": "hs_searchable_calculated_phone_number", "vid": 501}, "emitted_at": 1655361780822} +{"stream": "property_history", "data": {"value": "gmail.com", "source-type": "MIGRATION", "source-id": "BackfillReadtimeCalculatedPropertiesJob", "source-label": null, "updated-by-user-id": null, "timestamp": 1628718463905, "selected": false, "property": "hs_email_domain", "vid": 501}, "emitted_at": 1655361780822} +{"stream": "property_history", "data": {"value": "52550153", "source-type": "CALCULATED", "source-id": "PermissionsUpdater", "source-label": null, "updated-by-user-id": null, "timestamp": 1616173107402, "selected": false, "property": "hs_all_owner_ids", "vid": 501}, "emitted_at": 1655361780822} +{"stream": "property_history", "data": {"value": "kulak.eugene@gmail.com", "source-type": "CONTACTS", "source-id": "CRM_UI", "source-label": null, "updated-by-user-id": 12282590, "timestamp": 1616173106539, "selected": false, "property": "email", "vid": 501}, "emitted_at": 1655361780822} +{"stream": "property_history", "data": {"value": "us", "source-type": "HEISENBERG", "source-id": "EMAIL_OPEN (9ff40077-4b32-3ec5-9aea-d69fd12683b9)", "source-label": null, "updated-by-user-id": null, "timestamp": 1616173245743, "selected": false, "property": "ip_country_code", "vid": 501}, "emitted_at": 1655361780822} +{"stream": "property_history", "data": {"value": "1616173106523", "source-type": "ANALYTICS", "source-id": "ContactAnalyticsDetailsUpdateWorker", "source-label": null, "updated-by-user-id": null, "timestamp": 1616173119734, "selected": false, "property": "hs_analytics_first_timestamp", "vid": 501}, "emitted_at": 1655361780822} +{"stream": "property_history", "data": {"value": "12282590", "source-type": "CALCULATED", "source-id": "PermissionsUpdater", "source-label": null, "updated-by-user-id": null, 
"timestamp": 1616173107402, "selected": false, "property": "hs_user_ids_of_all_owners", "vid": 501}, "emitted_at": 1655361780822} +{"stream": "property_history", "data": {"value": "tier_4", "source-type": "CALCULATED", "source-id": null, "source-label": "HubSpot Predictive Contact Scoring Model", "updated-by-user-id": null, "timestamp": 1637664418422, "selected": false, "property": "hs_predictivescoringtier", "vid": 501}, "emitted_at": 1655361780823} +{"stream": "property_history", "data": {"value": "tier_3", "source-type": "CALCULATED", "source-id": null, "source-label": "HubSpot Predictive Contact Scoring Model", "updated-by-user-id": null, "timestamp": 1621592598371, "selected": false, "property": "hs_predictivescoringtier", "vid": 501}, "emitted_at": 1655361780823} +{"stream": "property_history", "data": {"value": "tier_1", "source-type": "CALCULATED", "source-id": null, "source-label": "HubSpot Predictive Contact Scoring Model", "updated-by-user-id": null, "timestamp": 1616173139169, "selected": false, "property": "hs_predictivescoringtier", "vid": 501}, "emitted_at": 1655361780823} +{"stream": "property_history", "data": {"value": "mountain view", "source-type": "HEISENBERG", "source-id": "EMAIL_OPEN (9ff40077-4b32-3ec5-9aea-d69fd12683b9)", "source-label": null, "updated-by-user-id": null, "timestamp": 1616173245743, "selected": false, "property": "ip_city", "vid": 501}, "emitted_at": 1655361780823} +{"stream": "property_history", "data": {"value": "1616173106523", "source-type": "CRM_UI", "source-id": "userId:12282590", "source-label": null, "updated-by-user-id": 12282590, "timestamp": 1616173106523, "selected": false, "property": "hubspot_owner_assigneddate", "vid": 501}, "emitted_at": 1655361780823} +{"stream": "property_history", "data": {"value": "CRM_UI", "source-type": "ANALYTICS", "source-id": "ContactAnalyticsDetailsUpdateWorker", "source-label": null, "updated-by-user-id": null, "timestamp": 1616173119734, "selected": false, "property": 
"hs_analytics_source_data_2", "vid": 501}, "emitted_at": 1655361780823} +{"stream": "property_history", "data": {"value": "CONTACTS", "source-type": "ANALYTICS", "source-id": "ContactAnalyticsDetailsUpdateWorker", "source-label": null, "updated-by-user-id": null, "timestamp": 1616173119734, "selected": false, "property": "hs_analytics_source_data_1", "vid": 501}, "emitted_at": 1655361780823} +{"stream": "property_history", "data": {"value": "opportunity", "source-type": "DEALS", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1621592560659, "selected": false, "property": "lifecyclestage", "vid": 501}, "emitted_at": 1655361780823} +{"stream": "property_history", "data": {"value": "subscriber", "source-type": "CRM_UI", "source-id": "userId:12282590", "source-label": null, "updated-by-user-id": 12282590, "timestamp": 1616173106523, "selected": false, "property": "lifecyclestage", "vid": 501}, "emitted_at": 1655361780824} +{"stream": "property_history", "data": {"value": "1", "source-type": "CALCULATED", "source-id": "CalculatedPropertyComputer", "source-label": null, "updated-by-user-id": null, "timestamp": 1616173106539, "selected": false, "property": "hs_count_is_unworked", "vid": 501}, "emitted_at": 1655361780824} +{"stream": "property_history", "data": {"value": "API", "source-type": "ANALYTICS", "source-id": "ContactAnalyticsDetailsUpdateWorker", "source-label": null, "updated-by-user-id": null, "timestamp": 1634044975559, "selected": false, "property": "hs_latest_source_data_1", "vid": 601}, "emitted_at": 1655361780824} +{"stream": "property_history", "data": {"value": "true", "source-type": "CALCULATED", "source-id": "CalculatedPropertyComputer", "source-label": null, "updated-by-user-id": null, "timestamp": 1634044970930, "selected": false, "property": "hs_is_unworked", "vid": 601}, "emitted_at": 1655361780824} +{"stream": "property_history", "data": {"value": "test contact 0", "source-type": "API", "source-id": null, 
"source-label": null, "updated-by-user-id": null, "timestamp": 1634044970920, "selected": false, "property": "firstname", "vid": 601}, "emitted_at": 1655361780824} +{"stream": "property_history", "data": {"value": "Cambridge", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044970920, "selected": false, "property": "city", "vid": 601}, "emitted_at": 1655361780824} +{"stream": "property_history", "data": {"value": "0", "source-type": "CALCULATED", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044971482, "selected": false, "property": "num_unique_conversion_events", "vid": 601}, "emitted_at": 1655361780824} +{"stream": "property_history", "data": {"value": "OFFLINE", "source-type": "ANALYTICS", "source-id": "ContactAnalyticsDetailsUpdateWorker", "source-label": null, "updated-by-user-id": null, "timestamp": 1634044973755, "selected": false, "property": "hs_latest_source", "vid": 601}, "emitted_at": 1655361780825} +{"stream": "property_history", "data": {"value": "contacts-lifecycle-pipeline", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044970930, "selected": false, "property": "hs_pipeline", "vid": 601}, "emitted_at": 1655361780825} +{"stream": "property_history", "data": {"value": "0.0", "source-type": "ANALYTICS", "source-id": "ContactAnalyticsDetailsUpdateWorker", "source-label": null, "updated-by-user-id": null, "timestamp": 1634044973755, "selected": false, "property": "hs_analytics_revenue", "vid": 601}, "emitted_at": 1655361780825} +{"stream": "property_history", "data": {"value": "1634044970930", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044970930, "selected": false, "property": "createdate", "vid": 601}, "emitted_at": 1655361780825} +{"stream": "property_history", "data": {"value": "0", "source-type": "ANALYTICS", "source-id": 
"ContactAnalyticsDetailsUpdateWorker", "source-label": null, "updated-by-user-id": null, "timestamp": 1634044973755, "selected": false, "property": "hs_analytics_num_visits", "vid": 601}, "emitted_at": 1655361780825} +{"stream": "property_history", "data": {"value": "0", "source-type": "CALCULATED", "source-id": "RollupProperties", "source-label": null, "updated-by-user-id": null, "timestamp": 1634044972778, "selected": false, "property": "hs_sequences_actively_enrolled_count", "vid": 601}, "emitted_at": 1655361780825} +{"stream": "property_history", "data": {"value": "false", "source-type": "MIGRATION", "source-id": "BulkSetMarketableStatusProcessor", "source-label": null, "updated-by-user-id": null, "timestamp": 1635860523515, "selected": false, "property": "hs_marketable_until_renewal", "vid": 601}, "emitted_at": 1655361780825} +{"stream": "property_history", "data": {"value": "false", "source-type": "MIGRATION", "source-id": "BulkSetMarketableStatusProcessor", "source-label": null, "updated-by-user-id": null, "timestamp": 1635860523515, "selected": false, "property": "hs_marketable_status", "vid": 601}, "emitted_at": 1655361780826} +{"stream": "property_history", "data": {"value": "OFFLINE", "source-type": "ANALYTICS", "source-id": "ContactAnalyticsDetailsUpdateWorker", "source-label": null, "updated-by-user-id": null, "timestamp": 1634044973755, "selected": false, "property": "hs_analytics_source", "vid": 601}, "emitted_at": 1655361780826} +{"stream": "property_history", "data": {"value": "1634044970930", "source-type": "CALCULATED", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044970930, "selected": false, "property": "hs_date_entered_subscriber", "vid": 601}, "emitted_at": 1655361780826} +{"stream": "property_history", "data": {"value": "5551222323", "source-type": "CALCULATED", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044970930, "selected": false, "property": 
"hs_searchable_calculated_phone_number", "vid": 601}, "emitted_at": 1655361780826} +{"stream": "property_history", "data": {"value": "hubspot.com", "source-type": "CALCULATED", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044970930, "selected": false, "property": "hs_email_domain", "vid": 601}, "emitted_at": 1655361780826} +{"stream": "property_history", "data": {"value": "0", "source-type": "ANALYTICS", "source-id": "ContactAnalyticsDetailsUpdateWorker", "source-label": null, "updated-by-user-id": null, "timestamp": 1634044973755, "selected": false, "property": "hs_analytics_num_page_views", "vid": 601}, "emitted_at": 1655361780826} +{"stream": "property_history", "data": {"value": "HubSpot Test", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044970920, "selected": false, "property": "company", "vid": 601}, "emitted_at": 1655361780826} +{"stream": "property_history", "data": {"value": "MA", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044970920, "selected": false, "property": "state", "vid": 601}, "emitted_at": 1655361780826} +{"stream": "property_history", "data": {"value": "testingapicontact_0@hubspot.com", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044970930, "selected": false, "property": "email", "vid": 601}, "emitted_at": 1655361780827} +{"stream": "property_history", "data": {"value": "02139", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044970920, "selected": false, "property": "zip", "vid": 601}, "emitted_at": 1655361780827} +{"stream": "property_history", "data": {"value": "http://hubspot.com", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044970920, "selected": false, "property": "website", "vid": 601}, 
"emitted_at": 1655361780827} +{"stream": "property_history", "data": {"value": "0 First Street", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044970920, "selected": false, "property": "address", "vid": 601}, "emitted_at": 1655361780827} +{"stream": "property_history", "data": {"value": "1634044970930", "source-type": "ANALYTICS", "source-id": "ContactAnalyticsDetailsUpdateWorker", "source-label": null, "updated-by-user-id": null, "timestamp": 1634044973755, "selected": false, "property": "hs_analytics_first_timestamp", "vid": 601}, "emitted_at": 1655361780827} +{"stream": "property_history", "data": {"value": "1634044970930", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044970930, "selected": false, "property": "hs_lifecyclestage_subscriber_date", "vid": 601}, "emitted_at": 1655361780827} +{"stream": "property_history", "data": {"value": "0", "source-type": "ANALYTICS", "source-id": "ContactAnalyticsDetailsUpdateWorker", "source-label": null, "updated-by-user-id": null, "timestamp": 1634044973755, "selected": false, "property": "hs_analytics_average_page_views", "vid": 601}, "emitted_at": 1655361780827} +{"stream": "property_history", "data": {"value": "testerson number 0", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044970920, "selected": false, "property": "lastname", "vid": 601}, "emitted_at": 1655361780827} +{"stream": "property_history", "data": {"value": "601", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044970930, "selected": false, "property": "hs_all_contact_vids", "vid": 601}, "emitted_at": 1655361780828} +{"stream": "property_history", "data": {"value": "tier_3", "source-type": "CALCULATED", "source-id": null, "source-label": "HubSpot Predictive Contact Scoring Model", "updated-by-user-id": null, 
"timestamp": 1638773574294, "selected": false, "property": "hs_predictivescoringtier", "vid": 601}, "emitted_at": 1655361780828} +{"stream": "property_history", "data": {"value": "tier_1", "source-type": "CALCULATED", "source-id": null, "source-label": "HubSpot Predictive Contact Scoring Model", "updated-by-user-id": null, "timestamp": 1637664427573, "selected": false, "property": "hs_predictivescoringtier", "vid": 601}, "emitted_at": 1655361780828} +{"stream": "property_history", "data": {"value": "555-122-2323", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044970920, "selected": false, "property": "phone", "vid": 601}, "emitted_at": 1655361780828} +{"stream": "property_history", "data": {"value": "3.79", "source-type": "CALCULATED", "source-id": null, "source-label": "HubSpot Predictive Contact Scoring Model", "updated-by-user-id": null, "timestamp": 1637664427573, "selected": false, "property": "hs_predictivecontactscore_v2", "vid": 601}, "emitted_at": 1655361780828} +{"stream": "property_history", "data": {"value": "true", "source-type": "CALCULATED", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044970930, "selected": false, "property": "hs_is_contact", "vid": 601}, "emitted_at": 1655361780828} +{"stream": "property_history", "data": {"value": "0", "source-type": "CALCULATED", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044971482, "selected": false, "property": "num_conversion_events", "vid": 601}, "emitted_at": 1655361780828} +{"stream": "property_history", "data": {"value": "601", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044970930, "selected": false, "property": "hs_object_id", "vid": 601}, "emitted_at": 1655361780828} +{"stream": "property_history", "data": {"value": "0", "source-type": "ANALYTICS", "source-id": 
"ContactAnalyticsDetailsUpdateWorker", "source-label": null, "updated-by-user-id": null, "timestamp": 1634044973755, "selected": false, "property": "hs_analytics_num_event_completions", "vid": 601}, "emitted_at": 1655361780829} +{"stream": "property_history", "data": {"value": "API", "source-type": "ANALYTICS", "source-id": "ContactAnalyticsDetailsUpdateWorker", "source-label": null, "updated-by-user-id": null, "timestamp": 1634044975559, "selected": false, "property": "hs_analytics_source_data_1", "vid": 601}, "emitted_at": 1655361780829} +{"stream": "property_history", "data": {"value": "subscriber", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044970930, "selected": false, "property": "lifecyclestage", "vid": 601}, "emitted_at": 1655361780829} +{"stream": "property_history", "data": {"value": "API", "source-type": "ANALYTICS", "source-id": "ContactAnalyticsDetailsUpdateWorker", "source-label": null, "updated-by-user-id": null, "timestamp": 1634044991422, "selected": false, "property": "hs_latest_source_data_1", "vid": 1101}, "emitted_at": 1655361780829} +{"stream": "property_history", "data": {"value": "true", "source-type": "CALCULATED", "source-id": "CalculatedPropertyComputer", "source-label": null, "updated-by-user-id": null, "timestamp": 1634044988089, "selected": false, "property": "hs_is_unworked", "vid": 1101}, "emitted_at": 1655361780829} +{"stream": "property_history", "data": {"value": "test contact 10", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044988081, "selected": false, "property": "firstname", "vid": 1101}, "emitted_at": 1655361780829} +{"stream": "property_history", "data": {"value": "Cambridge", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044988081, "selected": false, "property": "city", "vid": 1101}, "emitted_at": 1655361780829} +{"stream": 
"property_history", "data": {"value": "0", "source-type": "CALCULATED", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044988327, "selected": false, "property": "num_unique_conversion_events", "vid": 1101}, "emitted_at": 1655361780830} +{"stream": "property_history", "data": {"value": "OFFLINE", "source-type": "ANALYTICS", "source-id": "ContactAnalyticsDetailsUpdateWorker", "source-label": null, "updated-by-user-id": null, "timestamp": 1634044991422, "selected": false, "property": "hs_latest_source", "vid": 1101}, "emitted_at": 1655361780830} +{"stream": "property_history", "data": {"value": "contacts-lifecycle-pipeline", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044988089, "selected": false, "property": "hs_pipeline", "vid": 1101}, "emitted_at": 1655361780830} +{"stream": "property_history", "data": {"value": "0.0", "source-type": "ANALYTICS", "source-id": "ContactAnalyticsDetailsUpdateWorker", "source-label": null, "updated-by-user-id": null, "timestamp": 1634044991422, "selected": false, "property": "hs_analytics_revenue", "vid": 1101}, "emitted_at": 1655361780830} +{"stream": "property_history", "data": {"value": "1634044988089", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044988089, "selected": false, "property": "createdate", "vid": 1101}, "emitted_at": 1655361780830} +{"stream": "property_history", "data": {"value": "0", "source-type": "ANALYTICS", "source-id": "ContactAnalyticsDetailsUpdateWorker", "source-label": null, "updated-by-user-id": null, "timestamp": 1634044991422, "selected": false, "property": "hs_analytics_num_visits", "vid": 1101}, "emitted_at": 1655361780830} +{"stream": "property_history", "data": {"value": "0", "source-type": "CALCULATED", "source-id": "RollupProperties", "source-label": null, "updated-by-user-id": null, "timestamp": 1634044989056, "selected": false, 
"property": "hs_sequences_actively_enrolled_count", "vid": 1101}, "emitted_at": 1655361780831} +{"stream": "property_history", "data": {"value": "false", "source-type": "MIGRATION", "source-id": "BulkSetMarketableStatusProcessor", "source-label": null, "updated-by-user-id": null, "timestamp": 1635860523515, "selected": false, "property": "hs_marketable_until_renewal", "vid": 1101}, "emitted_at": 1655361780831} +{"stream": "property_history", "data": {"value": "false", "source-type": "MIGRATION", "source-id": "BulkSetMarketableStatusProcessor", "source-label": null, "updated-by-user-id": null, "timestamp": 1635860523515, "selected": false, "property": "hs_marketable_status", "vid": 1101}, "emitted_at": 1655361780831} +{"stream": "property_history", "data": {"value": "OFFLINE", "source-type": "ANALYTICS", "source-id": "ContactAnalyticsDetailsUpdateWorker", "source-label": null, "updated-by-user-id": null, "timestamp": 1634044991422, "selected": false, "property": "hs_analytics_source", "vid": 1101}, "emitted_at": 1655361780831} +{"stream": "property_history", "data": {"value": "1634044988089", "source-type": "CALCULATED", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044988089, "selected": false, "property": "hs_date_entered_subscriber", "vid": 1101}, "emitted_at": 1655361780831} +{"stream": "property_history", "data": {"value": "5551222323", "source-type": "CALCULATED", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044988089, "selected": false, "property": "hs_searchable_calculated_phone_number", "vid": 1101}, "emitted_at": 1655361780831} +{"stream": "property_history", "data": {"value": "hubspot.com", "source-type": "CALCULATED", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044988089, "selected": false, "property": "hs_email_domain", "vid": 1101}, "emitted_at": 1655361780831} +{"stream": "property_history", "data": {"value": "0", "source-type": 
"ANALYTICS", "source-id": "ContactAnalyticsDetailsUpdateWorker", "source-label": null, "updated-by-user-id": null, "timestamp": 1634044991422, "selected": false, "property": "hs_analytics_num_page_views", "vid": 1101}, "emitted_at": 1655361780832} +{"stream": "property_history", "data": {"value": "HubSpot Test", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044988081, "selected": false, "property": "company", "vid": 1101}, "emitted_at": 1655361780832} +{"stream": "property_history", "data": {"value": "MA", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044988081, "selected": false, "property": "state", "vid": 1101}, "emitted_at": 1655361780832} +{"stream": "property_history", "data": {"value": "testingapicontact_10@hubspot.com", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044988089, "selected": false, "property": "email", "vid": 1101}, "emitted_at": 1655361780832} +{"stream": "property_history", "data": {"value": "02139", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044988081, "selected": false, "property": "zip", "vid": 1101}, "emitted_at": 1655361780832} +{"stream": "property_history", "data": {"value": "http://hubspot.com", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044988081, "selected": false, "property": "website", "vid": 1101}, "emitted_at": 1655361780832} +{"stream": "property_history", "data": {"value": "10 First Street", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044988081, "selected": false, "property": "address", "vid": 1101}, "emitted_at": 1655361780832} +{"stream": "property_history", "data": {"value": "1634044988089", "source-type": "ANALYTICS", "source-id": 
"ContactAnalyticsDetailsUpdateWorker", "source-label": null, "updated-by-user-id": null, "timestamp": 1634044991422, "selected": false, "property": "hs_analytics_first_timestamp", "vid": 1101}, "emitted_at": 1655361780832} +{"stream": "property_history", "data": {"value": "1634044988089", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044988089, "selected": false, "property": "hs_lifecyclestage_subscriber_date", "vid": 1101}, "emitted_at": 1655361780833} +{"stream": "property_history", "data": {"value": "0", "source-type": "ANALYTICS", "source-id": "ContactAnalyticsDetailsUpdateWorker", "source-label": null, "updated-by-user-id": null, "timestamp": 1634044991422, "selected": false, "property": "hs_analytics_average_page_views", "vid": 1101}, "emitted_at": 1655361780833} +{"stream": "property_history", "data": {"value": "testerson number 10", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044988081, "selected": false, "property": "lastname", "vid": 1101}, "emitted_at": 1655361780833} +{"stream": "property_history", "data": {"value": "1101", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044988089, "selected": false, "property": "hs_all_contact_vids", "vid": 1101}, "emitted_at": 1655361780833} +{"stream": "property_history", "data": {"value": "tier_3", "source-type": "CALCULATED", "source-id": null, "source-label": "HubSpot Predictive Contact Scoring Model", "updated-by-user-id": null, "timestamp": 1638773545288, "selected": false, "property": "hs_predictivescoringtier", "vid": 1101}, "emitted_at": 1655361780833} +{"stream": "property_history", "data": {"value": "tier_1", "source-type": "CALCULATED", "source-id": null, "source-label": "HubSpot Predictive Contact Scoring Model", "updated-by-user-id": null, "timestamp": 1637664436857, "selected": false, "property": 
"hs_predictivescoringtier", "vid": 1101}, "emitted_at": 1655361780833} +{"stream": "property_history", "data": {"value": "555-122-2323", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044988081, "selected": false, "property": "phone", "vid": 1101}, "emitted_at": 1655361780833} +{"stream": "property_history", "data": {"value": "3.79", "source-type": "CALCULATED", "source-id": null, "source-label": "HubSpot Predictive Contact Scoring Model", "updated-by-user-id": null, "timestamp": 1637664436857, "selected": false, "property": "hs_predictivecontactscore_v2", "vid": 1101}, "emitted_at": 1655361780833} +{"stream": "property_history", "data": {"value": "true", "source-type": "CALCULATED", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044988089, "selected": false, "property": "hs_is_contact", "vid": 1101}, "emitted_at": 1655361780834} +{"stream": "property_history", "data": {"value": "0", "source-type": "CALCULATED", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044988327, "selected": false, "property": "num_conversion_events", "vid": 1101}, "emitted_at": 1655361780834} +{"stream": "property_history", "data": {"value": "1101", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044988089, "selected": false, "property": "hs_object_id", "vid": 1101}, "emitted_at": 1655361780834} +{"stream": "property_history", "data": {"value": "0", "source-type": "ANALYTICS", "source-id": "ContactAnalyticsDetailsUpdateWorker", "source-label": null, "updated-by-user-id": null, "timestamp": 1634044991422, "selected": false, "property": "hs_analytics_num_event_completions", "vid": 1101}, "emitted_at": 1655361780834} +{"stream": "property_history", "data": {"value": "API", "source-type": "ANALYTICS", "source-id": "ContactAnalyticsDetailsUpdateWorker", "source-label": null, "updated-by-user-id": 
null, "timestamp": 1634044991422, "selected": false, "property": "hs_analytics_source_data_1", "vid": 1101}, "emitted_at": 1655361780834} +{"stream": "property_history", "data": {"value": "subscriber", "source-type": "API", "source-id": null, "source-label": null, "updated-by-user-id": null, "timestamp": 1634044988089, "selected": false, "property": "lifecyclestage", "vid": 1101}, "emitted_at": 1655361780834} +{"stream": "campaigns", "data": {"id": 115429485, "lastUpdatedTime": 1615506409286, "appId": 113, "appName": "Batch", "contentId": 42931043849, "subject": "Test subj", "name": "Test subj", "counters": {"processed": 1, "deferred": 1, "mta_dropped": 1, "dropped": 3, "sent": 0}, "lastProcessingFinishedAt": 1615504712000, "lastProcessingStartedAt": 1615504687000, "lastProcessingStateChangeAt": 1615504712000, "numIncluded": 3, "processingState": "DONE", "type": "BATCH_EMAIL"}, "emitted_at": 1655280715948} +{"stream": "companies", "data": {"id": "5000787595", "properties": {"about_us": null, "address": "2261 Market Street", "address2": null, "annualrevenue": null, "city": "San Francisco", "closedate": null, "closedate_timestamp_earliest_value_a2a17e6e": null, "country": "United States", "createdate": "2020-12-11T01:28:27.673000+00:00", "days_to_close": null, "description": "Airbyte is an open-source data integration platform to build ELT pipelines. 
Consolidate your data in your data warehouses, lakes and databases.", "domain": "Daxtarity.com", "engagements_last_meeting_booked": null, "engagements_last_meeting_booked_campaign": null, "engagements_last_meeting_booked_medium": null, "engagements_last_meeting_booked_source": null, "facebook_company_page": null, "facebookfans": null, "first_contact_createdate": "2020-12-11T01:29:29.115000+00:00", "first_contact_createdate_timestamp_earliest_value_78b50eea": null, "first_conversion_date": null, "first_conversion_date_timestamp_earliest_value_61f58f2c": null, "first_conversion_event_name": null, "first_conversion_event_name_timestamp_earliest_value_68ddae0a": null, "first_deal_created_date": null, "founded_year": "2020", "googleplus_page": null, "hs_additional_domains": null, "hs_all_accessible_team_ids": null, "hs_all_assigned_business_unit_ids": null, "hs_all_owner_ids": "52550153", "hs_all_team_ids": null, "hs_analytics_first_timestamp": "2020-12-11T01:29:29.115000+00:00", "hs_analytics_first_timestamp_timestamp_earliest_value_11e3a63a": null, "hs_analytics_first_touch_converting_campaign": null, "hs_analytics_first_touch_converting_campaign_timestamp_earliest_value_4757fe10": null, "hs_analytics_first_visit_timestamp": null, "hs_analytics_first_visit_timestamp_timestamp_earliest_value_accc17ae": null, "hs_analytics_last_timestamp": null, "hs_analytics_last_timestamp_timestamp_latest_value_4e16365a": null, "hs_analytics_last_touch_converting_campaign": null, "hs_analytics_last_touch_converting_campaign_timestamp_latest_value_81a64e30": null, "hs_analytics_last_visit_timestamp": null, "hs_analytics_last_visit_timestamp_timestamp_latest_value_999a0fce": null, "hs_analytics_num_page_views": 0.0, "hs_analytics_num_page_views_cardinality_sum_e46e85b0": null, "hs_analytics_num_visits": 0.0, "hs_analytics_num_visits_cardinality_sum_53d952a6": null, "hs_analytics_source": "OFFLINE", "hs_analytics_source_data_1": "CONTACTS", 
"hs_analytics_source_data_1_timestamp_earliest_value_9b2f1fa1": null, "hs_analytics_source_data_2": "CRM_UI", "hs_analytics_source_data_2_timestamp_earliest_value_9b2f9400": null, "hs_analytics_source_timestamp_earliest_value_25a3a52c": null, "hs_avatar_filemanager_key": null, "hs_created_by_user_id": 12282590, "hs_createdate": null, "hs_date_entered_customer": null, "hs_date_entered_evangelist": null, "hs_date_entered_lead": null, "hs_date_entered_marketingqualifiedlead": null, "hs_date_entered_opportunity": null, "hs_date_entered_other": null, "hs_date_entered_salesqualifiedlead": null, "hs_date_entered_subscriber": null, "hs_date_exited_customer": null, "hs_date_exited_evangelist": null, "hs_date_exited_lead": null, "hs_date_exited_marketingqualifiedlead": null, "hs_date_exited_opportunity": null, "hs_date_exited_other": null, "hs_date_exited_salesqualifiedlead": null, "hs_date_exited_subscriber": null, "hs_ideal_customer_profile": null, "hs_is_target_account": null, "hs_last_booked_meeting_date": null, "hs_last_logged_call_date": null, "hs_last_open_task_date": null, "hs_last_sales_activity_date": null, "hs_last_sales_activity_timestamp": null, "hs_lastmodifieddate": "2022-05-02T11:15:24.476000+00:00", "hs_latest_createdate_of_active_subscriptions": null, "hs_latest_meeting_activity": null, "hs_lead_status": null, "hs_merged_object_ids": null, "hs_num_blockers": 0.0, "hs_num_child_companies": 0.0, "hs_num_contacts_with_buying_roles": 0.0, "hs_num_decision_makers": 0.0, "hs_num_open_deals": 0.0, "hs_object_id": 5000787595, "hs_parent_company_id": null, "hs_pipeline": null, "hs_predictivecontactscore_v2": 0.62, "hs_predictivecontactscore_v2_next_max_max_d4e58c1e": null, "hs_sales_email_last_replied": null, "hs_target_account": null, "hs_target_account_probability": 0.38625118136405945, "hs_target_account_recommendation_snooze_time": null, "hs_target_account_recommendation_state": null, "hs_time_in_customer": null, "hs_time_in_evangelist": null, "hs_time_in_lead": 
null, "hs_time_in_marketingqualifiedlead": null, "hs_time_in_opportunity": null, "hs_time_in_other": null, "hs_time_in_salesqualifiedlead": null, "hs_time_in_subscriber": null, "hs_total_deal_value": null, "hs_unique_creation_key": null, "hs_updated_by_user_id": 12282590, "hs_user_ids_of_all_notification_followers": null, "hs_user_ids_of_all_notification_unfollowers": null, "hs_user_ids_of_all_owners": "12282590", "hubspot_owner_assigneddate": "2020-12-11T01:28:27.673000+00:00", "hubspot_owner_id": "52550153", "hubspot_team_id": null, "hubspotscore": null, "industry": null, "is_public": true, "lifecyclestage": null, "linkedin_company_page": "https://www.linkedin.com/company/airbytehq", "linkedinbio": "Airbyte is an open-source data integration platform to build ELT pipelines. Consolidate your data in your data warehouses, lakes and databases.", "name": "Daxtarity", "notes_last_contacted": null, "notes_last_updated": null, "notes_next_activity_date": null, "num_associated_contacts": 1.0, "num_associated_deals": null, "num_contacted_notes": null, "num_conversion_events": null, "num_conversion_events_cardinality_sum_d095f14b": null, "num_notes": null, "numberofemployees": 50.0, "phone": "+1 415-307-4864", "recent_conversion_date": null, "recent_conversion_date_timestamp_latest_value_72856da1": null, "recent_conversion_event_name": null, "recent_conversion_event_name_timestamp_latest_value_66c820bf": null, "recent_deal_amount": null, "recent_deal_close_date": null, "state": "CA", "timezone": "America/Los_Angeles", "total_money_raised": null, "total_revenue": null, "twitterbio": null, "twitterfollowers": null, "twitterhandle": "AirbyteHQ", "type": null, "web_technologies": "slack;google_tag_manager;google_analytics;intercom;lever;google_apps;facebook_advertiser", "website": "Daxtarity.com", "zip": "94114"}, "createdAt": "2020-12-11T01:28:27.673Z", "updatedAt": "2022-05-02T11:15:24.476Z", "archived": false, "contacts": ["101", "101"]}, "emitted_at": 1655280716894} 
+{"stream": "companies", "data": {"id": "5170561229", "properties": {"about_us": null, "address": null, "address2": null, "annualrevenue": null, "city": null, "closedate": null, "closedate_timestamp_earliest_value_a2a17e6e": null, "country": null, "createdate": "2021-01-13T10:25:46.574000+00:00", "days_to_close": null, "description": "Test company description", "domain": null, "engagements_last_meeting_booked": null, "engagements_last_meeting_booked_campaign": null, "engagements_last_meeting_booked_medium": null, "engagements_last_meeting_booked_source": null, "facebook_company_page": null, "facebookfans": null, "first_contact_createdate": "2021-01-14T14:26:17.014000+00:00", "first_contact_createdate_timestamp_earliest_value_78b50eea": null, "first_conversion_date": null, "first_conversion_date_timestamp_earliest_value_61f58f2c": null, "first_conversion_event_name": null, "first_conversion_event_name_timestamp_earliest_value_68ddae0a": null, "first_deal_created_date": null, "founded_year": null, "googleplus_page": null, "hs_additional_domains": null, "hs_all_accessible_team_ids": null, "hs_all_assigned_business_unit_ids": null, "hs_all_owner_ids": null, "hs_all_team_ids": null, "hs_analytics_first_timestamp": "2021-01-14T14:26:17.014000+00:00", "hs_analytics_first_timestamp_timestamp_earliest_value_11e3a63a": null, "hs_analytics_first_touch_converting_campaign": null, "hs_analytics_first_touch_converting_campaign_timestamp_earliest_value_4757fe10": null, "hs_analytics_first_visit_timestamp": null, "hs_analytics_first_visit_timestamp_timestamp_earliest_value_accc17ae": null, "hs_analytics_last_timestamp": null, "hs_analytics_last_timestamp_timestamp_latest_value_4e16365a": null, "hs_analytics_last_touch_converting_campaign": null, "hs_analytics_last_touch_converting_campaign_timestamp_latest_value_81a64e30": null, "hs_analytics_last_visit_timestamp": null, "hs_analytics_last_visit_timestamp_timestamp_latest_value_999a0fce": null, "hs_analytics_num_page_views": 0.0, 
"hs_analytics_num_page_views_cardinality_sum_e46e85b0": null, "hs_analytics_num_visits": 0.0, "hs_analytics_num_visits_cardinality_sum_53d952a6": null, "hs_analytics_source": "OFFLINE", "hs_analytics_source_data_1": "API", "hs_analytics_source_data_1_timestamp_earliest_value_9b2f1fa1": null, "hs_analytics_source_data_2": null, "hs_analytics_source_data_2_timestamp_earliest_value_9b2f9400": null, "hs_analytics_source_timestamp_earliest_value_25a3a52c": null, "hs_avatar_filemanager_key": null, "hs_created_by_user_id": null, "hs_createdate": null, "hs_date_entered_customer": null, "hs_date_entered_evangelist": null, "hs_date_entered_lead": null, "hs_date_entered_marketingqualifiedlead": null, "hs_date_entered_opportunity": null, "hs_date_entered_other": null, "hs_date_entered_salesqualifiedlead": null, "hs_date_entered_subscriber": null, "hs_date_exited_customer": null, "hs_date_exited_evangelist": null, "hs_date_exited_lead": null, "hs_date_exited_marketingqualifiedlead": null, "hs_date_exited_opportunity": null, "hs_date_exited_other": null, "hs_date_exited_salesqualifiedlead": null, "hs_date_exited_subscriber": null, "hs_ideal_customer_profile": null, "hs_is_target_account": null, "hs_last_booked_meeting_date": null, "hs_last_logged_call_date": null, "hs_last_open_task_date": null, "hs_last_sales_activity_date": null, "hs_last_sales_activity_timestamp": null, "hs_lastmodifieddate": "2021-11-23T10:49:20.517000+00:00", "hs_latest_createdate_of_active_subscriptions": null, "hs_latest_meeting_activity": null, "hs_lead_status": null, "hs_merged_object_ids": null, "hs_num_blockers": 0.0, "hs_num_child_companies": 0.0, "hs_num_contacts_with_buying_roles": 0.0, "hs_num_decision_makers": 0.0, "hs_num_open_deals": 0.0, "hs_object_id": 5170561229, "hs_parent_company_id": null, "hs_pipeline": null, "hs_predictivecontactscore_v2": 2.01, "hs_predictivecontactscore_v2_next_max_max_d4e58c1e": null, "hs_sales_email_last_replied": null, "hs_target_account": null, 
"hs_target_account_probability": 0.5596858859062195, "hs_target_account_recommendation_snooze_time": null, "hs_target_account_recommendation_state": null, "hs_time_in_customer": null, "hs_time_in_evangelist": null, "hs_time_in_lead": null, "hs_time_in_marketingqualifiedlead": null, "hs_time_in_opportunity": null, "hs_time_in_other": null, "hs_time_in_salesqualifiedlead": null, "hs_time_in_subscriber": null, "hs_total_deal_value": null, "hs_unique_creation_key": null, "hs_updated_by_user_id": 12282590, "hs_user_ids_of_all_notification_followers": null, "hs_user_ids_of_all_notification_unfollowers": null, "hs_user_ids_of_all_owners": null, "hubspot_owner_assigneddate": null, "hubspot_owner_id": null, "hubspot_team_id": null, "hubspotscore": null, "industry": null, "is_public": null, "lifecyclestage": null, "linkedin_company_page": null, "linkedinbio": null, "name": "Test company name2", "notes_last_contacted": null, "notes_last_updated": null, "notes_next_activity_date": null, "num_associated_contacts": 2.0, "num_associated_deals": null, "num_contacted_notes": null, "num_conversion_events": null, "num_conversion_events_cardinality_sum_d095f14b": null, "num_notes": null, "numberofemployees": null, "phone": null, "recent_conversion_date": null, "recent_conversion_date_timestamp_latest_value_72856da1": null, "recent_conversion_event_name": null, "recent_conversion_event_name_timestamp_latest_value_66c820bf": null, "recent_deal_amount": null, "recent_deal_close_date": null, "state": null, "timezone": null, "total_money_raised": null, "total_revenue": null, "twitterbio": null, "twitterfollowers": null, "twitterhandle": null, "type": null, "web_technologies": null, "website": null, "zip": null}, "createdAt": "2021-01-13T10:25:46.574Z", "updatedAt": "2021-11-23T10:49:20.517Z", "archived": false, "contacts": ["201", "251", "201", "251"]}, "emitted_at": 1655280716894} +{"stream": "companies", "data": {"id": "5183330928", "properties": {"about_us": null, "address": null, 
"address2": null, "annualrevenue": null, "city": null, "closedate": null, "closedate_timestamp_earliest_value_a2a17e6e": null, "country": null, "createdate": "2021-01-14T14:22:28.333000+00:00", "days_to_close": null, "description": "New company test 8", "domain": null, "engagements_last_meeting_booked": null, "engagements_last_meeting_booked_campaign": null, "engagements_last_meeting_booked_medium": null, "engagements_last_meeting_booked_source": null, "facebook_company_page": null, "facebookfans": null, "first_contact_createdate": null, "first_contact_createdate_timestamp_earliest_value_78b50eea": null, "first_conversion_date": null, "first_conversion_date_timestamp_earliest_value_61f58f2c": null, "first_conversion_event_name": null, "first_conversion_event_name_timestamp_earliest_value_68ddae0a": null, "first_deal_created_date": null, "founded_year": null, "googleplus_page": null, "hs_additional_domains": null, "hs_all_accessible_team_ids": null, "hs_all_assigned_business_unit_ids": null, "hs_all_owner_ids": null, "hs_all_team_ids": null, "hs_analytics_first_timestamp": null, "hs_analytics_first_timestamp_timestamp_earliest_value_11e3a63a": null, "hs_analytics_first_touch_converting_campaign": null, "hs_analytics_first_touch_converting_campaign_timestamp_earliest_value_4757fe10": null, "hs_analytics_first_visit_timestamp": null, "hs_analytics_first_visit_timestamp_timestamp_earliest_value_accc17ae": null, "hs_analytics_last_timestamp": null, "hs_analytics_last_timestamp_timestamp_latest_value_4e16365a": null, "hs_analytics_last_touch_converting_campaign": null, "hs_analytics_last_touch_converting_campaign_timestamp_latest_value_81a64e30": null, "hs_analytics_last_visit_timestamp": null, "hs_analytics_last_visit_timestamp_timestamp_latest_value_999a0fce": null, "hs_analytics_num_page_views": null, "hs_analytics_num_page_views_cardinality_sum_e46e85b0": null, "hs_analytics_num_visits": null, "hs_analytics_num_visits_cardinality_sum_53d952a6": null, 
"hs_analytics_source": null, "hs_analytics_source_data_1": null, "hs_analytics_source_data_1_timestamp_earliest_value_9b2f1fa1": null, "hs_analytics_source_data_2": null, "hs_analytics_source_data_2_timestamp_earliest_value_9b2f9400": null, "hs_analytics_source_timestamp_earliest_value_25a3a52c": null, "hs_avatar_filemanager_key": null, "hs_created_by_user_id": null, "hs_createdate": null, "hs_date_entered_customer": null, "hs_date_entered_evangelist": null, "hs_date_entered_lead": null, "hs_date_entered_marketingqualifiedlead": null, "hs_date_entered_opportunity": null, "hs_date_entered_other": null, "hs_date_entered_salesqualifiedlead": null, "hs_date_entered_subscriber": null, "hs_date_exited_customer": null, "hs_date_exited_evangelist": null, "hs_date_exited_lead": null, "hs_date_exited_marketingqualifiedlead": null, "hs_date_exited_opportunity": null, "hs_date_exited_other": null, "hs_date_exited_salesqualifiedlead": null, "hs_date_exited_subscriber": null, "hs_ideal_customer_profile": null, "hs_is_target_account": null, "hs_last_booked_meeting_date": null, "hs_last_logged_call_date": null, "hs_last_open_task_date": null, "hs_last_sales_activity_date": null, "hs_last_sales_activity_timestamp": null, "hs_lastmodifieddate": "2021-08-04T00:53:24.019000+00:00", "hs_latest_createdate_of_active_subscriptions": null, "hs_latest_meeting_activity": null, "hs_lead_status": null, "hs_merged_object_ids": null, "hs_num_blockers": 0.0, "hs_num_child_companies": 0.0, "hs_num_contacts_with_buying_roles": 0.0, "hs_num_decision_makers": 0.0, "hs_num_open_deals": 0.0, "hs_object_id": 5183330928, "hs_parent_company_id": null, "hs_pipeline": null, "hs_predictivecontactscore_v2": null, "hs_predictivecontactscore_v2_next_max_max_d4e58c1e": null, "hs_sales_email_last_replied": null, "hs_target_account": null, "hs_target_account_probability": 0.4076234698295593, "hs_target_account_recommendation_snooze_time": null, "hs_target_account_recommendation_state": null, "hs_time_in_customer": 
null, "hs_time_in_evangelist": null, "hs_time_in_lead": null, "hs_time_in_marketingqualifiedlead": null, "hs_time_in_opportunity": null, "hs_time_in_other": null, "hs_time_in_salesqualifiedlead": null, "hs_time_in_subscriber": null, "hs_total_deal_value": null, "hs_unique_creation_key": null, "hs_updated_by_user_id": null, "hs_user_ids_of_all_notification_followers": null, "hs_user_ids_of_all_notification_unfollowers": null, "hs_user_ids_of_all_owners": null, "hubspot_owner_assigneddate": null, "hubspot_owner_id": null, "hubspot_team_id": null, "hubspotscore": null, "industry": null, "is_public": null, "lifecyclestage": null, "linkedin_company_page": null, "linkedinbio": null, "name": "New company test 8", "notes_last_contacted": null, "notes_last_updated": null, "notes_next_activity_date": null, "num_associated_contacts": 0.0, "num_associated_deals": null, "num_contacted_notes": null, "num_conversion_events": null, "num_conversion_events_cardinality_sum_d095f14b": null, "num_notes": null, "numberofemployees": null, "phone": null, "recent_conversion_date": null, "recent_conversion_date_timestamp_latest_value_72856da1": null, "recent_conversion_event_name": null, "recent_conversion_event_name_timestamp_latest_value_66c820bf": null, "recent_deal_amount": null, "recent_deal_close_date": null, "state": null, "timezone": null, "total_money_raised": null, "total_revenue": null, "twitterbio": null, "twitterfollowers": null, "twitterhandle": null, "type": null, "web_technologies": null, "website": null, "zip": null}, "createdAt": "2021-01-14T14:22:28.333Z", "updatedAt": "2021-08-04T00:53:24.019Z", "archived": false}, "emitted_at": 1655280716895} +{"stream": "companies", "data": {"id": "5183425390", "properties": {"about_us": null, "address": null, "address2": null, "annualrevenue": null, "city": null, "closedate": null, "closedate_timestamp_earliest_value_a2a17e6e": null, "country": null, "createdate": "2021-01-14T14:20:55.910000+00:00", "days_to_close": null, 
"description": "New company test", "domain": null, "engagements_last_meeting_booked": null, "engagements_last_meeting_booked_campaign": null, "engagements_last_meeting_booked_medium": null, "engagements_last_meeting_booked_source": null, "facebook_company_page": null, "facebookfans": null, "first_contact_createdate": null, "first_contact_createdate_timestamp_earliest_value_78b50eea": null, "first_conversion_date": null, "first_conversion_date_timestamp_earliest_value_61f58f2c": null, "first_conversion_event_name": null, "first_conversion_event_name_timestamp_earliest_value_68ddae0a": null, "first_deal_created_date": null, "founded_year": null, "googleplus_page": null, "hs_additional_domains": null, "hs_all_accessible_team_ids": null, "hs_all_assigned_business_unit_ids": null, "hs_all_owner_ids": null, "hs_all_team_ids": null, "hs_analytics_first_timestamp": null, "hs_analytics_first_timestamp_timestamp_earliest_value_11e3a63a": null, "hs_analytics_first_touch_converting_campaign": null, "hs_analytics_first_touch_converting_campaign_timestamp_earliest_value_4757fe10": null, "hs_analytics_first_visit_timestamp": null, "hs_analytics_first_visit_timestamp_timestamp_earliest_value_accc17ae": null, "hs_analytics_last_timestamp": null, "hs_analytics_last_timestamp_timestamp_latest_value_4e16365a": null, "hs_analytics_last_touch_converting_campaign": null, "hs_analytics_last_touch_converting_campaign_timestamp_latest_value_81a64e30": null, "hs_analytics_last_visit_timestamp": null, "hs_analytics_last_visit_timestamp_timestamp_latest_value_999a0fce": null, "hs_analytics_num_page_views": null, "hs_analytics_num_page_views_cardinality_sum_e46e85b0": null, "hs_analytics_num_visits": null, "hs_analytics_num_visits_cardinality_sum_53d952a6": null, "hs_analytics_source": null, "hs_analytics_source_data_1": null, "hs_analytics_source_data_1_timestamp_earliest_value_9b2f1fa1": null, "hs_analytics_source_data_2": null, "hs_analytics_source_data_2_timestamp_earliest_value_9b2f9400": 
null, "hs_analytics_source_timestamp_earliest_value_25a3a52c": null, "hs_avatar_filemanager_key": null, "hs_created_by_user_id": null, "hs_createdate": null, "hs_date_entered_customer": null, "hs_date_entered_evangelist": null, "hs_date_entered_lead": null, "hs_date_entered_marketingqualifiedlead": null, "hs_date_entered_opportunity": null, "hs_date_entered_other": null, "hs_date_entered_salesqualifiedlead": null, "hs_date_entered_subscriber": null, "hs_date_exited_customer": null, "hs_date_exited_evangelist": null, "hs_date_exited_lead": null, "hs_date_exited_marketingqualifiedlead": null, "hs_date_exited_opportunity": null, "hs_date_exited_other": null, "hs_date_exited_salesqualifiedlead": null, "hs_date_exited_subscriber": null, "hs_ideal_customer_profile": null, "hs_is_target_account": null, "hs_last_booked_meeting_date": null, "hs_last_logged_call_date": null, "hs_last_open_task_date": null, "hs_last_sales_activity_date": null, "hs_last_sales_activity_timestamp": null, "hs_lastmodifieddate": "2021-08-02T10:42:01.267000+00:00", "hs_latest_createdate_of_active_subscriptions": null, "hs_latest_meeting_activity": null, "hs_lead_status": null, "hs_merged_object_ids": null, "hs_num_blockers": 0.0, "hs_num_child_companies": 0.0, "hs_num_contacts_with_buying_roles": 0.0, "hs_num_decision_makers": 0.0, "hs_num_open_deals": 0.0, "hs_object_id": 5183425390, "hs_parent_company_id": null, "hs_pipeline": null, "hs_predictivecontactscore_v2": null, "hs_predictivecontactscore_v2_next_max_max_d4e58c1e": null, "hs_sales_email_last_replied": null, "hs_target_account": null, "hs_target_account_probability": 0.4076234698295593, "hs_target_account_recommendation_snooze_time": null, "hs_target_account_recommendation_state": null, "hs_time_in_customer": null, "hs_time_in_evangelist": null, "hs_time_in_lead": null, "hs_time_in_marketingqualifiedlead": null, "hs_time_in_opportunity": null, "hs_time_in_other": null, "hs_time_in_salesqualifiedlead": null, "hs_time_in_subscriber": null, 
"hs_total_deal_value": null, "hs_unique_creation_key": null, "hs_updated_by_user_id": null, "hs_user_ids_of_all_notification_followers": null, "hs_user_ids_of_all_notification_unfollowers": null, "hs_user_ids_of_all_owners": null, "hubspot_owner_assigneddate": null, "hubspot_owner_id": null, "hubspot_team_id": null, "hubspotscore": null, "industry": null, "is_public": null, "lifecyclestage": null, "linkedin_company_page": null, "linkedinbio": null, "name": "New company test", "notes_last_contacted": null, "notes_last_updated": null, "notes_next_activity_date": null, "num_associated_contacts": 0.0, "num_associated_deals": null, "num_contacted_notes": null, "num_conversion_events": null, "num_conversion_events_cardinality_sum_d095f14b": null, "num_notes": null, "numberofemployees": null, "phone": null, "recent_conversion_date": null, "recent_conversion_date_timestamp_latest_value_72856da1": null, "recent_conversion_event_name": null, "recent_conversion_event_name_timestamp_latest_value_66c820bf": null, "recent_deal_amount": null, "recent_deal_close_date": null, "state": null, "timezone": null, "total_money_raised": null, "total_revenue": null, "twitterbio": null, "twitterfollowers": null, "twitterhandle": null, "type": null, "web_technologies": null, "website": null, "zip": null}, "createdAt": "2021-01-14T14:20:55.910Z", "updatedAt": "2021-08-02T10:42:01.267Z", "archived": false}, "emitted_at": 1655280716896} +{"stream": "companies", "data": {"id": "5183445016", "properties": {"about_us": null, "address": null, "address2": null, "annualrevenue": null, "city": null, "closedate": null, "closedate_timestamp_earliest_value_a2a17e6e": null, "country": null, "createdate": "2021-01-14T14:22:27.110000+00:00", "days_to_close": null, "description": "New company test 5", "domain": null, "engagements_last_meeting_booked": null, "engagements_last_meeting_booked_campaign": null, "engagements_last_meeting_booked_medium": null, "engagements_last_meeting_booked_source": null, 
"facebook_company_page": null, "facebookfans": null, "first_contact_createdate": null, "first_contact_createdate_timestamp_earliest_value_78b50eea": null, "first_conversion_date": null, "first_conversion_date_timestamp_earliest_value_61f58f2c": null, "first_conversion_event_name": null, "first_conversion_event_name_timestamp_earliest_value_68ddae0a": null, "first_deal_created_date": null, "founded_year": null, "googleplus_page": null, "hs_additional_domains": null, "hs_all_accessible_team_ids": null, "hs_all_assigned_business_unit_ids": null, "hs_all_owner_ids": null, "hs_all_team_ids": null, "hs_analytics_first_timestamp": null, "hs_analytics_first_timestamp_timestamp_earliest_value_11e3a63a": null, "hs_analytics_first_touch_converting_campaign": null, "hs_analytics_first_touch_converting_campaign_timestamp_earliest_value_4757fe10": null, "hs_analytics_first_visit_timestamp": null, "hs_analytics_first_visit_timestamp_timestamp_earliest_value_accc17ae": null, "hs_analytics_last_timestamp": null, "hs_analytics_last_timestamp_timestamp_latest_value_4e16365a": null, "hs_analytics_last_touch_converting_campaign": null, "hs_analytics_last_touch_converting_campaign_timestamp_latest_value_81a64e30": null, "hs_analytics_last_visit_timestamp": null, "hs_analytics_last_visit_timestamp_timestamp_latest_value_999a0fce": null, "hs_analytics_num_page_views": null, "hs_analytics_num_page_views_cardinality_sum_e46e85b0": null, "hs_analytics_num_visits": null, "hs_analytics_num_visits_cardinality_sum_53d952a6": null, "hs_analytics_source": null, "hs_analytics_source_data_1": null, "hs_analytics_source_data_1_timestamp_earliest_value_9b2f1fa1": null, "hs_analytics_source_data_2": null, "hs_analytics_source_data_2_timestamp_earliest_value_9b2f9400": null, "hs_analytics_source_timestamp_earliest_value_25a3a52c": null, "hs_avatar_filemanager_key": null, "hs_created_by_user_id": null, "hs_createdate": null, "hs_date_entered_customer": null, "hs_date_entered_evangelist": null, 
"hs_date_entered_lead": null, "hs_date_entered_marketingqualifiedlead": null, "hs_date_entered_opportunity": null, "hs_date_entered_other": null, "hs_date_entered_salesqualifiedlead": null, "hs_date_entered_subscriber": null, "hs_date_exited_customer": null, "hs_date_exited_evangelist": null, "hs_date_exited_lead": null, "hs_date_exited_marketingqualifiedlead": null, "hs_date_exited_opportunity": null, "hs_date_exited_other": null, "hs_date_exited_salesqualifiedlead": null, "hs_date_exited_subscriber": null, "hs_ideal_customer_profile": null, "hs_is_target_account": null, "hs_last_booked_meeting_date": null, "hs_last_logged_call_date": null, "hs_last_open_task_date": null, "hs_last_sales_activity_date": null, "hs_last_sales_activity_timestamp": null, "hs_lastmodifieddate": "2021-07-31T17:46:28.714000+00:00", "hs_latest_createdate_of_active_subscriptions": null, "hs_latest_meeting_activity": null, "hs_lead_status": null, "hs_merged_object_ids": null, "hs_num_blockers": 0.0, "hs_num_child_companies": 0.0, "hs_num_contacts_with_buying_roles": 0.0, "hs_num_decision_makers": 0.0, "hs_num_open_deals": 0.0, "hs_object_id": 5183445016, "hs_parent_company_id": null, "hs_pipeline": null, "hs_predictivecontactscore_v2": null, "hs_predictivecontactscore_v2_next_max_max_d4e58c1e": null, "hs_sales_email_last_replied": null, "hs_target_account": null, "hs_target_account_probability": 0.4076234698295593, "hs_target_account_recommendation_snooze_time": null, "hs_target_account_recommendation_state": null, "hs_time_in_customer": null, "hs_time_in_evangelist": null, "hs_time_in_lead": null, "hs_time_in_marketingqualifiedlead": null, "hs_time_in_opportunity": null, "hs_time_in_other": null, "hs_time_in_salesqualifiedlead": null, "hs_time_in_subscriber": null, "hs_total_deal_value": null, "hs_unique_creation_key": null, "hs_updated_by_user_id": null, "hs_user_ids_of_all_notification_followers": null, "hs_user_ids_of_all_notification_unfollowers": null, "hs_user_ids_of_all_owners": 
null, "hubspot_owner_assigneddate": null, "hubspot_owner_id": null, "hubspot_team_id": null, "hubspotscore": null, "industry": null, "is_public": null, "lifecyclestage": null, "linkedin_company_page": null, "linkedinbio": null, "name": "New company test 5", "notes_last_contacted": null, "notes_last_updated": null, "notes_next_activity_date": null, "num_associated_contacts": 0.0, "num_associated_deals": null, "num_contacted_notes": null, "num_conversion_events": null, "num_conversion_events_cardinality_sum_d095f14b": null, "num_notes": null, "numberofemployees": null, "phone": null, "recent_conversion_date": null, "recent_conversion_date_timestamp_latest_value_72856da1": null, "recent_conversion_event_name": null, "recent_conversion_event_name_timestamp_latest_value_66c820bf": null, "recent_deal_amount": null, "recent_deal_close_date": null, "state": null, "timezone": null, "total_money_raised": null, "total_revenue": null, "twitterbio": null, "twitterfollowers": null, "twitterhandle": null, "type": null, "web_technologies": null, "website": null, "zip": null}, "createdAt": "2021-01-14T14:22:27.110Z", "updatedAt": "2021-07-31T17:46:28.714Z", "archived": false}, "emitted_at": 1655280716896} +{"stream": "companies", "data": {"id": "5183445018", "properties": {"about_us": null, "address": null, "address2": null, "annualrevenue": null, "city": null, "closedate": null, "closedate_timestamp_earliest_value_a2a17e6e": null, "country": null, "createdate": "2021-01-14T14:22:27.996000+00:00", "days_to_close": null, "description": "New company test 7", "domain": null, "engagements_last_meeting_booked": null, "engagements_last_meeting_booked_campaign": null, "engagements_last_meeting_booked_medium": null, "engagements_last_meeting_booked_source": null, "facebook_company_page": null, "facebookfans": null, "first_contact_createdate": null, "first_contact_createdate_timestamp_earliest_value_78b50eea": null, "first_conversion_date": null, 
"first_conversion_date_timestamp_earliest_value_61f58f2c": null, "first_conversion_event_name": null, "first_conversion_event_name_timestamp_earliest_value_68ddae0a": null, "first_deal_created_date": null, "founded_year": null, "googleplus_page": null, "hs_additional_domains": null, "hs_all_accessible_team_ids": null, "hs_all_assigned_business_unit_ids": null, "hs_all_owner_ids": null, "hs_all_team_ids": null, "hs_analytics_first_timestamp": null, "hs_analytics_first_timestamp_timestamp_earliest_value_11e3a63a": null, "hs_analytics_first_touch_converting_campaign": null, "hs_analytics_first_touch_converting_campaign_timestamp_earliest_value_4757fe10": null, "hs_analytics_first_visit_timestamp": null, "hs_analytics_first_visit_timestamp_timestamp_earliest_value_accc17ae": null, "hs_analytics_last_timestamp": null, "hs_analytics_last_timestamp_timestamp_latest_value_4e16365a": null, "hs_analytics_last_touch_converting_campaign": null, "hs_analytics_last_touch_converting_campaign_timestamp_latest_value_81a64e30": null, "hs_analytics_last_visit_timestamp": null, "hs_analytics_last_visit_timestamp_timestamp_latest_value_999a0fce": null, "hs_analytics_num_page_views": null, "hs_analytics_num_page_views_cardinality_sum_e46e85b0": null, "hs_analytics_num_visits": null, "hs_analytics_num_visits_cardinality_sum_53d952a6": null, "hs_analytics_source": null, "hs_analytics_source_data_1": null, "hs_analytics_source_data_1_timestamp_earliest_value_9b2f1fa1": null, "hs_analytics_source_data_2": null, "hs_analytics_source_data_2_timestamp_earliest_value_9b2f9400": null, "hs_analytics_source_timestamp_earliest_value_25a3a52c": null, "hs_avatar_filemanager_key": null, "hs_created_by_user_id": null, "hs_createdate": null, "hs_date_entered_customer": null, "hs_date_entered_evangelist": null, "hs_date_entered_lead": null, "hs_date_entered_marketingqualifiedlead": null, "hs_date_entered_opportunity": null, "hs_date_entered_other": null, "hs_date_entered_salesqualifiedlead": null, 
"hs_date_entered_subscriber": null, "hs_date_exited_customer": null, "hs_date_exited_evangelist": null, "hs_date_exited_lead": null, "hs_date_exited_marketingqualifiedlead": null, "hs_date_exited_opportunity": null, "hs_date_exited_other": null, "hs_date_exited_salesqualifiedlead": null, "hs_date_exited_subscriber": null, "hs_ideal_customer_profile": null, "hs_is_target_account": null, "hs_last_booked_meeting_date": null, "hs_last_logged_call_date": null, "hs_last_open_task_date": null, "hs_last_sales_activity_date": null, "hs_last_sales_activity_timestamp": null, "hs_lastmodifieddate": "2021-07-31T03:39:46.295000+00:00", "hs_latest_createdate_of_active_subscriptions": null, "hs_latest_meeting_activity": null, "hs_lead_status": null, "hs_merged_object_ids": null, "hs_num_blockers": 0.0, "hs_num_child_companies": 0.0, "hs_num_contacts_with_buying_roles": 0.0, "hs_num_decision_makers": 0.0, "hs_num_open_deals": 0.0, "hs_object_id": 5183445018, "hs_parent_company_id": null, "hs_pipeline": null, "hs_predictivecontactscore_v2": null, "hs_predictivecontactscore_v2_next_max_max_d4e58c1e": null, "hs_sales_email_last_replied": null, "hs_target_account": null, "hs_target_account_probability": 0.4076234698295593, "hs_target_account_recommendation_snooze_time": null, "hs_target_account_recommendation_state": null, "hs_time_in_customer": null, "hs_time_in_evangelist": null, "hs_time_in_lead": null, "hs_time_in_marketingqualifiedlead": null, "hs_time_in_opportunity": null, "hs_time_in_other": null, "hs_time_in_salesqualifiedlead": null, "hs_time_in_subscriber": null, "hs_total_deal_value": null, "hs_unique_creation_key": null, "hs_updated_by_user_id": null, "hs_user_ids_of_all_notification_followers": null, "hs_user_ids_of_all_notification_unfollowers": null, "hs_user_ids_of_all_owners": null, "hubspot_owner_assigneddate": null, "hubspot_owner_id": null, "hubspot_team_id": null, "hubspotscore": null, "industry": null, "is_public": null, "lifecyclestage": null, 
"linkedin_company_page": null, "linkedinbio": null, "name": "New company test 7", "notes_last_contacted": null, "notes_last_updated": null, "notes_next_activity_date": null, "num_associated_contacts": 0.0, "num_associated_deals": null, "num_contacted_notes": null, "num_conversion_events": null, "num_conversion_events_cardinality_sum_d095f14b": null, "num_notes": null, "numberofemployees": null, "phone": null, "recent_conversion_date": null, "recent_conversion_date_timestamp_latest_value_72856da1": null, "recent_conversion_event_name": null, "recent_conversion_event_name_timestamp_latest_value_66c820bf": null, "recent_deal_amount": null, "recent_deal_close_date": null, "state": null, "timezone": null, "total_money_raised": null, "total_revenue": null, "twitterbio": null, "twitterfollowers": null, "twitterhandle": null, "type": null, "web_technologies": null, "website": null, "zip": null}, "createdAt": "2021-01-14T14:22:27.996Z", "updatedAt": "2021-07-31T03:39:46.295Z", "archived": false}, "emitted_at": 1655280716897} +{"stream": "companies", "data": {"id": "5183450595", "properties": {"about_us": null, "address": null, "address2": null, "annualrevenue": null, "city": null, "closedate": null, "closedate_timestamp_earliest_value_a2a17e6e": null, "country": null, "createdate": "2021-01-14T14:22:26.473000+00:00", "days_to_close": null, "description": "New company test 3", "domain": null, "engagements_last_meeting_booked": null, "engagements_last_meeting_booked_campaign": null, "engagements_last_meeting_booked_medium": null, "engagements_last_meeting_booked_source": null, "facebook_company_page": null, "facebookfans": null, "first_contact_createdate": null, "first_contact_createdate_timestamp_earliest_value_78b50eea": null, "first_conversion_date": null, "first_conversion_date_timestamp_earliest_value_61f58f2c": null, "first_conversion_event_name": null, "first_conversion_event_name_timestamp_earliest_value_68ddae0a": null, "first_deal_created_date": null, 
"founded_year": null, "googleplus_page": null, "hs_additional_domains": null, "hs_all_accessible_team_ids": null, "hs_all_assigned_business_unit_ids": null, "hs_all_owner_ids": null, "hs_all_team_ids": null, "hs_analytics_first_timestamp": null, "hs_analytics_first_timestamp_timestamp_earliest_value_11e3a63a": null, "hs_analytics_first_touch_converting_campaign": null, "hs_analytics_first_touch_converting_campaign_timestamp_earliest_value_4757fe10": null, "hs_analytics_first_visit_timestamp": null, "hs_analytics_first_visit_timestamp_timestamp_earliest_value_accc17ae": null, "hs_analytics_last_timestamp": null, "hs_analytics_last_timestamp_timestamp_latest_value_4e16365a": null, "hs_analytics_last_touch_converting_campaign": null, "hs_analytics_last_touch_converting_campaign_timestamp_latest_value_81a64e30": null, "hs_analytics_last_visit_timestamp": null, "hs_analytics_last_visit_timestamp_timestamp_latest_value_999a0fce": null, "hs_analytics_num_page_views": null, "hs_analytics_num_page_views_cardinality_sum_e46e85b0": null, "hs_analytics_num_visits": null, "hs_analytics_num_visits_cardinality_sum_53d952a6": null, "hs_analytics_source": null, "hs_analytics_source_data_1": null, "hs_analytics_source_data_1_timestamp_earliest_value_9b2f1fa1": null, "hs_analytics_source_data_2": null, "hs_analytics_source_data_2_timestamp_earliest_value_9b2f9400": null, "hs_analytics_source_timestamp_earliest_value_25a3a52c": null, "hs_avatar_filemanager_key": null, "hs_created_by_user_id": null, "hs_createdate": null, "hs_date_entered_customer": null, "hs_date_entered_evangelist": null, "hs_date_entered_lead": null, "hs_date_entered_marketingqualifiedlead": null, "hs_date_entered_opportunity": null, "hs_date_entered_other": null, "hs_date_entered_salesqualifiedlead": null, "hs_date_entered_subscriber": null, "hs_date_exited_customer": null, "hs_date_exited_evangelist": null, "hs_date_exited_lead": null, "hs_date_exited_marketingqualifiedlead": null, "hs_date_exited_opportunity": 
null, "hs_date_exited_other": null, "hs_date_exited_salesqualifiedlead": null, "hs_date_exited_subscriber": null, "hs_ideal_customer_profile": null, "hs_is_target_account": null, "hs_last_booked_meeting_date": null, "hs_last_logged_call_date": null, "hs_last_open_task_date": null, "hs_last_sales_activity_date": null, "hs_last_sales_activity_timestamp": null, "hs_lastmodifieddate": "2021-07-31T03:22:01.327000+00:00", "hs_latest_createdate_of_active_subscriptions": null, "hs_latest_meeting_activity": null, "hs_lead_status": null, "hs_merged_object_ids": null, "hs_num_blockers": 0.0, "hs_num_child_companies": 0.0, "hs_num_contacts_with_buying_roles": 0.0, "hs_num_decision_makers": 0.0, "hs_num_open_deals": 0.0, "hs_object_id": 5183450595, "hs_parent_company_id": null, "hs_pipeline": null, "hs_predictivecontactscore_v2": null, "hs_predictivecontactscore_v2_next_max_max_d4e58c1e": null, "hs_sales_email_last_replied": null, "hs_target_account": null, "hs_target_account_probability": 0.4076234698295593, "hs_target_account_recommendation_snooze_time": null, "hs_target_account_recommendation_state": null, "hs_time_in_customer": null, "hs_time_in_evangelist": null, "hs_time_in_lead": null, "hs_time_in_marketingqualifiedlead": null, "hs_time_in_opportunity": null, "hs_time_in_other": null, "hs_time_in_salesqualifiedlead": null, "hs_time_in_subscriber": null, "hs_total_deal_value": null, "hs_unique_creation_key": null, "hs_updated_by_user_id": null, "hs_user_ids_of_all_notification_followers": null, "hs_user_ids_of_all_notification_unfollowers": null, "hs_user_ids_of_all_owners": null, "hubspot_owner_assigneddate": null, "hubspot_owner_id": null, "hubspot_team_id": null, "hubspotscore": null, "industry": null, "is_public": null, "lifecyclestage": null, "linkedin_company_page": null, "linkedinbio": null, "name": "New company test 3", "notes_last_contacted": null, "notes_last_updated": null, "notes_next_activity_date": null, "num_associated_contacts": 0.0, 
"num_associated_deals": null, "num_contacted_notes": null, "num_conversion_events": null, "num_conversion_events_cardinality_sum_d095f14b": null, "num_notes": null, "numberofemployees": null, "phone": null, "recent_conversion_date": null, "recent_conversion_date_timestamp_latest_value_72856da1": null, "recent_conversion_event_name": null, "recent_conversion_event_name_timestamp_latest_value_66c820bf": null, "recent_deal_amount": null, "recent_deal_close_date": null, "state": null, "timezone": null, "total_money_raised": null, "total_revenue": null, "twitterbio": null, "twitterfollowers": null, "twitterhandle": null, "type": null, "web_technologies": null, "website": null, "zip": null}, "createdAt": "2021-01-14T14:22:26.473Z", "updatedAt": "2021-07-31T03:22:01.327Z", "archived": false}, "emitted_at": 1655280716897} +{"stream": "companies", "data": {"id": "5183450598", "properties": {"about_us": null, "address": null, "address2": null, "annualrevenue": null, "city": null, "closedate": null, "closedate_timestamp_earliest_value_a2a17e6e": null, "country": null, "createdate": "2021-01-14T14:22:27.721000+00:00", "days_to_close": null, "description": "New company test 6", "domain": null, "engagements_last_meeting_booked": null, "engagements_last_meeting_booked_campaign": null, "engagements_last_meeting_booked_medium": null, "engagements_last_meeting_booked_source": null, "facebook_company_page": null, "facebookfans": null, "first_contact_createdate": null, "first_contact_createdate_timestamp_earliest_value_78b50eea": null, "first_conversion_date": null, "first_conversion_date_timestamp_earliest_value_61f58f2c": null, "first_conversion_event_name": null, "first_conversion_event_name_timestamp_earliest_value_68ddae0a": null, "first_deal_created_date": null, "founded_year": null, "googleplus_page": null, "hs_additional_domains": null, "hs_all_accessible_team_ids": null, "hs_all_assigned_business_unit_ids": null, "hs_all_owner_ids": null, "hs_all_team_ids": null, 
"hs_analytics_first_timestamp": null, "hs_analytics_first_timestamp_timestamp_earliest_value_11e3a63a": null, "hs_analytics_first_touch_converting_campaign": null, "hs_analytics_first_touch_converting_campaign_timestamp_earliest_value_4757fe10": null, "hs_analytics_first_visit_timestamp": null, "hs_analytics_first_visit_timestamp_timestamp_earliest_value_accc17ae": null, "hs_analytics_last_timestamp": null, "hs_analytics_last_timestamp_timestamp_latest_value_4e16365a": null, "hs_analytics_last_touch_converting_campaign": null, "hs_analytics_last_touch_converting_campaign_timestamp_latest_value_81a64e30": null, "hs_analytics_last_visit_timestamp": null, "hs_analytics_last_visit_timestamp_timestamp_latest_value_999a0fce": null, "hs_analytics_num_page_views": null, "hs_analytics_num_page_views_cardinality_sum_e46e85b0": null, "hs_analytics_num_visits": null, "hs_analytics_num_visits_cardinality_sum_53d952a6": null, "hs_analytics_source": null, "hs_analytics_source_data_1": null, "hs_analytics_source_data_1_timestamp_earliest_value_9b2f1fa1": null, "hs_analytics_source_data_2": null, "hs_analytics_source_data_2_timestamp_earliest_value_9b2f9400": null, "hs_analytics_source_timestamp_earliest_value_25a3a52c": null, "hs_avatar_filemanager_key": null, "hs_created_by_user_id": null, "hs_createdate": null, "hs_date_entered_customer": null, "hs_date_entered_evangelist": null, "hs_date_entered_lead": null, "hs_date_entered_marketingqualifiedlead": null, "hs_date_entered_opportunity": null, "hs_date_entered_other": null, "hs_date_entered_salesqualifiedlead": null, "hs_date_entered_subscriber": null, "hs_date_exited_customer": null, "hs_date_exited_evangelist": null, "hs_date_exited_lead": null, "hs_date_exited_marketingqualifiedlead": null, "hs_date_exited_opportunity": null, "hs_date_exited_other": null, "hs_date_exited_salesqualifiedlead": null, "hs_date_exited_subscriber": null, "hs_ideal_customer_profile": null, "hs_is_target_account": null, "hs_last_booked_meeting_date": 
null, "hs_last_logged_call_date": null, "hs_last_open_task_date": null, "hs_last_sales_activity_date": null, "hs_last_sales_activity_timestamp": null, "hs_lastmodifieddate": "2021-08-01T14:47:18.061000+00:00", "hs_latest_createdate_of_active_subscriptions": null, "hs_latest_meeting_activity": null, "hs_lead_status": null, "hs_merged_object_ids": null, "hs_num_blockers": 0.0, "hs_num_child_companies": 0.0, "hs_num_contacts_with_buying_roles": 0.0, "hs_num_decision_makers": 0.0, "hs_num_open_deals": 0.0, "hs_object_id": 5183450598, "hs_parent_company_id": null, "hs_pipeline": null, "hs_predictivecontactscore_v2": null, "hs_predictivecontactscore_v2_next_max_max_d4e58c1e": null, "hs_sales_email_last_replied": null, "hs_target_account": null, "hs_target_account_probability": 0.4076234698295593, "hs_target_account_recommendation_snooze_time": null, "hs_target_account_recommendation_state": null, "hs_time_in_customer": null, "hs_time_in_evangelist": null, "hs_time_in_lead": null, "hs_time_in_marketingqualifiedlead": null, "hs_time_in_opportunity": null, "hs_time_in_other": null, "hs_time_in_salesqualifiedlead": null, "hs_time_in_subscriber": null, "hs_total_deal_value": null, "hs_unique_creation_key": null, "hs_updated_by_user_id": null, "hs_user_ids_of_all_notification_followers": null, "hs_user_ids_of_all_notification_unfollowers": null, "hs_user_ids_of_all_owners": null, "hubspot_owner_assigneddate": null, "hubspot_owner_id": null, "hubspot_team_id": null, "hubspotscore": null, "industry": null, "is_public": null, "lifecyclestage": null, "linkedin_company_page": null, "linkedinbio": null, "name": "New company test 6", "notes_last_contacted": null, "notes_last_updated": null, "notes_next_activity_date": null, "num_associated_contacts": 0.0, "num_associated_deals": null, "num_contacted_notes": null, "num_conversion_events": null, "num_conversion_events_cardinality_sum_d095f14b": null, "num_notes": null, "numberofemployees": null, "phone": null, "recent_conversion_date": 
null, "recent_conversion_date_timestamp_latest_value_72856da1": null, "recent_conversion_event_name": null, "recent_conversion_event_name_timestamp_latest_value_66c820bf": null, "recent_deal_amount": null, "recent_deal_close_date": null, "state": null, "timezone": null, "total_money_raised": null, "total_revenue": null, "twitterbio": null, "twitterfollowers": null, "twitterhandle": null, "type": null, "web_technologies": null, "website": null, "zip": null}, "createdAt": "2021-01-14T14:22:27.721Z", "updatedAt": "2021-08-01T14:47:18.061Z", "archived": false}, "emitted_at": 1655280716898} +{"stream": "companies", "data": {"id": "5183450599", "properties": {"about_us": null, "address": null, "address2": null, "annualrevenue": null, "city": null, "closedate": null, "closedate_timestamp_earliest_value_a2a17e6e": null, "country": null, "createdate": "2021-01-14T14:22:28.608000+00:00", "days_to_close": null, "description": "New company test 9", "domain": null, "engagements_last_meeting_booked": null, "engagements_last_meeting_booked_campaign": null, "engagements_last_meeting_booked_medium": null, "engagements_last_meeting_booked_source": null, "facebook_company_page": null, "facebookfans": null, "first_contact_createdate": null, "first_contact_createdate_timestamp_earliest_value_78b50eea": null, "first_conversion_date": null, "first_conversion_date_timestamp_earliest_value_61f58f2c": null, "first_conversion_event_name": null, "first_conversion_event_name_timestamp_earliest_value_68ddae0a": null, "first_deal_created_date": null, "founded_year": null, "googleplus_page": null, "hs_additional_domains": null, "hs_all_accessible_team_ids": null, "hs_all_assigned_business_unit_ids": null, "hs_all_owner_ids": null, "hs_all_team_ids": null, "hs_analytics_first_timestamp": null, "hs_analytics_first_timestamp_timestamp_earliest_value_11e3a63a": null, "hs_analytics_first_touch_converting_campaign": null, "hs_analytics_first_touch_converting_campaign_timestamp_earliest_value_4757fe10": 
null, "hs_analytics_first_visit_timestamp": null, "hs_analytics_first_visit_timestamp_timestamp_earliest_value_accc17ae": null, "hs_analytics_last_timestamp": null, "hs_analytics_last_timestamp_timestamp_latest_value_4e16365a": null, "hs_analytics_last_touch_converting_campaign": null, "hs_analytics_last_touch_converting_campaign_timestamp_latest_value_81a64e30": null, "hs_analytics_last_visit_timestamp": null, "hs_analytics_last_visit_timestamp_timestamp_latest_value_999a0fce": null, "hs_analytics_num_page_views": null, "hs_analytics_num_page_views_cardinality_sum_e46e85b0": null, "hs_analytics_num_visits": null, "hs_analytics_num_visits_cardinality_sum_53d952a6": null, "hs_analytics_source": null, "hs_analytics_source_data_1": null, "hs_analytics_source_data_1_timestamp_earliest_value_9b2f1fa1": null, "hs_analytics_source_data_2": null, "hs_analytics_source_data_2_timestamp_earliest_value_9b2f9400": null, "hs_analytics_source_timestamp_earliest_value_25a3a52c": null, "hs_avatar_filemanager_key": null, "hs_created_by_user_id": null, "hs_createdate": null, "hs_date_entered_customer": null, "hs_date_entered_evangelist": null, "hs_date_entered_lead": null, "hs_date_entered_marketingqualifiedlead": null, "hs_date_entered_opportunity": null, "hs_date_entered_other": null, "hs_date_entered_salesqualifiedlead": null, "hs_date_entered_subscriber": null, "hs_date_exited_customer": null, "hs_date_exited_evangelist": null, "hs_date_exited_lead": null, "hs_date_exited_marketingqualifiedlead": null, "hs_date_exited_opportunity": null, "hs_date_exited_other": null, "hs_date_exited_salesqualifiedlead": null, "hs_date_exited_subscriber": null, "hs_ideal_customer_profile": null, "hs_is_target_account": null, "hs_last_booked_meeting_date": null, "hs_last_logged_call_date": null, "hs_last_open_task_date": null, "hs_last_sales_activity_date": null, "hs_last_sales_activity_timestamp": null, "hs_lastmodifieddate": "2021-07-31T15:29:45.110000+00:00", 
"hs_latest_createdate_of_active_subscriptions": null, "hs_latest_meeting_activity": null, "hs_lead_status": null, "hs_merged_object_ids": null, "hs_num_blockers": 0.0, "hs_num_child_companies": 0.0, "hs_num_contacts_with_buying_roles": 0.0, "hs_num_decision_makers": 0.0, "hs_num_open_deals": 0.0, "hs_object_id": 5183450599, "hs_parent_company_id": null, "hs_pipeline": null, "hs_predictivecontactscore_v2": null, "hs_predictivecontactscore_v2_next_max_max_d4e58c1e": null, "hs_sales_email_last_replied": null, "hs_target_account": null, "hs_target_account_probability": 0.4076234698295593, "hs_target_account_recommendation_snooze_time": null, "hs_target_account_recommendation_state": null, "hs_time_in_customer": null, "hs_time_in_evangelist": null, "hs_time_in_lead": null, "hs_time_in_marketingqualifiedlead": null, "hs_time_in_opportunity": null, "hs_time_in_other": null, "hs_time_in_salesqualifiedlead": null, "hs_time_in_subscriber": null, "hs_total_deal_value": null, "hs_unique_creation_key": null, "hs_updated_by_user_id": null, "hs_user_ids_of_all_notification_followers": null, "hs_user_ids_of_all_notification_unfollowers": null, "hs_user_ids_of_all_owners": null, "hubspot_owner_assigneddate": null, "hubspot_owner_id": null, "hubspot_team_id": null, "hubspotscore": null, "industry": null, "is_public": null, "lifecyclestage": null, "linkedin_company_page": null, "linkedinbio": null, "name": "New company test 9", "notes_last_contacted": null, "notes_last_updated": null, "notes_next_activity_date": null, "num_associated_contacts": 0.0, "num_associated_deals": null, "num_contacted_notes": null, "num_conversion_events": null, "num_conversion_events_cardinality_sum_d095f14b": null, "num_notes": null, "numberofemployees": null, "phone": null, "recent_conversion_date": null, "recent_conversion_date_timestamp_latest_value_72856da1": null, "recent_conversion_event_name": null, "recent_conversion_event_name_timestamp_latest_value_66c820bf": null, "recent_deal_amount": null, 
"recent_deal_close_date": null, "state": null, "timezone": null, "total_money_raised": null, "total_revenue": null, "twitterbio": null, "twitterfollowers": null, "twitterhandle": null, "type": null, "web_technologies": null, "website": null, "zip": null}, "createdAt": "2021-01-14T14:22:28.608Z", "updatedAt": "2021-07-31T15:29:45.110Z", "archived": false}, "emitted_at": 1655280716898} +{"stream": "companies", "data": {"id": "5183457488", "properties": {"about_us": null, "address": null, "address2": null, "annualrevenue": null, "city": null, "closedate": null, "closedate_timestamp_earliest_value_a2a17e6e": null, "country": null, "createdate": "2021-01-14T14:22:26.200000+00:00", "days_to_close": null, "description": "New company test 2", "domain": null, "engagements_last_meeting_booked": null, "engagements_last_meeting_booked_campaign": null, "engagements_last_meeting_booked_medium": null, "engagements_last_meeting_booked_source": null, "facebook_company_page": null, "facebookfans": null, "first_contact_createdate": null, "first_contact_createdate_timestamp_earliest_value_78b50eea": null, "first_conversion_date": null, "first_conversion_date_timestamp_earliest_value_61f58f2c": null, "first_conversion_event_name": null, "first_conversion_event_name_timestamp_earliest_value_68ddae0a": null, "first_deal_created_date": null, "founded_year": null, "googleplus_page": null, "hs_additional_domains": null, "hs_all_accessible_team_ids": null, "hs_all_assigned_business_unit_ids": null, "hs_all_owner_ids": null, "hs_all_team_ids": null, "hs_analytics_first_timestamp": null, "hs_analytics_first_timestamp_timestamp_earliest_value_11e3a63a": null, "hs_analytics_first_touch_converting_campaign": null, "hs_analytics_first_touch_converting_campaign_timestamp_earliest_value_4757fe10": null, "hs_analytics_first_visit_timestamp": null, "hs_analytics_first_visit_timestamp_timestamp_earliest_value_accc17ae": null, "hs_analytics_last_timestamp": null, 
"hs_analytics_last_timestamp_timestamp_latest_value_4e16365a": null, "hs_analytics_last_touch_converting_campaign": null, "hs_analytics_last_touch_converting_campaign_timestamp_latest_value_81a64e30": null, "hs_analytics_last_visit_timestamp": null, "hs_analytics_last_visit_timestamp_timestamp_latest_value_999a0fce": null, "hs_analytics_num_page_views": null, "hs_analytics_num_page_views_cardinality_sum_e46e85b0": null, "hs_analytics_num_visits": null, "hs_analytics_num_visits_cardinality_sum_53d952a6": null, "hs_analytics_source": null, "hs_analytics_source_data_1": null, "hs_analytics_source_data_1_timestamp_earliest_value_9b2f1fa1": null, "hs_analytics_source_data_2": null, "hs_analytics_source_data_2_timestamp_earliest_value_9b2f9400": null, "hs_analytics_source_timestamp_earliest_value_25a3a52c": null, "hs_avatar_filemanager_key": null, "hs_created_by_user_id": null, "hs_createdate": null, "hs_date_entered_customer": null, "hs_date_entered_evangelist": null, "hs_date_entered_lead": null, "hs_date_entered_marketingqualifiedlead": null, "hs_date_entered_opportunity": null, "hs_date_entered_other": null, "hs_date_entered_salesqualifiedlead": null, "hs_date_entered_subscriber": null, "hs_date_exited_customer": null, "hs_date_exited_evangelist": null, "hs_date_exited_lead": null, "hs_date_exited_marketingqualifiedlead": null, "hs_date_exited_opportunity": null, "hs_date_exited_other": null, "hs_date_exited_salesqualifiedlead": null, "hs_date_exited_subscriber": null, "hs_ideal_customer_profile": null, "hs_is_target_account": null, "hs_last_booked_meeting_date": null, "hs_last_logged_call_date": null, "hs_last_open_task_date": null, "hs_last_sales_activity_date": null, "hs_last_sales_activity_timestamp": null, "hs_lastmodifieddate": "2021-08-05T07:47:18.903000+00:00", "hs_latest_createdate_of_active_subscriptions": null, "hs_latest_meeting_activity": null, "hs_lead_status": null, "hs_merged_object_ids": null, "hs_num_blockers": 0.0, "hs_num_child_companies": 0.0, 
"hs_num_contacts_with_buying_roles": 0.0, "hs_num_decision_makers": 0.0, "hs_num_open_deals": 0.0, "hs_object_id": 5183457488, "hs_parent_company_id": null, "hs_pipeline": null, "hs_predictivecontactscore_v2": null, "hs_predictivecontactscore_v2_next_max_max_d4e58c1e": null, "hs_sales_email_last_replied": null, "hs_target_account": null, "hs_target_account_probability": 0.4076234698295593, "hs_target_account_recommendation_snooze_time": null, "hs_target_account_recommendation_state": null, "hs_time_in_customer": null, "hs_time_in_evangelist": null, "hs_time_in_lead": null, "hs_time_in_marketingqualifiedlead": null, "hs_time_in_opportunity": null, "hs_time_in_other": null, "hs_time_in_salesqualifiedlead": null, "hs_time_in_subscriber": null, "hs_total_deal_value": null, "hs_unique_creation_key": null, "hs_updated_by_user_id": null, "hs_user_ids_of_all_notification_followers": null, "hs_user_ids_of_all_notification_unfollowers": null, "hs_user_ids_of_all_owners": null, "hubspot_owner_assigneddate": null, "hubspot_owner_id": null, "hubspot_team_id": null, "hubspotscore": null, "industry": null, "is_public": null, "lifecyclestage": null, "linkedin_company_page": null, "linkedinbio": null, "name": "New company test 2", "notes_last_contacted": null, "notes_last_updated": null, "notes_next_activity_date": null, "num_associated_contacts": 0.0, "num_associated_deals": null, "num_contacted_notes": null, "num_conversion_events": null, "num_conversion_events_cardinality_sum_d095f14b": null, "num_notes": null, "numberofemployees": null, "phone": null, "recent_conversion_date": null, "recent_conversion_date_timestamp_latest_value_72856da1": null, "recent_conversion_event_name": null, "recent_conversion_event_name_timestamp_latest_value_66c820bf": null, "recent_deal_amount": null, "recent_deal_close_date": null, "state": null, "timezone": null, "total_money_raised": null, "total_revenue": null, "twitterbio": null, "twitterfollowers": null, "twitterhandle": null, "type": null, 
"web_technologies": null, "website": null, "zip": null}, "createdAt": "2021-01-14T14:22:26.200Z", "updatedAt": "2021-08-05T07:47:18.903Z", "archived": false}, "emitted_at": 1655280716898} +{"stream": "companies", "data": {"id": "5183457493", "properties": {"about_us": null, "address": null, "address2": null, "annualrevenue": null, "city": null, "closedate": null, "closedate_timestamp_earliest_value_a2a17e6e": null, "country": null, "createdate": "2021-01-14T14:22:28.877000+00:00", "days_to_close": null, "description": "New company test 10", "domain": null, "engagements_last_meeting_booked": null, "engagements_last_meeting_booked_campaign": null, "engagements_last_meeting_booked_medium": null, "engagements_last_meeting_booked_source": null, "facebook_company_page": null, "facebookfans": null, "first_contact_createdate": null, "first_contact_createdate_timestamp_earliest_value_78b50eea": null, "first_conversion_date": null, "first_conversion_date_timestamp_earliest_value_61f58f2c": null, "first_conversion_event_name": null, "first_conversion_event_name_timestamp_earliest_value_68ddae0a": null, "first_deal_created_date": null, "founded_year": null, "googleplus_page": null, "hs_additional_domains": null, "hs_all_accessible_team_ids": null, "hs_all_assigned_business_unit_ids": null, "hs_all_owner_ids": null, "hs_all_team_ids": null, "hs_analytics_first_timestamp": null, "hs_analytics_first_timestamp_timestamp_earliest_value_11e3a63a": null, "hs_analytics_first_touch_converting_campaign": null, "hs_analytics_first_touch_converting_campaign_timestamp_earliest_value_4757fe10": null, "hs_analytics_first_visit_timestamp": null, "hs_analytics_first_visit_timestamp_timestamp_earliest_value_accc17ae": null, "hs_analytics_last_timestamp": null, "hs_analytics_last_timestamp_timestamp_latest_value_4e16365a": null, "hs_analytics_last_touch_converting_campaign": null, "hs_analytics_last_touch_converting_campaign_timestamp_latest_value_81a64e30": null, 
"hs_analytics_last_visit_timestamp": null, "hs_analytics_last_visit_timestamp_timestamp_latest_value_999a0fce": null, "hs_analytics_num_page_views": null, "hs_analytics_num_page_views_cardinality_sum_e46e85b0": null, "hs_analytics_num_visits": null, "hs_analytics_num_visits_cardinality_sum_53d952a6": null, "hs_analytics_source": null, "hs_analytics_source_data_1": null, "hs_analytics_source_data_1_timestamp_earliest_value_9b2f1fa1": null, "hs_analytics_source_data_2": null, "hs_analytics_source_data_2_timestamp_earliest_value_9b2f9400": null, "hs_analytics_source_timestamp_earliest_value_25a3a52c": null, "hs_avatar_filemanager_key": null, "hs_created_by_user_id": null, "hs_createdate": null, "hs_date_entered_customer": null, "hs_date_entered_evangelist": null, "hs_date_entered_lead": null, "hs_date_entered_marketingqualifiedlead": null, "hs_date_entered_opportunity": null, "hs_date_entered_other": null, "hs_date_entered_salesqualifiedlead": null, "hs_date_entered_subscriber": null, "hs_date_exited_customer": null, "hs_date_exited_evangelist": null, "hs_date_exited_lead": null, "hs_date_exited_marketingqualifiedlead": null, "hs_date_exited_opportunity": null, "hs_date_exited_other": null, "hs_date_exited_salesqualifiedlead": null, "hs_date_exited_subscriber": null, "hs_ideal_customer_profile": null, "hs_is_target_account": null, "hs_last_booked_meeting_date": null, "hs_last_logged_call_date": null, "hs_last_open_task_date": null, "hs_last_sales_activity_date": null, "hs_last_sales_activity_timestamp": null, "hs_lastmodifieddate": "2021-08-02T01:06:25.358000+00:00", "hs_latest_createdate_of_active_subscriptions": null, "hs_latest_meeting_activity": null, "hs_lead_status": null, "hs_merged_object_ids": null, "hs_num_blockers": 0.0, "hs_num_child_companies": 0.0, "hs_num_contacts_with_buying_roles": 0.0, "hs_num_decision_makers": 0.0, "hs_num_open_deals": 0.0, "hs_object_id": 5183457493, "hs_parent_company_id": null, "hs_pipeline": null, "hs_predictivecontactscore_v2": 
null, "hs_predictivecontactscore_v2_next_max_max_d4e58c1e": null, "hs_sales_email_last_replied": null, "hs_target_account": null, "hs_target_account_probability": 0.4076234698295593, "hs_target_account_recommendation_snooze_time": null, "hs_target_account_recommendation_state": null, "hs_time_in_customer": null, "hs_time_in_evangelist": null, "hs_time_in_lead": null, "hs_time_in_marketingqualifiedlead": null, "hs_time_in_opportunity": null, "hs_time_in_other": null, "hs_time_in_salesqualifiedlead": null, "hs_time_in_subscriber": null, "hs_total_deal_value": null, "hs_unique_creation_key": null, "hs_updated_by_user_id": null, "hs_user_ids_of_all_notification_followers": null, "hs_user_ids_of_all_notification_unfollowers": null, "hs_user_ids_of_all_owners": null, "hubspot_owner_assigneddate": null, "hubspot_owner_id": null, "hubspot_team_id": null, "hubspotscore": null, "industry": null, "is_public": null, "lifecyclestage": null, "linkedin_company_page": null, "linkedinbio": null, "name": "New company test 10", "notes_last_contacted": null, "notes_last_updated": null, "notes_next_activity_date": null, "num_associated_contacts": 0.0, "num_associated_deals": null, "num_contacted_notes": null, "num_conversion_events": null, "num_conversion_events_cardinality_sum_d095f14b": null, "num_notes": null, "numberofemployees": null, "phone": null, "recent_conversion_date": null, "recent_conversion_date_timestamp_latest_value_72856da1": null, "recent_conversion_event_name": null, "recent_conversion_event_name_timestamp_latest_value_66c820bf": null, "recent_deal_amount": null, "recent_deal_close_date": null, "state": null, "timezone": null, "total_money_raised": null, "total_revenue": null, "twitterbio": null, "twitterfollowers": null, "twitterhandle": null, "type": null, "web_technologies": null, "website": null, "zip": null}, "createdAt": "2021-01-14T14:22:28.877Z", "updatedAt": "2021-08-02T01:06:25.358Z", "archived": false}, "emitted_at": 1655280716899} +{"stream": "companies", 
"data": {"id": "5430797625", "properties": {"about_us": null, "address": null, "address2": null, "annualrevenue": null, "city": null, "closedate": null, "closedate_timestamp_earliest_value_a2a17e6e": null, "country": "USA", "createdate": "2021-02-22T14:11:21.438000+00:00", "days_to_close": null, "description": "New company test 23", "domain": null, "engagements_last_meeting_booked": null, "engagements_last_meeting_booked_campaign": null, "engagements_last_meeting_booked_medium": null, "engagements_last_meeting_booked_source": null, "facebook_company_page": null, "facebookfans": null, "first_contact_createdate": null, "first_contact_createdate_timestamp_earliest_value_78b50eea": null, "first_conversion_date": null, "first_conversion_date_timestamp_earliest_value_61f58f2c": null, "first_conversion_event_name": null, "first_conversion_event_name_timestamp_earliest_value_68ddae0a": null, "first_deal_created_date": null, "founded_year": null, "googleplus_page": null, "hs_additional_domains": null, "hs_all_accessible_team_ids": null, "hs_all_assigned_business_unit_ids": null, "hs_all_owner_ids": null, "hs_all_team_ids": null, "hs_analytics_first_timestamp": null, "hs_analytics_first_timestamp_timestamp_earliest_value_11e3a63a": null, "hs_analytics_first_touch_converting_campaign": null, "hs_analytics_first_touch_converting_campaign_timestamp_earliest_value_4757fe10": null, "hs_analytics_first_visit_timestamp": null, "hs_analytics_first_visit_timestamp_timestamp_earliest_value_accc17ae": null, "hs_analytics_last_timestamp": null, "hs_analytics_last_timestamp_timestamp_latest_value_4e16365a": null, "hs_analytics_last_touch_converting_campaign": null, "hs_analytics_last_touch_converting_campaign_timestamp_latest_value_81a64e30": null, "hs_analytics_last_visit_timestamp": null, "hs_analytics_last_visit_timestamp_timestamp_latest_value_999a0fce": null, "hs_analytics_num_page_views": null, "hs_analytics_num_page_views_cardinality_sum_e46e85b0": null, "hs_analytics_num_visits": 
null, "hs_analytics_num_visits_cardinality_sum_53d952a6": null, "hs_analytics_source": null, "hs_analytics_source_data_1": null, "hs_analytics_source_data_1_timestamp_earliest_value_9b2f1fa1": null, "hs_analytics_source_data_2": null, "hs_analytics_source_data_2_timestamp_earliest_value_9b2f9400": null, "hs_analytics_source_timestamp_earliest_value_25a3a52c": null, "hs_avatar_filemanager_key": null, "hs_created_by_user_id": null, "hs_createdate": null, "hs_date_entered_customer": null, "hs_date_entered_evangelist": null, "hs_date_entered_lead": null, "hs_date_entered_marketingqualifiedlead": null, "hs_date_entered_opportunity": null, "hs_date_entered_other": null, "hs_date_entered_salesqualifiedlead": null, "hs_date_entered_subscriber": null, "hs_date_exited_customer": null, "hs_date_exited_evangelist": null, "hs_date_exited_lead": null, "hs_date_exited_marketingqualifiedlead": null, "hs_date_exited_opportunity": null, "hs_date_exited_other": null, "hs_date_exited_salesqualifiedlead": null, "hs_date_exited_subscriber": null, "hs_ideal_customer_profile": null, "hs_is_target_account": null, "hs_last_booked_meeting_date": null, "hs_last_logged_call_date": null, "hs_last_open_task_date": null, "hs_last_sales_activity_date": null, "hs_last_sales_activity_timestamp": null, "hs_lastmodifieddate": "2021-08-04T10:02:53.261000+00:00", "hs_latest_createdate_of_active_subscriptions": null, "hs_latest_meeting_activity": null, "hs_lead_status": null, "hs_merged_object_ids": null, "hs_num_blockers": 0.0, "hs_num_child_companies": 0.0, "hs_num_contacts_with_buying_roles": 0.0, "hs_num_decision_makers": 0.0, "hs_num_open_deals": 0.0, "hs_object_id": 5430797625, "hs_parent_company_id": null, "hs_pipeline": null, "hs_predictivecontactscore_v2": null, "hs_predictivecontactscore_v2_next_max_max_d4e58c1e": null, "hs_sales_email_last_replied": null, "hs_target_account": null, "hs_target_account_probability": 0.4076234698295593, "hs_target_account_recommendation_snooze_time": null, 
"hs_target_account_recommendation_state": null, "hs_time_in_customer": null, "hs_time_in_evangelist": null, "hs_time_in_lead": null, "hs_time_in_marketingqualifiedlead": null, "hs_time_in_opportunity": null, "hs_time_in_other": null, "hs_time_in_salesqualifiedlead": null, "hs_time_in_subscriber": null, "hs_total_deal_value": null, "hs_unique_creation_key": null, "hs_updated_by_user_id": null, "hs_user_ids_of_all_notification_followers": null, "hs_user_ids_of_all_notification_unfollowers": null, "hs_user_ids_of_all_owners": null, "hubspot_owner_assigneddate": null, "hubspot_owner_id": null, "hubspot_team_id": null, "hubspotscore": null, "industry": null, "is_public": null, "lifecyclestage": null, "linkedin_company_page": null, "linkedinbio": null, "name": "New company test 23", "notes_last_contacted": null, "notes_last_updated": null, "notes_next_activity_date": null, "num_associated_contacts": 0.0, "num_associated_deals": null, "num_contacted_notes": null, "num_conversion_events": null, "num_conversion_events_cardinality_sum_d095f14b": null, "num_notes": null, "numberofemployees": null, "phone": null, "recent_conversion_date": null, "recent_conversion_date_timestamp_latest_value_72856da1": null, "recent_conversion_event_name": null, "recent_conversion_event_name_timestamp_latest_value_66c820bf": null, "recent_deal_amount": null, "recent_deal_close_date": null, "state": null, "timezone": null, "total_money_raised": null, "total_revenue": null, "twitterbio": null, "twitterfollowers": null, "twitterhandle": null, "type": null, "web_technologies": null, "website": null, "zip": null}, "createdAt": "2021-02-22T14:11:21.438Z", "updatedAt": "2021-08-04T10:02:53.261Z", "archived": false}, "emitted_at": 1655280716899} +{"stream": "companies", "data": {"id": "7097477541", "properties": {"about_us": null, "address": "25 First Street", "address2": null, "annualrevenue": null, "city": "Cambridge", "closedate": null, "closedate_timestamp_earliest_value_a2a17e6e": null, "country": 
"United States", "createdate": "2021-10-12T12:45:14.039000+00:00", "days_to_close": null, "description": "The worldā€™s leading inbound marketing and sales platform. Since 2006, HubSpot has been on a mission to make the world more inbound. Today, 31,000+ customers in more than 90 countries use HubSpotā€™s software, services, and support to transform the way th...", "domain": "biglytics.net", "engagements_last_meeting_booked": null, "engagements_last_meeting_booked_campaign": null, "engagements_last_meeting_booked_medium": null, "engagements_last_meeting_booked_source": null, "facebook_company_page": "https://facebook.com/hubspot", "facebookfans": null, "first_contact_createdate": null, "first_contact_createdate_timestamp_earliest_value_78b50eea": null, "first_conversion_date": null, "first_conversion_date_timestamp_earliest_value_61f58f2c": null, "first_conversion_event_name": null, "first_conversion_event_name_timestamp_earliest_value_68ddae0a": null, "first_deal_created_date": null, "founded_year": "2006", "googleplus_page": null, "hs_additional_domains": null, "hs_all_accessible_team_ids": null, "hs_all_assigned_business_unit_ids": null, "hs_all_owner_ids": null, "hs_all_team_ids": null, "hs_analytics_first_timestamp": null, "hs_analytics_first_timestamp_timestamp_earliest_value_11e3a63a": null, "hs_analytics_first_touch_converting_campaign": null, "hs_analytics_first_touch_converting_campaign_timestamp_earliest_value_4757fe10": null, "hs_analytics_first_visit_timestamp": null, "hs_analytics_first_visit_timestamp_timestamp_earliest_value_accc17ae": null, "hs_analytics_last_timestamp": null, "hs_analytics_last_timestamp_timestamp_latest_value_4e16365a": null, "hs_analytics_last_touch_converting_campaign": null, "hs_analytics_last_touch_converting_campaign_timestamp_latest_value_81a64e30": null, "hs_analytics_last_visit_timestamp": null, "hs_analytics_last_visit_timestamp_timestamp_latest_value_999a0fce": null, "hs_analytics_num_page_views": null, 
"hs_analytics_num_page_views_cardinality_sum_e46e85b0": null, "hs_analytics_num_visits": null, "hs_analytics_num_visits_cardinality_sum_53d952a6": null, "hs_analytics_source": null, "hs_analytics_source_data_1": null, "hs_analytics_source_data_1_timestamp_earliest_value_9b2f1fa1": null, "hs_analytics_source_data_2": null, "hs_analytics_source_data_2_timestamp_earliest_value_9b2f9400": null, "hs_analytics_source_timestamp_earliest_value_25a3a52c": null, "hs_avatar_filemanager_key": null, "hs_created_by_user_id": null, "hs_createdate": null, "hs_date_entered_customer": null, "hs_date_entered_evangelist": null, "hs_date_entered_lead": null, "hs_date_entered_marketingqualifiedlead": null, "hs_date_entered_opportunity": null, "hs_date_entered_other": null, "hs_date_entered_salesqualifiedlead": null, "hs_date_entered_subscriber": null, "hs_date_exited_customer": null, "hs_date_exited_evangelist": null, "hs_date_exited_lead": null, "hs_date_exited_marketingqualifiedlead": null, "hs_date_exited_opportunity": null, "hs_date_exited_other": null, "hs_date_exited_salesqualifiedlead": null, "hs_date_exited_subscriber": null, "hs_ideal_customer_profile": null, "hs_is_target_account": null, "hs_last_booked_meeting_date": null, "hs_last_logged_call_date": null, "hs_last_open_task_date": null, "hs_last_sales_activity_date": null, "hs_last_sales_activity_timestamp": null, "hs_lastmodifieddate": "2022-03-31T13:29:57.223000+00:00", "hs_latest_createdate_of_active_subscriptions": null, "hs_latest_meeting_activity": null, "hs_lead_status": null, "hs_merged_object_ids": null, "hs_num_blockers": 0.0, "hs_num_child_companies": 0.0, "hs_num_contacts_with_buying_roles": 0.0, "hs_num_decision_makers": 0.0, "hs_num_open_deals": 0.0, "hs_object_id": 7097477541, "hs_parent_company_id": null, "hs_pipeline": null, "hs_predictivecontactscore_v2": null, "hs_predictivecontactscore_v2_next_max_max_d4e58c1e": null, "hs_sales_email_last_replied": null, "hs_target_account": null, 
"hs_target_account_probability": 0.4076234698295593, "hs_target_account_recommendation_snooze_time": null, "hs_target_account_recommendation_state": null, "hs_time_in_customer": null, "hs_time_in_evangelist": null, "hs_time_in_lead": null, "hs_time_in_marketingqualifiedlead": null, "hs_time_in_opportunity": null, "hs_time_in_other": null, "hs_time_in_salesqualifiedlead": null, "hs_time_in_subscriber": null, "hs_total_deal_value": null, "hs_unique_creation_key": null, "hs_updated_by_user_id": null, "hs_user_ids_of_all_notification_followers": null, "hs_user_ids_of_all_notification_unfollowers": null, "hs_user_ids_of_all_owners": null, "hubspot_owner_assigneddate": null, "hubspot_owner_id": null, "hubspot_team_id": null, "hubspotscore": null, "industry": "Technology", "is_public": true, "lifecyclestage": null, "linkedin_company_page": "https://www.linkedin.com/company/biglytics", "linkedinbio": "The worldā€™s leading inbound marketing and sales platform. Since 2006, HubSpot has been on a mission to make the world more inbound. 
Today, 31,000+ customers in more than 90 countries use HubSpotā€™s software, services, and support to transform the way th...", "name": "Biglytics", "notes_last_contacted": null, "notes_last_updated": null, "notes_next_activity_date": null, "num_associated_contacts": 0.0, "num_associated_deals": null, "num_contacted_notes": null, "num_conversion_events": null, "num_conversion_events_cardinality_sum_d095f14b": null, "num_notes": null, "numberofemployees": 10.0, "phone": "(877) 929-0687", "recent_conversion_date": null, "recent_conversion_date_timestamp_latest_value_72856da1": null, "recent_conversion_event_name": null, "recent_conversion_event_name_timestamp_latest_value_66c820bf": null, "recent_deal_amount": null, "recent_deal_close_date": null, "state": "Massachusetts", "timezone": "America/New_York", "total_money_raised": null, "total_revenue": null, "twitterbio": null, "twitterfollowers": null, "twitterhandle": "HubSpot", "type": null, "web_technologies": "segment;google_tag_manager;google_analytics;piwik;google_apps;adroll;hubspot;facebook_advertiser", "website": "biglytics.net", "zip": "62515"}, "createdAt": "2021-10-12T12:45:14.039Z", "updatedAt": "2022-03-31T13:29:57.223Z", "archived": false}, "emitted_at": 1655280716901} +{"stream": "companies", "data": {"id": "7097478537", "properties": {"about_us": null, "address": null, "address2": null, "annualrevenue": null, "city": "Test City 1", "closedate": null, "closedate_timestamp_earliest_value_a2a17e6e": null, "country": "Bangladesh", "createdate": "2021-10-12T12:57:12.412000+00:00", "days_to_close": null, "description": "This is a placeholder page installed by the Ubuntu release of the Lighttpd server package.", "domain": "test.domain.net", "engagements_last_meeting_booked": null, "engagements_last_meeting_booked_campaign": null, "engagements_last_meeting_booked_medium": null, "engagements_last_meeting_booked_source": null, "facebook_company_page": null, "facebookfans": null, "first_contact_createdate": null, 
"first_contact_createdate_timestamp_earliest_value_78b50eea": null, "first_conversion_date": null, "first_conversion_date_timestamp_earliest_value_61f58f2c": null, "first_conversion_event_name": null, "first_conversion_event_name_timestamp_earliest_value_68ddae0a": null, "first_deal_created_date": null, "founded_year": null, "googleplus_page": null, "hs_additional_domains": null, "hs_all_accessible_team_ids": null, "hs_all_assigned_business_unit_ids": null, "hs_all_owner_ids": null, "hs_all_team_ids": null, "hs_analytics_first_timestamp": null, "hs_analytics_first_timestamp_timestamp_earliest_value_11e3a63a": null, "hs_analytics_first_touch_converting_campaign": null, "hs_analytics_first_touch_converting_campaign_timestamp_earliest_value_4757fe10": null, "hs_analytics_first_visit_timestamp": null, "hs_analytics_first_visit_timestamp_timestamp_earliest_value_accc17ae": null, "hs_analytics_last_timestamp": null, "hs_analytics_last_timestamp_timestamp_latest_value_4e16365a": null, "hs_analytics_last_touch_converting_campaign": null, "hs_analytics_last_touch_converting_campaign_timestamp_latest_value_81a64e30": null, "hs_analytics_last_visit_timestamp": null, "hs_analytics_last_visit_timestamp_timestamp_latest_value_999a0fce": null, "hs_analytics_num_page_views": null, "hs_analytics_num_page_views_cardinality_sum_e46e85b0": null, "hs_analytics_num_visits": null, "hs_analytics_num_visits_cardinality_sum_53d952a6": null, "hs_analytics_source": null, "hs_analytics_source_data_1": null, "hs_analytics_source_data_1_timestamp_earliest_value_9b2f1fa1": null, "hs_analytics_source_data_2": null, "hs_analytics_source_data_2_timestamp_earliest_value_9b2f9400": null, "hs_analytics_source_timestamp_earliest_value_25a3a52c": null, "hs_avatar_filemanager_key": null, "hs_created_by_user_id": null, "hs_createdate": null, "hs_date_entered_customer": null, "hs_date_entered_evangelist": null, "hs_date_entered_lead": null, "hs_date_entered_marketingqualifiedlead": null, 
"hs_date_entered_opportunity": null, "hs_date_entered_other": null, "hs_date_entered_salesqualifiedlead": null, "hs_date_entered_subscriber": null, "hs_date_exited_customer": null, "hs_date_exited_evangelist": null, "hs_date_exited_lead": null, "hs_date_exited_marketingqualifiedlead": null, "hs_date_exited_opportunity": null, "hs_date_exited_other": null, "hs_date_exited_salesqualifiedlead": null, "hs_date_exited_subscriber": null, "hs_ideal_customer_profile": null, "hs_is_target_account": null, "hs_last_booked_meeting_date": null, "hs_last_logged_call_date": null, "hs_last_open_task_date": null, "hs_last_sales_activity_date": null, "hs_last_sales_activity_timestamp": null, "hs_lastmodifieddate": "2021-10-12T12:58:17.815000+00:00", "hs_latest_createdate_of_active_subscriptions": null, "hs_latest_meeting_activity": null, "hs_lead_status": null, "hs_merged_object_ids": null, "hs_num_blockers": 0.0, "hs_num_child_companies": 0.0, "hs_num_contacts_with_buying_roles": 0.0, "hs_num_decision_makers": 0.0, "hs_num_open_deals": 0.0, "hs_object_id": 7097478537, "hs_parent_company_id": null, "hs_pipeline": null, "hs_predictivecontactscore_v2": null, "hs_predictivecontactscore_v2_next_max_max_d4e58c1e": null, "hs_sales_email_last_replied": null, "hs_target_account": null, "hs_target_account_probability": 0.4076234698295593, "hs_target_account_recommendation_snooze_time": null, "hs_target_account_recommendation_state": null, "hs_time_in_customer": null, "hs_time_in_evangelist": null, "hs_time_in_lead": null, "hs_time_in_marketingqualifiedlead": null, "hs_time_in_opportunity": null, "hs_time_in_other": null, "hs_time_in_salesqualifiedlead": null, "hs_time_in_subscriber": null, "hs_total_deal_value": null, "hs_unique_creation_key": null, "hs_updated_by_user_id": null, "hs_user_ids_of_all_notification_followers": null, "hs_user_ids_of_all_notification_unfollowers": null, "hs_user_ids_of_all_owners": null, "hubspot_owner_assigneddate": null, "hubspot_owner_id": null, 
"hubspot_team_id": null, "hubspotscore": null, "industry": "Airbyte Test", "is_public": true, "lifecyclestage": null, "linkedin_company_page": "https://www.linkedin.com/company/bd-domain", "linkedinbio": null, "name": "test company 1", "notes_last_contacted": null, "notes_last_updated": null, "notes_next_activity_date": null, "num_associated_contacts": 0.0, "num_associated_deals": null, "num_contacted_notes": null, "num_conversion_events": null, "num_conversion_events_cardinality_sum_d095f14b": null, "num_notes": null, "numberofemployees": null, "phone": "(877) 929-0687-00291", "recent_conversion_date": null, "recent_conversion_date_timestamp_latest_value_72856da1": null, "recent_conversion_event_name": null, "recent_conversion_event_name_timestamp_latest_value_66c820bf": null, "recent_deal_amount": null, "recent_deal_close_date": null, "state": "Massachusetts", "timezone": "Asia/Dhaka", "total_money_raised": null, "total_revenue": null, "twitterbio": null, "twitterfollowers": null, "twitterhandle": null, "type": null, "web_technologies": "apache;nginx;facebook_social_plugins;facebook_connect;google_analytics;facebook_like_button;centos", "website": "test.domain.net", "zip": "1216"}, "createdAt": "2021-10-12T12:57:12.412Z", "updatedAt": "2021-10-12T12:58:17.815Z", "archived": false}, "emitted_at": 1655280716901} +{"stream": "companies", "data": {"id": "7097478539", "properties": {"about_us": null, "address": null, "address2": null, "annualrevenue": null, "city": "Test City 3", "closedate": null, "closedate_timestamp_earliest_value_a2a17e6e": null, "country": "Bangladesh", "createdate": "2021-10-12T12:57:13.260000+00:00", "days_to_close": null, "description": "This is a placeholder page installed by the Ubuntu release of the Lighttpd server package.", "domain": "test.domain.net", "engagements_last_meeting_booked": null, "engagements_last_meeting_booked_campaign": null, "engagements_last_meeting_booked_medium": null, "engagements_last_meeting_booked_source": null, 
"facebook_company_page": null, "facebookfans": null, "first_contact_createdate": null, "first_contact_createdate_timestamp_earliest_value_78b50eea": null, "first_conversion_date": null, "first_conversion_date_timestamp_earliest_value_61f58f2c": null, "first_conversion_event_name": null, "first_conversion_event_name_timestamp_earliest_value_68ddae0a": null, "first_deal_created_date": null, "founded_year": null, "googleplus_page": null, "hs_additional_domains": null, "hs_all_accessible_team_ids": null, "hs_all_assigned_business_unit_ids": null, "hs_all_owner_ids": null, "hs_all_team_ids": null, "hs_analytics_first_timestamp": null, "hs_analytics_first_timestamp_timestamp_earliest_value_11e3a63a": null, "hs_analytics_first_touch_converting_campaign": null, "hs_analytics_first_touch_converting_campaign_timestamp_earliest_value_4757fe10": null, "hs_analytics_first_visit_timestamp": null, "hs_analytics_first_visit_timestamp_timestamp_earliest_value_accc17ae": null, "hs_analytics_last_timestamp": null, "hs_analytics_last_timestamp_timestamp_latest_value_4e16365a": null, "hs_analytics_last_touch_converting_campaign": null, "hs_analytics_last_touch_converting_campaign_timestamp_latest_value_81a64e30": null, "hs_analytics_last_visit_timestamp": null, "hs_analytics_last_visit_timestamp_timestamp_latest_value_999a0fce": null, "hs_analytics_num_page_views": null, "hs_analytics_num_page_views_cardinality_sum_e46e85b0": null, "hs_analytics_num_visits": null, "hs_analytics_num_visits_cardinality_sum_53d952a6": null, "hs_analytics_source": null, "hs_analytics_source_data_1": null, "hs_analytics_source_data_1_timestamp_earliest_value_9b2f1fa1": null, "hs_analytics_source_data_2": null, "hs_analytics_source_data_2_timestamp_earliest_value_9b2f9400": null, "hs_analytics_source_timestamp_earliest_value_25a3a52c": null, "hs_avatar_filemanager_key": null, "hs_created_by_user_id": null, "hs_createdate": null, "hs_date_entered_customer": null, "hs_date_entered_evangelist": null, 
"hs_date_entered_lead": null, "hs_date_entered_marketingqualifiedlead": null, "hs_date_entered_opportunity": null, "hs_date_entered_other": null, "hs_date_entered_salesqualifiedlead": null, "hs_date_entered_subscriber": null, "hs_date_exited_customer": null, "hs_date_exited_evangelist": null, "hs_date_exited_lead": null, "hs_date_exited_marketingqualifiedlead": null, "hs_date_exited_opportunity": null, "hs_date_exited_other": null, "hs_date_exited_salesqualifiedlead": null, "hs_date_exited_subscriber": null, "hs_ideal_customer_profile": null, "hs_is_target_account": null, "hs_last_booked_meeting_date": null, "hs_last_logged_call_date": null, "hs_last_open_task_date": null, "hs_last_sales_activity_date": null, "hs_last_sales_activity_timestamp": null, "hs_lastmodifieddate": "2021-10-12T12:58:15.367000+00:00", "hs_latest_createdate_of_active_subscriptions": null, "hs_latest_meeting_activity": null, "hs_lead_status": null, "hs_merged_object_ids": null, "hs_num_blockers": 0.0, "hs_num_child_companies": 0.0, "hs_num_contacts_with_buying_roles": 0.0, "hs_num_decision_makers": 0.0, "hs_num_open_deals": 0.0, "hs_object_id": 7097478539, "hs_parent_company_id": null, "hs_pipeline": null, "hs_predictivecontactscore_v2": null, "hs_predictivecontactscore_v2_next_max_max_d4e58c1e": null, "hs_sales_email_last_replied": null, "hs_target_account": null, "hs_target_account_probability": 0.4076234698295593, "hs_target_account_recommendation_snooze_time": null, "hs_target_account_recommendation_state": null, "hs_time_in_customer": null, "hs_time_in_evangelist": null, "hs_time_in_lead": null, "hs_time_in_marketingqualifiedlead": null, "hs_time_in_opportunity": null, "hs_time_in_other": null, "hs_time_in_salesqualifiedlead": null, "hs_time_in_subscriber": null, "hs_total_deal_value": null, "hs_unique_creation_key": null, "hs_updated_by_user_id": null, "hs_user_ids_of_all_notification_followers": null, "hs_user_ids_of_all_notification_unfollowers": null, "hs_user_ids_of_all_owners": 
null, "hubspot_owner_assigneddate": null, "hubspot_owner_id": null, "hubspot_team_id": null, "hubspotscore": null, "industry": "Airbyte Test", "is_public": true, "lifecyclestage": null, "linkedin_company_page": "https://www.linkedin.com/company/bd-domain", "linkedinbio": null, "name": "test company 3", "notes_last_contacted": null, "notes_last_updated": null, "notes_next_activity_date": null, "num_associated_contacts": 0.0, "num_associated_deals": null, "num_contacted_notes": null, "num_conversion_events": null, "num_conversion_events_cardinality_sum_d095f14b": null, "num_notes": null, "numberofemployees": null, "phone": "(877) 929-0687-00291", "recent_conversion_date": null, "recent_conversion_date_timestamp_latest_value_72856da1": null, "recent_conversion_event_name": null, "recent_conversion_event_name_timestamp_latest_value_66c820bf": null, "recent_deal_amount": null, "recent_deal_close_date": null, "state": "Massachusetts", "timezone": "Asia/Dhaka", "total_money_raised": null, "total_revenue": null, "twitterbio": null, "twitterfollowers": null, "twitterhandle": null, "type": null, "web_technologies": "apache;nginx;facebook_social_plugins;facebook_connect;google_analytics;facebook_like_button;centos", "website": "test.domain.net", "zip": "1216"}, "createdAt": "2021-10-12T12:57:13.260Z", "updatedAt": "2021-10-12T12:58:15.367Z", "archived": false}, "emitted_at": 1655280716902} +{"stream": "companies", "data": {"id": "7097478554", "properties": {"about_us": null, "address": null, "address2": null, "annualrevenue": null, "city": "Test City 36", "closedate": null, "closedate_timestamp_earliest_value_a2a17e6e": null, "country": "Bangladesh", "createdate": "2021-10-12T12:57:24.412000+00:00", "days_to_close": null, "description": "This is a placeholder page installed by the Ubuntu release of the Lighttpd server package.", "domain": "test.domain.net", "engagements_last_meeting_booked": null, "engagements_last_meeting_booked_campaign": null, 
"engagements_last_meeting_booked_medium": null, "engagements_last_meeting_booked_source": null, "facebook_company_page": null, "facebookfans": null, "first_contact_createdate": null, "first_contact_createdate_timestamp_earliest_value_78b50eea": null, "first_conversion_date": null, "first_conversion_date_timestamp_earliest_value_61f58f2c": null, "first_conversion_event_name": null, "first_conversion_event_name_timestamp_earliest_value_68ddae0a": null, "first_deal_created_date": null, "founded_year": null, "googleplus_page": null, "hs_additional_domains": null, "hs_all_accessible_team_ids": null, "hs_all_assigned_business_unit_ids": null, "hs_all_owner_ids": null, "hs_all_team_ids": null, "hs_analytics_first_timestamp": null, "hs_analytics_first_timestamp_timestamp_earliest_value_11e3a63a": null, "hs_analytics_first_touch_converting_campaign": null, "hs_analytics_first_touch_converting_campaign_timestamp_earliest_value_4757fe10": null, "hs_analytics_first_visit_timestamp": null, "hs_analytics_first_visit_timestamp_timestamp_earliest_value_accc17ae": null, "hs_analytics_last_timestamp": null, "hs_analytics_last_timestamp_timestamp_latest_value_4e16365a": null, "hs_analytics_last_touch_converting_campaign": null, "hs_analytics_last_touch_converting_campaign_timestamp_latest_value_81a64e30": null, "hs_analytics_last_visit_timestamp": null, "hs_analytics_last_visit_timestamp_timestamp_latest_value_999a0fce": null, "hs_analytics_num_page_views": null, "hs_analytics_num_page_views_cardinality_sum_e46e85b0": null, "hs_analytics_num_visits": null, "hs_analytics_num_visits_cardinality_sum_53d952a6": null, "hs_analytics_source": null, "hs_analytics_source_data_1": null, "hs_analytics_source_data_1_timestamp_earliest_value_9b2f1fa1": null, "hs_analytics_source_data_2": null, "hs_analytics_source_data_2_timestamp_earliest_value_9b2f9400": null, "hs_analytics_source_timestamp_earliest_value_25a3a52c": null, "hs_avatar_filemanager_key": null, "hs_created_by_user_id": null, 
"hs_createdate": null, "hs_date_entered_customer": null, "hs_date_entered_evangelist": null, "hs_date_entered_lead": null, "hs_date_entered_marketingqualifiedlead": null, "hs_date_entered_opportunity": null, "hs_date_entered_other": null, "hs_date_entered_salesqualifiedlead": null, "hs_date_entered_subscriber": null, "hs_date_exited_customer": null, "hs_date_exited_evangelist": null, "hs_date_exited_lead": null, "hs_date_exited_marketingqualifiedlead": null, "hs_date_exited_opportunity": null, "hs_date_exited_other": null, "hs_date_exited_salesqualifiedlead": null, "hs_date_exited_subscriber": null, "hs_ideal_customer_profile": null, "hs_is_target_account": null, "hs_last_booked_meeting_date": null, "hs_last_logged_call_date": null, "hs_last_open_task_date": null, "hs_last_sales_activity_date": null, "hs_last_sales_activity_timestamp": null, "hs_lastmodifieddate": "2021-10-12T12:57:29.620000+00:00", "hs_latest_createdate_of_active_subscriptions": null, "hs_latest_meeting_activity": null, "hs_lead_status": null, "hs_merged_object_ids": null, "hs_num_blockers": 0.0, "hs_num_child_companies": 0.0, "hs_num_contacts_with_buying_roles": 0.0, "hs_num_decision_makers": 0.0, "hs_num_open_deals": 0.0, "hs_object_id": 7097478554, "hs_parent_company_id": null, "hs_pipeline": null, "hs_predictivecontactscore_v2": null, "hs_predictivecontactscore_v2_next_max_max_d4e58c1e": null, "hs_sales_email_last_replied": null, "hs_target_account": null, "hs_target_account_probability": 0.4076234698295593, "hs_target_account_recommendation_snooze_time": null, "hs_target_account_recommendation_state": null, "hs_time_in_customer": null, "hs_time_in_evangelist": null, "hs_time_in_lead": null, "hs_time_in_marketingqualifiedlead": null, "hs_time_in_opportunity": null, "hs_time_in_other": null, "hs_time_in_salesqualifiedlead": null, "hs_time_in_subscriber": null, "hs_total_deal_value": null, "hs_unique_creation_key": null, "hs_updated_by_user_id": null, "hs_user_ids_of_all_notification_followers": 
null, "hs_user_ids_of_all_notification_unfollowers": null, "hs_user_ids_of_all_owners": null, "hubspot_owner_assigneddate": null, "hubspot_owner_id": null, "hubspot_team_id": null, "hubspotscore": null, "industry": "Airbyte Test", "is_public": true, "lifecyclestage": null, "linkedin_company_page": "https://www.linkedin.com/company/bd-domain", "linkedinbio": null, "name": "test company 36", "notes_last_contacted": null, "notes_last_updated": null, "notes_next_activity_date": null, "num_associated_contacts": 0.0, "num_associated_deals": null, "num_contacted_notes": null, "num_conversion_events": null, "num_conversion_events_cardinality_sum_d095f14b": null, "num_notes": null, "numberofemployees": null, "phone": "(877) 929-0687-00291", "recent_conversion_date": null, "recent_conversion_date_timestamp_latest_value_72856da1": null, "recent_conversion_event_name": null, "recent_conversion_event_name_timestamp_latest_value_66c820bf": null, "recent_deal_amount": null, "recent_deal_close_date": null, "state": "Massachusetts", "timezone": "Asia/Dhaka", "total_money_raised": null, "total_revenue": null, "twitterbio": null, "twitterfollowers": null, "twitterhandle": null, "type": null, "web_technologies": "apache;nginx;facebook_social_plugins;facebook_connect;google_analytics;facebook_like_button;centos", "website": "test.domain.net", "zip": "1216"}, "createdAt": "2021-10-12T12:57:24.412Z", "updatedAt": "2021-10-12T12:57:29.620Z", "archived": false}, "emitted_at": 1655280716902} +{"stream": "companies", "data": {"id": "7097478556", "properties": {"about_us": null, "address": null, "address2": null, "annualrevenue": null, "city": "Test City 40", "closedate": null, "closedate_timestamp_earliest_value_a2a17e6e": null, "country": "Bangladesh", "createdate": "2021-10-12T12:57:26.011000+00:00", "days_to_close": null, "description": "This is a placeholder page installed by the Ubuntu release of the Lighttpd server package.", "domain": "test.domain.net", 
"engagements_last_meeting_booked": null, "engagements_last_meeting_booked_campaign": null, "engagements_last_meeting_booked_medium": null, "engagements_last_meeting_booked_source": null, "facebook_company_page": null, "facebookfans": null, "first_contact_createdate": null, "first_contact_createdate_timestamp_earliest_value_78b50eea": null, "first_conversion_date": null, "first_conversion_date_timestamp_earliest_value_61f58f2c": null, "first_conversion_event_name": null, "first_conversion_event_name_timestamp_earliest_value_68ddae0a": null, "first_deal_created_date": null, "founded_year": null, "googleplus_page": null, "hs_additional_domains": null, "hs_all_accessible_team_ids": null, "hs_all_assigned_business_unit_ids": null, "hs_all_owner_ids": null, "hs_all_team_ids": null, "hs_analytics_first_timestamp": null, "hs_analytics_first_timestamp_timestamp_earliest_value_11e3a63a": null, "hs_analytics_first_touch_converting_campaign": null, "hs_analytics_first_touch_converting_campaign_timestamp_earliest_value_4757fe10": null, "hs_analytics_first_visit_timestamp": null, "hs_analytics_first_visit_timestamp_timestamp_earliest_value_accc17ae": null, "hs_analytics_last_timestamp": null, "hs_analytics_last_timestamp_timestamp_latest_value_4e16365a": null, "hs_analytics_last_touch_converting_campaign": null, "hs_analytics_last_touch_converting_campaign_timestamp_latest_value_81a64e30": null, "hs_analytics_last_visit_timestamp": null, "hs_analytics_last_visit_timestamp_timestamp_latest_value_999a0fce": null, "hs_analytics_num_page_views": null, "hs_analytics_num_page_views_cardinality_sum_e46e85b0": null, "hs_analytics_num_visits": null, "hs_analytics_num_visits_cardinality_sum_53d952a6": null, "hs_analytics_source": null, "hs_analytics_source_data_1": null, "hs_analytics_source_data_1_timestamp_earliest_value_9b2f1fa1": null, "hs_analytics_source_data_2": null, "hs_analytics_source_data_2_timestamp_earliest_value_9b2f9400": null, 
"hs_analytics_source_timestamp_earliest_value_25a3a52c": null, "hs_avatar_filemanager_key": null, "hs_created_by_user_id": null, "hs_createdate": null, "hs_date_entered_customer": null, "hs_date_entered_evangelist": null, "hs_date_entered_lead": null, "hs_date_entered_marketingqualifiedlead": null, "hs_date_entered_opportunity": null, "hs_date_entered_other": null, "hs_date_entered_salesqualifiedlead": null, "hs_date_entered_subscriber": null, "hs_date_exited_customer": null, "hs_date_exited_evangelist": null, "hs_date_exited_lead": null, "hs_date_exited_marketingqualifiedlead": null, "hs_date_exited_opportunity": null, "hs_date_exited_other": null, "hs_date_exited_salesqualifiedlead": null, "hs_date_exited_subscriber": null, "hs_ideal_customer_profile": null, "hs_is_target_account": null, "hs_last_booked_meeting_date": null, "hs_last_logged_call_date": null, "hs_last_open_task_date": null, "hs_last_sales_activity_date": null, "hs_last_sales_activity_timestamp": null, "hs_lastmodifieddate": "2021-10-12T12:57:42.827000+00:00", "hs_latest_createdate_of_active_subscriptions": null, "hs_latest_meeting_activity": null, "hs_lead_status": null, "hs_merged_object_ids": null, "hs_num_blockers": 0.0, "hs_num_child_companies": 0.0, "hs_num_contacts_with_buying_roles": 0.0, "hs_num_decision_makers": 0.0, "hs_num_open_deals": 0.0, "hs_object_id": 7097478556, "hs_parent_company_id": null, "hs_pipeline": null, "hs_predictivecontactscore_v2": null, "hs_predictivecontactscore_v2_next_max_max_d4e58c1e": null, "hs_sales_email_last_replied": null, "hs_target_account": null, "hs_target_account_probability": 0.4076234698295593, "hs_target_account_recommendation_snooze_time": null, "hs_target_account_recommendation_state": null, "hs_time_in_customer": null, "hs_time_in_evangelist": null, "hs_time_in_lead": null, "hs_time_in_marketingqualifiedlead": null, "hs_time_in_opportunity": null, "hs_time_in_other": null, "hs_time_in_salesqualifiedlead": null, "hs_time_in_subscriber": null, 
"hs_total_deal_value": null, "hs_unique_creation_key": null, "hs_updated_by_user_id": null, "hs_user_ids_of_all_notification_followers": null, "hs_user_ids_of_all_notification_unfollowers": null, "hs_user_ids_of_all_owners": null, "hubspot_owner_assigneddate": null, "hubspot_owner_id": null, "hubspot_team_id": null, "hubspotscore": null, "industry": "Airbyte Test", "is_public": true, "lifecyclestage": null, "linkedin_company_page": "https://www.linkedin.com/company/bd-domain", "linkedinbio": null, "name": "test company 40", "notes_last_contacted": null, "notes_last_updated": null, "notes_next_activity_date": null, "num_associated_contacts": 0.0, "num_associated_deals": null, "num_contacted_notes": null, "num_conversion_events": null, "num_conversion_events_cardinality_sum_d095f14b": null, "num_notes": null, "numberofemployees": null, "phone": "(877) 929-0687-00291", "recent_conversion_date": null, "recent_conversion_date_timestamp_latest_value_72856da1": null, "recent_conversion_event_name": null, "recent_conversion_event_name_timestamp_latest_value_66c820bf": null, "recent_deal_amount": null, "recent_deal_close_date": null, "state": "Massachusetts", "timezone": "Asia/Dhaka", "total_money_raised": null, "total_revenue": null, "twitterbio": null, "twitterfollowers": null, "twitterhandle": null, "type": null, "web_technologies": "apache;nginx;facebook_social_plugins;facebook_connect;google_analytics;facebook_like_button;centos", "website": "test.domain.net", "zip": "1216"}, "createdAt": "2021-10-12T12:57:26.011Z", "updatedAt": "2021-10-12T12:57:42.827Z", "archived": false}, "emitted_at": 1655280716903} +{"stream": "companies", "data": {"id": "7097478560", "properties": {"about_us": null, "address": null, "address2": null, "annualrevenue": null, "city": "Test City 61", "closedate": null, "closedate_timestamp_earliest_value_a2a17e6e": null, "country": "Bangladesh", "createdate": "2021-10-12T12:57:32.931000+00:00", "days_to_close": null, "description": "This is a 
placeholder page installed by the Ubuntu release of the Lighttpd server package.", "domain": "test.domain.net", "engagements_last_meeting_booked": null, "engagements_last_meeting_booked_campaign": null, "engagements_last_meeting_booked_medium": null, "engagements_last_meeting_booked_source": null, "facebook_company_page": null, "facebookfans": null, "first_contact_createdate": null, "first_contact_createdate_timestamp_earliest_value_78b50eea": null, "first_conversion_date": null, "first_conversion_date_timestamp_earliest_value_61f58f2c": null, "first_conversion_event_name": null, "first_conversion_event_name_timestamp_earliest_value_68ddae0a": null, "first_deal_created_date": null, "founded_year": null, "googleplus_page": null, "hs_additional_domains": null, "hs_all_accessible_team_ids": null, "hs_all_assigned_business_unit_ids": null, "hs_all_owner_ids": null, "hs_all_team_ids": null, "hs_analytics_first_timestamp": null, "hs_analytics_first_timestamp_timestamp_earliest_value_11e3a63a": null, "hs_analytics_first_touch_converting_campaign": null, "hs_analytics_first_touch_converting_campaign_timestamp_earliest_value_4757fe10": null, "hs_analytics_first_visit_timestamp": null, "hs_analytics_first_visit_timestamp_timestamp_earliest_value_accc17ae": null, "hs_analytics_last_timestamp": null, "hs_analytics_last_timestamp_timestamp_latest_value_4e16365a": null, "hs_analytics_last_touch_converting_campaign": null, "hs_analytics_last_touch_converting_campaign_timestamp_latest_value_81a64e30": null, "hs_analytics_last_visit_timestamp": null, "hs_analytics_last_visit_timestamp_timestamp_latest_value_999a0fce": null, "hs_analytics_num_page_views": null, "hs_analytics_num_page_views_cardinality_sum_e46e85b0": null, "hs_analytics_num_visits": null, "hs_analytics_num_visits_cardinality_sum_53d952a6": null, "hs_analytics_source": null, "hs_analytics_source_data_1": null, "hs_analytics_source_data_1_timestamp_earliest_value_9b2f1fa1": null, "hs_analytics_source_data_2": null, 
"hs_analytics_source_data_2_timestamp_earliest_value_9b2f9400": null, "hs_analytics_source_timestamp_earliest_value_25a3a52c": null, "hs_avatar_filemanager_key": null, "hs_created_by_user_id": null, "hs_createdate": null, "hs_date_entered_customer": null, "hs_date_entered_evangelist": null, "hs_date_entered_lead": null, "hs_date_entered_marketingqualifiedlead": null, "hs_date_entered_opportunity": null, "hs_date_entered_other": null, "hs_date_entered_salesqualifiedlead": null, "hs_date_entered_subscriber": null, "hs_date_exited_customer": null, "hs_date_exited_evangelist": null, "hs_date_exited_lead": null, "hs_date_exited_marketingqualifiedlead": null, "hs_date_exited_opportunity": null, "hs_date_exited_other": null, "hs_date_exited_salesqualifiedlead": null, "hs_date_exited_subscriber": null, "hs_ideal_customer_profile": null, "hs_is_target_account": null, "hs_last_booked_meeting_date": null, "hs_last_logged_call_date": null, "hs_last_open_task_date": null, "hs_last_sales_activity_date": null, "hs_last_sales_activity_timestamp": null, "hs_lastmodifieddate": "2021-10-12T12:57:38.133000+00:00", "hs_latest_createdate_of_active_subscriptions": null, "hs_latest_meeting_activity": null, "hs_lead_status": null, "hs_merged_object_ids": null, "hs_num_blockers": 0.0, "hs_num_child_companies": 0.0, "hs_num_contacts_with_buying_roles": 0.0, "hs_num_decision_makers": 0.0, "hs_num_open_deals": 0.0, "hs_object_id": 7097478560, "hs_parent_company_id": null, "hs_pipeline": null, "hs_predictivecontactscore_v2": null, "hs_predictivecontactscore_v2_next_max_max_d4e58c1e": null, "hs_sales_email_last_replied": null, "hs_target_account": null, "hs_target_account_probability": 0.4076234698295593, "hs_target_account_recommendation_snooze_time": null, "hs_target_account_recommendation_state": null, "hs_time_in_customer": null, "hs_time_in_evangelist": null, "hs_time_in_lead": null, "hs_time_in_marketingqualifiedlead": null, "hs_time_in_opportunity": null, "hs_time_in_other": null, 
"hs_time_in_salesqualifiedlead": null, "hs_time_in_subscriber": null, "hs_total_deal_value": null, "hs_unique_creation_key": null, "hs_updated_by_user_id": null, "hs_user_ids_of_all_notification_followers": null, "hs_user_ids_of_all_notification_unfollowers": null, "hs_user_ids_of_all_owners": null, "hubspot_owner_assigneddate": null, "hubspot_owner_id": null, "hubspot_team_id": null, "hubspotscore": null, "industry": "Airbyte Test", "is_public": true, "lifecyclestage": null, "linkedin_company_page": "https://www.linkedin.com/company/bd-domain", "linkedinbio": null, "name": "test company 61", "notes_last_contacted": null, "notes_last_updated": null, "notes_next_activity_date": null, "num_associated_contacts": 0.0, "num_associated_deals": null, "num_contacted_notes": null, "num_conversion_events": null, "num_conversion_events_cardinality_sum_d095f14b": null, "num_notes": null, "numberofemployees": null, "phone": "(877) 929-0687-00291", "recent_conversion_date": null, "recent_conversion_date_timestamp_latest_value_72856da1": null, "recent_conversion_event_name": null, "recent_conversion_event_name_timestamp_latest_value_66c820bf": null, "recent_deal_amount": null, "recent_deal_close_date": null, "state": "Massachusetts", "timezone": "Asia/Dhaka", "total_money_raised": null, "total_revenue": null, "twitterbio": null, "twitterfollowers": null, "twitterhandle": null, "type": null, "web_technologies": "apache;nginx;facebook_social_plugins;facebook_connect;google_analytics;facebook_like_button;centos", "website": "test.domain.net", "zip": "1216"}, "createdAt": "2021-10-12T12:57:32.931Z", "updatedAt": "2021-10-12T12:57:38.133Z", "archived": false}, "emitted_at": 1655280716903} +{"stream": "companies", "data": {"id": "7097478566", "properties": {"about_us": null, "address": null, "address2": null, "annualrevenue": null, "city": "Test City 86", "closedate": null, "closedate_timestamp_earliest_value_a2a17e6e": null, "country": "Bangladesh", "createdate": 
"2021-10-12T12:57:41.736000+00:00", "days_to_close": null, "description": "This is a placeholder page installed by the Ubuntu release of the Lighttpd server package.", "domain": "test.domain.net", "engagements_last_meeting_booked": null, "engagements_last_meeting_booked_campaign": null, "engagements_last_meeting_booked_medium": null, "engagements_last_meeting_booked_source": null, "facebook_company_page": null, "facebookfans": null, "first_contact_createdate": null, "first_contact_createdate_timestamp_earliest_value_78b50eea": null, "first_conversion_date": null, "first_conversion_date_timestamp_earliest_value_61f58f2c": null, "first_conversion_event_name": null, "first_conversion_event_name_timestamp_earliest_value_68ddae0a": null, "first_deal_created_date": null, "founded_year": null, "googleplus_page": null, "hs_additional_domains": null, "hs_all_accessible_team_ids": null, "hs_all_assigned_business_unit_ids": null, "hs_all_owner_ids": null, "hs_all_team_ids": null, "hs_analytics_first_timestamp": null, "hs_analytics_first_timestamp_timestamp_earliest_value_11e3a63a": null, "hs_analytics_first_touch_converting_campaign": null, "hs_analytics_first_touch_converting_campaign_timestamp_earliest_value_4757fe10": null, "hs_analytics_first_visit_timestamp": null, "hs_analytics_first_visit_timestamp_timestamp_earliest_value_accc17ae": null, "hs_analytics_last_timestamp": null, "hs_analytics_last_timestamp_timestamp_latest_value_4e16365a": null, "hs_analytics_last_touch_converting_campaign": null, "hs_analytics_last_touch_converting_campaign_timestamp_latest_value_81a64e30": null, "hs_analytics_last_visit_timestamp": null, "hs_analytics_last_visit_timestamp_timestamp_latest_value_999a0fce": null, "hs_analytics_num_page_views": null, "hs_analytics_num_page_views_cardinality_sum_e46e85b0": null, "hs_analytics_num_visits": null, "hs_analytics_num_visits_cardinality_sum_53d952a6": null, "hs_analytics_source": null, "hs_analytics_source_data_1": null, 
"hs_analytics_source_data_1_timestamp_earliest_value_9b2f1fa1": null, "hs_analytics_source_data_2": null, "hs_analytics_source_data_2_timestamp_earliest_value_9b2f9400": null, "hs_analytics_source_timestamp_earliest_value_25a3a52c": null, "hs_avatar_filemanager_key": null, "hs_created_by_user_id": null, "hs_createdate": null, "hs_date_entered_customer": null, "hs_date_entered_evangelist": null, "hs_date_entered_lead": null, "hs_date_entered_marketingqualifiedlead": null, "hs_date_entered_opportunity": null, "hs_date_entered_other": null, "hs_date_entered_salesqualifiedlead": null, "hs_date_entered_subscriber": null, "hs_date_exited_customer": null, "hs_date_exited_evangelist": null, "hs_date_exited_lead": null, "hs_date_exited_marketingqualifiedlead": null, "hs_date_exited_opportunity": null, "hs_date_exited_other": null, "hs_date_exited_salesqualifiedlead": null, "hs_date_exited_subscriber": null, "hs_ideal_customer_profile": null, "hs_is_target_account": null, "hs_last_booked_meeting_date": null, "hs_last_logged_call_date": null, "hs_last_open_task_date": null, "hs_last_sales_activity_date": null, "hs_last_sales_activity_timestamp": null, "hs_lastmodifieddate": "2021-10-12T12:57:51.115000+00:00", "hs_latest_createdate_of_active_subscriptions": null, "hs_latest_meeting_activity": null, "hs_lead_status": null, "hs_merged_object_ids": null, "hs_num_blockers": 0.0, "hs_num_child_companies": 0.0, "hs_num_contacts_with_buying_roles": 0.0, "hs_num_decision_makers": 0.0, "hs_num_open_deals": 0.0, "hs_object_id": 7097478566, "hs_parent_company_id": null, "hs_pipeline": null, "hs_predictivecontactscore_v2": null, "hs_predictivecontactscore_v2_next_max_max_d4e58c1e": null, "hs_sales_email_last_replied": null, "hs_target_account": null, "hs_target_account_probability": 0.4076234698295593, "hs_target_account_recommendation_snooze_time": null, "hs_target_account_recommendation_state": null, "hs_time_in_customer": null, "hs_time_in_evangelist": null, "hs_time_in_lead": null, 
"hs_time_in_marketingqualifiedlead": null, "hs_time_in_opportunity": null, "hs_time_in_other": null, "hs_time_in_salesqualifiedlead": null, "hs_time_in_subscriber": null, "hs_total_deal_value": null, "hs_unique_creation_key": null, "hs_updated_by_user_id": null, "hs_user_ids_of_all_notification_followers": null, "hs_user_ids_of_all_notification_unfollowers": null, "hs_user_ids_of_all_owners": null, "hubspot_owner_assigneddate": null, "hubspot_owner_id": null, "hubspot_team_id": null, "hubspotscore": null, "industry": "Airbyte Test", "is_public": true, "lifecyclestage": null, "linkedin_company_page": "https://www.linkedin.com/company/bd-domain", "linkedinbio": null, "name": "test company 86", "notes_last_contacted": null, "notes_last_updated": null, "notes_next_activity_date": null, "num_associated_contacts": 0.0, "num_associated_deals": null, "num_contacted_notes": null, "num_conversion_events": null, "num_conversion_events_cardinality_sum_d095f14b": null, "num_notes": null, "numberofemployees": null, "phone": "(877) 929-0687-00291", "recent_conversion_date": null, "recent_conversion_date_timestamp_latest_value_72856da1": null, "recent_conversion_event_name": null, "recent_conversion_event_name_timestamp_latest_value_66c820bf": null, "recent_deal_amount": null, "recent_deal_close_date": null, "state": "Massachusetts", "timezone": "Asia/Dhaka", "total_money_raised": null, "total_revenue": null, "twitterbio": null, "twitterfollowers": null, "twitterhandle": null, "type": null, "web_technologies": "apache;nginx;facebook_social_plugins;facebook_connect;google_analytics;facebook_like_button;centos", "website": "test.domain.net", "zip": "1216"}, "createdAt": "2021-10-12T12:57:41.736Z", "updatedAt": "2021-10-12T12:57:51.115Z", "archived": false}, "emitted_at": 1655280716904} +{"stream": "companies", "data": {"id": "7097484837", "properties": {"about_us": null, "address": null, "address2": null, "annualrevenue": null, "city": "Test City 17", "closedate": null, 
"closedate_timestamp_earliest_value_a2a17e6e": null, "country": "Bangladesh", "createdate": "2021-10-12T12:57:18.405000+00:00", "days_to_close": null, "description": "This is a placeholder page installed by the Ubuntu release of the Lighttpd server package.", "domain": "test.domain.net", "engagements_last_meeting_booked": null, "engagements_last_meeting_booked_campaign": null, "engagements_last_meeting_booked_medium": null, "engagements_last_meeting_booked_source": null, "facebook_company_page": null, "facebookfans": null, "first_contact_createdate": null, "first_contact_createdate_timestamp_earliest_value_78b50eea": null, "first_conversion_date": null, "first_conversion_date_timestamp_earliest_value_61f58f2c": null, "first_conversion_event_name": null, "first_conversion_event_name_timestamp_earliest_value_68ddae0a": null, "first_deal_created_date": null, "founded_year": null, "googleplus_page": null, "hs_additional_domains": null, "hs_all_accessible_team_ids": null, "hs_all_assigned_business_unit_ids": null, "hs_all_owner_ids": null, "hs_all_team_ids": null, "hs_analytics_first_timestamp": null, "hs_analytics_first_timestamp_timestamp_earliest_value_11e3a63a": null, "hs_analytics_first_touch_converting_campaign": null, "hs_analytics_first_touch_converting_campaign_timestamp_earliest_value_4757fe10": null, "hs_analytics_first_visit_timestamp": null, "hs_analytics_first_visit_timestamp_timestamp_earliest_value_accc17ae": null, "hs_analytics_last_timestamp": null, "hs_analytics_last_timestamp_timestamp_latest_value_4e16365a": null, "hs_analytics_last_touch_converting_campaign": null, "hs_analytics_last_touch_converting_campaign_timestamp_latest_value_81a64e30": null, "hs_analytics_last_visit_timestamp": null, "hs_analytics_last_visit_timestamp_timestamp_latest_value_999a0fce": null, "hs_analytics_num_page_views": null, "hs_analytics_num_page_views_cardinality_sum_e46e85b0": null, "hs_analytics_num_visits": null, "hs_analytics_num_visits_cardinality_sum_53d952a6": 
null, "hs_analytics_source": null, "hs_analytics_source_data_1": null, "hs_analytics_source_data_1_timestamp_earliest_value_9b2f1fa1": null, "hs_analytics_source_data_2": null, "hs_analytics_source_data_2_timestamp_earliest_value_9b2f9400": null, "hs_analytics_source_timestamp_earliest_value_25a3a52c": null, "hs_avatar_filemanager_key": null, "hs_created_by_user_id": null, "hs_createdate": null, "hs_date_entered_customer": null, "hs_date_entered_evangelist": null, "hs_date_entered_lead": null, "hs_date_entered_marketingqualifiedlead": null, "hs_date_entered_opportunity": null, "hs_date_entered_other": null, "hs_date_entered_salesqualifiedlead": null, "hs_date_entered_subscriber": null, "hs_date_exited_customer": null, "hs_date_exited_evangelist": null, "hs_date_exited_lead": null, "hs_date_exited_marketingqualifiedlead": null, "hs_date_exited_opportunity": null, "hs_date_exited_other": null, "hs_date_exited_salesqualifiedlead": null, "hs_date_exited_subscriber": null, "hs_ideal_customer_profile": null, "hs_is_target_account": null, "hs_last_booked_meeting_date": null, "hs_last_logged_call_date": null, "hs_last_open_task_date": null, "hs_last_sales_activity_date": null, "hs_last_sales_activity_timestamp": null, "hs_lastmodifieddate": "2021-10-12T12:57:39.138000+00:00", "hs_latest_createdate_of_active_subscriptions": null, "hs_latest_meeting_activity": null, "hs_lead_status": null, "hs_merged_object_ids": null, "hs_num_blockers": 0.0, "hs_num_child_companies": 0.0, "hs_num_contacts_with_buying_roles": 0.0, "hs_num_decision_makers": 0.0, "hs_num_open_deals": 0.0, "hs_object_id": 7097484837, "hs_parent_company_id": null, "hs_pipeline": null, "hs_predictivecontactscore_v2": null, "hs_predictivecontactscore_v2_next_max_max_d4e58c1e": null, "hs_sales_email_last_replied": null, "hs_target_account": null, "hs_target_account_probability": 0.4076234698295593, "hs_target_account_recommendation_snooze_time": null, "hs_target_account_recommendation_state": null, 
"hs_time_in_customer": null, "hs_time_in_evangelist": null, "hs_time_in_lead": null, "hs_time_in_marketingqualifiedlead": null, "hs_time_in_opportunity": null, "hs_time_in_other": null, "hs_time_in_salesqualifiedlead": null, "hs_time_in_subscriber": null, "hs_total_deal_value": null, "hs_unique_creation_key": null, "hs_updated_by_user_id": null, "hs_user_ids_of_all_notification_followers": null, "hs_user_ids_of_all_notification_unfollowers": null, "hs_user_ids_of_all_owners": null, "hubspot_owner_assigneddate": null, "hubspot_owner_id": null, "hubspot_team_id": null, "hubspotscore": null, "industry": "Airbyte Test", "is_public": true, "lifecyclestage": null, "linkedin_company_page": "https://www.linkedin.com/company/bd-domain", "linkedinbio": null, "name": "test company 17", "notes_last_contacted": null, "notes_last_updated": null, "notes_next_activity_date": null, "num_associated_contacts": 0.0, "num_associated_deals": null, "num_contacted_notes": null, "num_conversion_events": null, "num_conversion_events_cardinality_sum_d095f14b": null, "num_notes": null, "numberofemployees": null, "phone": "(877) 929-0687-00291", "recent_conversion_date": null, "recent_conversion_date_timestamp_latest_value_72856da1": null, "recent_conversion_event_name": null, "recent_conversion_event_name_timestamp_latest_value_66c820bf": null, "recent_deal_amount": null, "recent_deal_close_date": null, "state": "Massachusetts", "timezone": "Asia/Dhaka", "total_money_raised": null, "total_revenue": null, "twitterbio": null, "twitterfollowers": null, "twitterhandle": null, "type": null, "web_technologies": "apache;nginx;facebook_social_plugins;facebook_connect;google_analytics;facebook_like_button;centos", "website": "test.domain.net", "zip": "1216"}, "createdAt": "2021-10-12T12:57:18.405Z", "updatedAt": "2021-10-12T12:57:39.138Z", "archived": false}, "emitted_at": 1655280716904} +{"stream": "companies", "data": {"id": "7097484850", "properties": {"about_us": null, "address": null, "address2": 
null, "annualrevenue": null, "city": "Test City 74", "closedate": null, "closedate_timestamp_earliest_value_a2a17e6e": null, "country": "Bangladesh", "createdate": "2021-10-12T12:57:37.358000+00:00", "days_to_close": null, "description": "This is a placeholder page installed by the Ubuntu release of the Lighttpd server package.", "domain": "test.domain.net", "engagements_last_meeting_booked": null, "engagements_last_meeting_booked_campaign": null, "engagements_last_meeting_booked_medium": null, "engagements_last_meeting_booked_source": null, "facebook_company_page": null, "facebookfans": null, "first_contact_createdate": null, "first_contact_createdate_timestamp_earliest_value_78b50eea": null, "first_conversion_date": null, "first_conversion_date_timestamp_earliest_value_61f58f2c": null, "first_conversion_event_name": null, "first_conversion_event_name_timestamp_earliest_value_68ddae0a": null, "first_deal_created_date": null, "founded_year": null, "googleplus_page": null, "hs_additional_domains": null, "hs_all_accessible_team_ids": null, "hs_all_assigned_business_unit_ids": null, "hs_all_owner_ids": null, "hs_all_team_ids": null, "hs_analytics_first_timestamp": null, "hs_analytics_first_timestamp_timestamp_earliest_value_11e3a63a": null, "hs_analytics_first_touch_converting_campaign": null, "hs_analytics_first_touch_converting_campaign_timestamp_earliest_value_4757fe10": null, "hs_analytics_first_visit_timestamp": null, "hs_analytics_first_visit_timestamp_timestamp_earliest_value_accc17ae": null, "hs_analytics_last_timestamp": null, "hs_analytics_last_timestamp_timestamp_latest_value_4e16365a": null, "hs_analytics_last_touch_converting_campaign": null, "hs_analytics_last_touch_converting_campaign_timestamp_latest_value_81a64e30": null, "hs_analytics_last_visit_timestamp": null, "hs_analytics_last_visit_timestamp_timestamp_latest_value_999a0fce": null, "hs_analytics_num_page_views": null, "hs_analytics_num_page_views_cardinality_sum_e46e85b0": null, 
"hs_analytics_num_visits": null, "hs_analytics_num_visits_cardinality_sum_53d952a6": null, "hs_analytics_source": null, "hs_analytics_source_data_1": null, "hs_analytics_source_data_1_timestamp_earliest_value_9b2f1fa1": null, "hs_analytics_source_data_2": null, "hs_analytics_source_data_2_timestamp_earliest_value_9b2f9400": null, "hs_analytics_source_timestamp_earliest_value_25a3a52c": null, "hs_avatar_filemanager_key": null, "hs_created_by_user_id": null, "hs_createdate": null, "hs_date_entered_customer": null, "hs_date_entered_evangelist": null, "hs_date_entered_lead": null, "hs_date_entered_marketingqualifiedlead": null, "hs_date_entered_opportunity": null, "hs_date_entered_other": null, "hs_date_entered_salesqualifiedlead": null, "hs_date_entered_subscriber": null, "hs_date_exited_customer": null, "hs_date_exited_evangelist": null, "hs_date_exited_lead": null, "hs_date_exited_marketingqualifiedlead": null, "hs_date_exited_opportunity": null, "hs_date_exited_other": null, "hs_date_exited_salesqualifiedlead": null, "hs_date_exited_subscriber": null, "hs_ideal_customer_profile": null, "hs_is_target_account": null, "hs_last_booked_meeting_date": null, "hs_last_logged_call_date": null, "hs_last_open_task_date": null, "hs_last_sales_activity_date": null, "hs_last_sales_activity_timestamp": null, "hs_lastmodifieddate": "2021-10-12T12:57:46.141000+00:00", "hs_latest_createdate_of_active_subscriptions": null, "hs_latest_meeting_activity": null, "hs_lead_status": null, "hs_merged_object_ids": null, "hs_num_blockers": 0.0, "hs_num_child_companies": 0.0, "hs_num_contacts_with_buying_roles": 0.0, "hs_num_decision_makers": 0.0, "hs_num_open_deals": 0.0, "hs_object_id": 7097484850, "hs_parent_company_id": null, "hs_pipeline": null, "hs_predictivecontactscore_v2": null, "hs_predictivecontactscore_v2_next_max_max_d4e58c1e": null, "hs_sales_email_last_replied": null, "hs_target_account": null, "hs_target_account_probability": 0.4076234698295593, 
"hs_target_account_recommendation_snooze_time": null, "hs_target_account_recommendation_state": null, "hs_time_in_customer": null, "hs_time_in_evangelist": null, "hs_time_in_lead": null, "hs_time_in_marketingqualifiedlead": null, "hs_time_in_opportunity": null, "hs_time_in_other": null, "hs_time_in_salesqualifiedlead": null, "hs_time_in_subscriber": null, "hs_total_deal_value": null, "hs_unique_creation_key": null, "hs_updated_by_user_id": null, "hs_user_ids_of_all_notification_followers": null, "hs_user_ids_of_all_notification_unfollowers": null, "hs_user_ids_of_all_owners": null, "hubspot_owner_assigneddate": null, "hubspot_owner_id": null, "hubspot_team_id": null, "hubspotscore": null, "industry": "Airbyte Test", "is_public": true, "lifecyclestage": null, "linkedin_company_page": "https://www.linkedin.com/company/bd-domain", "linkedinbio": null, "name": "test company 74", "notes_last_contacted": null, "notes_last_updated": null, "notes_next_activity_date": null, "num_associated_contacts": 0.0, "num_associated_deals": null, "num_contacted_notes": null, "num_conversion_events": null, "num_conversion_events_cardinality_sum_d095f14b": null, "num_notes": null, "numberofemployees": null, "phone": "(877) 929-0687-00291", "recent_conversion_date": null, "recent_conversion_date_timestamp_latest_value_72856da1": null, "recent_conversion_event_name": null, "recent_conversion_event_name_timestamp_latest_value_66c820bf": null, "recent_deal_amount": null, "recent_deal_close_date": null, "state": "Massachusetts", "timezone": "Asia/Dhaka", "total_money_raised": null, "total_revenue": null, "twitterbio": null, "twitterfollowers": null, "twitterhandle": null, "type": null, "web_technologies": "apache;nginx;facebook_social_plugins;facebook_connect;google_analytics;facebook_like_button;centos", "website": "test.domain.net", "zip": "1216"}, "createdAt": "2021-10-12T12:57:37.358Z", "updatedAt": "2021-10-12T12:57:46.141Z", "archived": false}, "emitted_at": 1655280716904} +{"stream": 
"companies", "data": {"id": "7097484853", "properties": {"about_us": null, "address": null, "address2": null, "annualrevenue": null, "city": "Test City 85", "closedate": null, "closedate_timestamp_earliest_value_a2a17e6e": null, "country": "Bangladesh", "createdate": "2021-10-12T12:57:41.414000+00:00", "days_to_close": null, "description": "This is a placeholder page installed by the Ubuntu release of the Lighttpd server package.", "domain": "test.domain.net", "engagements_last_meeting_booked": null, "engagements_last_meeting_booked_campaign": null, "engagements_last_meeting_booked_medium": null, "engagements_last_meeting_booked_source": null, "facebook_company_page": null, "facebookfans": null, "first_contact_createdate": null, "first_contact_createdate_timestamp_earliest_value_78b50eea": null, "first_conversion_date": null, "first_conversion_date_timestamp_earliest_value_61f58f2c": null, "first_conversion_event_name": null, "first_conversion_event_name_timestamp_earliest_value_68ddae0a": null, "first_deal_created_date": null, "founded_year": null, "googleplus_page": null, "hs_additional_domains": null, "hs_all_accessible_team_ids": null, "hs_all_assigned_business_unit_ids": null, "hs_all_owner_ids": null, "hs_all_team_ids": null, "hs_analytics_first_timestamp": null, "hs_analytics_first_timestamp_timestamp_earliest_value_11e3a63a": null, "hs_analytics_first_touch_converting_campaign": null, "hs_analytics_first_touch_converting_campaign_timestamp_earliest_value_4757fe10": null, "hs_analytics_first_visit_timestamp": null, "hs_analytics_first_visit_timestamp_timestamp_earliest_value_accc17ae": null, "hs_analytics_last_timestamp": null, "hs_analytics_last_timestamp_timestamp_latest_value_4e16365a": null, "hs_analytics_last_touch_converting_campaign": null, "hs_analytics_last_touch_converting_campaign_timestamp_latest_value_81a64e30": null, "hs_analytics_last_visit_timestamp": null, "hs_analytics_last_visit_timestamp_timestamp_latest_value_999a0fce": null, 
"hs_analytics_num_page_views": null, "hs_analytics_num_page_views_cardinality_sum_e46e85b0": null, "hs_analytics_num_visits": null, "hs_analytics_num_visits_cardinality_sum_53d952a6": null, "hs_analytics_source": null, "hs_analytics_source_data_1": null, "hs_analytics_source_data_1_timestamp_earliest_value_9b2f1fa1": null, "hs_analytics_source_data_2": null, "hs_analytics_source_data_2_timestamp_earliest_value_9b2f9400": null, "hs_analytics_source_timestamp_earliest_value_25a3a52c": null, "hs_avatar_filemanager_key": null, "hs_created_by_user_id": null, "hs_createdate": null, "hs_date_entered_customer": null, "hs_date_entered_evangelist": null, "hs_date_entered_lead": null, "hs_date_entered_marketingqualifiedlead": null, "hs_date_entered_opportunity": null, "hs_date_entered_other": null, "hs_date_entered_salesqualifiedlead": null, "hs_date_entered_subscriber": null, "hs_date_exited_customer": null, "hs_date_exited_evangelist": null, "hs_date_exited_lead": null, "hs_date_exited_marketingqualifiedlead": null, "hs_date_exited_opportunity": null, "hs_date_exited_other": null, "hs_date_exited_salesqualifiedlead": null, "hs_date_exited_subscriber": null, "hs_ideal_customer_profile": null, "hs_is_target_account": null, "hs_last_booked_meeting_date": null, "hs_last_logged_call_date": null, "hs_last_open_task_date": null, "hs_last_sales_activity_date": null, "hs_last_sales_activity_timestamp": null, "hs_lastmodifieddate": "2021-10-12T12:57:56.257000+00:00", "hs_latest_createdate_of_active_subscriptions": null, "hs_latest_meeting_activity": null, "hs_lead_status": null, "hs_merged_object_ids": null, "hs_num_blockers": 0.0, "hs_num_child_companies": 0.0, "hs_num_contacts_with_buying_roles": 0.0, "hs_num_decision_makers": 0.0, "hs_num_open_deals": 0.0, "hs_object_id": 7097484853, "hs_parent_company_id": null, "hs_pipeline": null, "hs_predictivecontactscore_v2": null, "hs_predictivecontactscore_v2_next_max_max_d4e58c1e": null, "hs_sales_email_last_replied": null, 
"hs_target_account": null, "hs_target_account_probability": 0.4076234698295593, "hs_target_account_recommendation_snooze_time": null, "hs_target_account_recommendation_state": null, "hs_time_in_customer": null, "hs_time_in_evangelist": null, "hs_time_in_lead": null, "hs_time_in_marketingqualifiedlead": null, "hs_time_in_opportunity": null, "hs_time_in_other": null, "hs_time_in_salesqualifiedlead": null, "hs_time_in_subscriber": null, "hs_total_deal_value": null, "hs_unique_creation_key": null, "hs_updated_by_user_id": null, "hs_user_ids_of_all_notification_followers": null, "hs_user_ids_of_all_notification_unfollowers": null, "hs_user_ids_of_all_owners": null, "hubspot_owner_assigneddate": null, "hubspot_owner_id": null, "hubspot_team_id": null, "hubspotscore": null, "industry": "Airbyte Test", "is_public": true, "lifecyclestage": null, "linkedin_company_page": "https://www.linkedin.com/company/bd-domain", "linkedinbio": null, "name": "test company 85", "notes_last_contacted": null, "notes_last_updated": null, "notes_next_activity_date": null, "num_associated_contacts": 0.0, "num_associated_deals": null, "num_contacted_notes": null, "num_conversion_events": null, "num_conversion_events_cardinality_sum_d095f14b": null, "num_notes": null, "numberofemployees": null, "phone": "(877) 929-0687-00291", "recent_conversion_date": null, "recent_conversion_date_timestamp_latest_value_72856da1": null, "recent_conversion_event_name": null, "recent_conversion_event_name_timestamp_latest_value_66c820bf": null, "recent_deal_amount": null, "recent_deal_close_date": null, "state": "Massachusetts", "timezone": "Asia/Dhaka", "total_money_raised": null, "total_revenue": null, "twitterbio": null, "twitterfollowers": null, "twitterhandle": null, "type": null, "web_technologies": "apache;nginx;facebook_social_plugins;facebook_connect;google_analytics;facebook_like_button;centos", "website": "test.domain.net", "zip": "1216"}, "createdAt": "2021-10-12T12:57:41.414Z", "updatedAt": 
"2021-10-12T12:57:56.257Z", "archived": false}, "emitted_at": 1655280716905} +{"stream": "companies", "data": {"id": "7097484854", "properties": {"about_us": null, "address": null, "address2": null, "annualrevenue": null, "city": "Test City 90", "closedate": null, "closedate_timestamp_earliest_value_a2a17e6e": null, "country": "Bangladesh", "createdate": "2021-10-12T12:57:43.414000+00:00", "days_to_close": null, "description": "This is a placeholder page installed by the Ubuntu release of the Lighttpd server package.", "domain": "test.domain.net", "engagements_last_meeting_booked": null, "engagements_last_meeting_booked_campaign": null, "engagements_last_meeting_booked_medium": null, "engagements_last_meeting_booked_source": null, "facebook_company_page": null, "facebookfans": null, "first_contact_createdate": null, "first_contact_createdate_timestamp_earliest_value_78b50eea": null, "first_conversion_date": null, "first_conversion_date_timestamp_earliest_value_61f58f2c": null, "first_conversion_event_name": null, "first_conversion_event_name_timestamp_earliest_value_68ddae0a": null, "first_deal_created_date": null, "founded_year": null, "googleplus_page": null, "hs_additional_domains": null, "hs_all_accessible_team_ids": null, "hs_all_assigned_business_unit_ids": null, "hs_all_owner_ids": null, "hs_all_team_ids": null, "hs_analytics_first_timestamp": null, "hs_analytics_first_timestamp_timestamp_earliest_value_11e3a63a": null, "hs_analytics_first_touch_converting_campaign": null, "hs_analytics_first_touch_converting_campaign_timestamp_earliest_value_4757fe10": null, "hs_analytics_first_visit_timestamp": null, "hs_analytics_first_visit_timestamp_timestamp_earliest_value_accc17ae": null, "hs_analytics_last_timestamp": null, "hs_analytics_last_timestamp_timestamp_latest_value_4e16365a": null, "hs_analytics_last_touch_converting_campaign": null, "hs_analytics_last_touch_converting_campaign_timestamp_latest_value_81a64e30": null, "hs_analytics_last_visit_timestamp": 
null, "hs_analytics_last_visit_timestamp_timestamp_latest_value_999a0fce": null, "hs_analytics_num_page_views": null, "hs_analytics_num_page_views_cardinality_sum_e46e85b0": null, "hs_analytics_num_visits": null, "hs_analytics_num_visits_cardinality_sum_53d952a6": null, "hs_analytics_source": null, "hs_analytics_source_data_1": null, "hs_analytics_source_data_1_timestamp_earliest_value_9b2f1fa1": null, "hs_analytics_source_data_2": null, "hs_analytics_source_data_2_timestamp_earliest_value_9b2f9400": null, "hs_analytics_source_timestamp_earliest_value_25a3a52c": null, "hs_avatar_filemanager_key": null, "hs_created_by_user_id": null, "hs_createdate": null, "hs_date_entered_customer": null, "hs_date_entered_evangelist": null, "hs_date_entered_lead": null, "hs_date_entered_marketingqualifiedlead": null, "hs_date_entered_opportunity": null, "hs_date_entered_other": null, "hs_date_entered_salesqualifiedlead": null, "hs_date_entered_subscriber": null, "hs_date_exited_customer": null, "hs_date_exited_evangelist": null, "hs_date_exited_lead": null, "hs_date_exited_marketingqualifiedlead": null, "hs_date_exited_opportunity": null, "hs_date_exited_other": null, "hs_date_exited_salesqualifiedlead": null, "hs_date_exited_subscriber": null, "hs_ideal_customer_profile": null, "hs_is_target_account": null, "hs_last_booked_meeting_date": null, "hs_last_logged_call_date": null, "hs_last_open_task_date": null, "hs_last_sales_activity_date": null, "hs_last_sales_activity_timestamp": null, "hs_lastmodifieddate": "2021-10-12T12:57:50.967000+00:00", "hs_latest_createdate_of_active_subscriptions": null, "hs_latest_meeting_activity": null, "hs_lead_status": null, "hs_merged_object_ids": null, "hs_num_blockers": 0.0, "hs_num_child_companies": 0.0, "hs_num_contacts_with_buying_roles": 0.0, "hs_num_decision_makers": 0.0, "hs_num_open_deals": 0.0, "hs_object_id": 7097484854, "hs_parent_company_id": null, "hs_pipeline": null, "hs_predictivecontactscore_v2": null, 
"hs_predictivecontactscore_v2_next_max_max_d4e58c1e": null, "hs_sales_email_last_replied": null, "hs_target_account": null, "hs_target_account_probability": 0.4076234698295593, "hs_target_account_recommendation_snooze_time": null, "hs_target_account_recommendation_state": null, "hs_time_in_customer": null, "hs_time_in_evangelist": null, "hs_time_in_lead": null, "hs_time_in_marketingqualifiedlead": null, "hs_time_in_opportunity": null, "hs_time_in_other": null, "hs_time_in_salesqualifiedlead": null, "hs_time_in_subscriber": null, "hs_total_deal_value": null, "hs_unique_creation_key": null, "hs_updated_by_user_id": null, "hs_user_ids_of_all_notification_followers": null, "hs_user_ids_of_all_notification_unfollowers": null, "hs_user_ids_of_all_owners": null, "hubspot_owner_assigneddate": null, "hubspot_owner_id": null, "hubspot_team_id": null, "hubspotscore": null, "industry": "Airbyte Test", "is_public": true, "lifecyclestage": null, "linkedin_company_page": "https://www.linkedin.com/company/bd-domain", "linkedinbio": null, "name": "test company 90", "notes_last_contacted": null, "notes_last_updated": null, "notes_next_activity_date": null, "num_associated_contacts": 0.0, "num_associated_deals": null, "num_contacted_notes": null, "num_conversion_events": null, "num_conversion_events_cardinality_sum_d095f14b": null, "num_notes": null, "numberofemployees": null, "phone": "(877) 929-0687-00291", "recent_conversion_date": null, "recent_conversion_date_timestamp_latest_value_72856da1": null, "recent_conversion_event_name": null, "recent_conversion_event_name_timestamp_latest_value_66c820bf": null, "recent_deal_amount": null, "recent_deal_close_date": null, "state": "Massachusetts", "timezone": "Asia/Dhaka", "total_money_raised": null, "total_revenue": null, "twitterbio": null, "twitterfollowers": null, "twitterhandle": null, "type": null, "web_technologies": "apache;nginx;facebook_social_plugins;facebook_connect;google_analytics;facebook_like_button;centos", "website": 
"test.domain.net", "zip": "1216"}, "createdAt": "2021-10-12T12:57:43.414Z", "updatedAt": "2021-10-12T12:57:50.967Z", "archived": false}, "emitted_at": 1655280716905} +{"stream": "companies", "data": {"id": "7097484855", "properties": {"about_us": null, "address": null, "address2": null, "annualrevenue": null, "city": "Test City 93", "closedate": null, "closedate_timestamp_earliest_value_a2a17e6e": null, "country": "Bangladesh", "createdate": "2021-10-12T12:57:44.302000+00:00", "days_to_close": null, "description": "This is a placeholder page installed by the Ubuntu release of the Lighttpd server package.", "domain": "test.domain.net", "engagements_last_meeting_booked": null, "engagements_last_meeting_booked_campaign": null, "engagements_last_meeting_booked_medium": null, "engagements_last_meeting_booked_source": null, "facebook_company_page": null, "facebookfans": null, "first_contact_createdate": null, "first_contact_createdate_timestamp_earliest_value_78b50eea": null, "first_conversion_date": null, "first_conversion_date_timestamp_earliest_value_61f58f2c": null, "first_conversion_event_name": null, "first_conversion_event_name_timestamp_earliest_value_68ddae0a": null, "first_deal_created_date": null, "founded_year": null, "googleplus_page": null, "hs_additional_domains": null, "hs_all_accessible_team_ids": null, "hs_all_assigned_business_unit_ids": null, "hs_all_owner_ids": null, "hs_all_team_ids": null, "hs_analytics_first_timestamp": null, "hs_analytics_first_timestamp_timestamp_earliest_value_11e3a63a": null, "hs_analytics_first_touch_converting_campaign": null, "hs_analytics_first_touch_converting_campaign_timestamp_earliest_value_4757fe10": null, "hs_analytics_first_visit_timestamp": null, "hs_analytics_first_visit_timestamp_timestamp_earliest_value_accc17ae": null, "hs_analytics_last_timestamp": null, "hs_analytics_last_timestamp_timestamp_latest_value_4e16365a": null, "hs_analytics_last_touch_converting_campaign": null, 
"hs_analytics_last_touch_converting_campaign_timestamp_latest_value_81a64e30": null, "hs_analytics_last_visit_timestamp": null, "hs_analytics_last_visit_timestamp_timestamp_latest_value_999a0fce": null, "hs_analytics_num_page_views": null, "hs_analytics_num_page_views_cardinality_sum_e46e85b0": null, "hs_analytics_num_visits": null, "hs_analytics_num_visits_cardinality_sum_53d952a6": null, "hs_analytics_source": null, "hs_analytics_source_data_1": null, "hs_analytics_source_data_1_timestamp_earliest_value_9b2f1fa1": null, "hs_analytics_source_data_2": null, "hs_analytics_source_data_2_timestamp_earliest_value_9b2f9400": null, "hs_analytics_source_timestamp_earliest_value_25a3a52c": null, "hs_avatar_filemanager_key": null, "hs_created_by_user_id": null, "hs_createdate": null, "hs_date_entered_customer": null, "hs_date_entered_evangelist": null, "hs_date_entered_lead": null, "hs_date_entered_marketingqualifiedlead": null, "hs_date_entered_opportunity": null, "hs_date_entered_other": null, "hs_date_entered_salesqualifiedlead": null, "hs_date_entered_subscriber": null, "hs_date_exited_customer": null, "hs_date_exited_evangelist": null, "hs_date_exited_lead": null, "hs_date_exited_marketingqualifiedlead": null, "hs_date_exited_opportunity": null, "hs_date_exited_other": null, "hs_date_exited_salesqualifiedlead": null, "hs_date_exited_subscriber": null, "hs_ideal_customer_profile": null, "hs_is_target_account": null, "hs_last_booked_meeting_date": null, "hs_last_logged_call_date": null, "hs_last_open_task_date": null, "hs_last_sales_activity_date": null, "hs_last_sales_activity_timestamp": null, "hs_lastmodifieddate": "2021-10-12T12:57:52.192000+00:00", "hs_latest_createdate_of_active_subscriptions": null, "hs_latest_meeting_activity": null, "hs_lead_status": null, "hs_merged_object_ids": null, "hs_num_blockers": 0.0, "hs_num_child_companies": 0.0, "hs_num_contacts_with_buying_roles": 0.0, "hs_num_decision_makers": 0.0, "hs_num_open_deals": 0.0, "hs_object_id": 
7097484855, "hs_parent_company_id": null, "hs_pipeline": null, "hs_predictivecontactscore_v2": null, "hs_predictivecontactscore_v2_next_max_max_d4e58c1e": null, "hs_sales_email_last_replied": null, "hs_target_account": null, "hs_target_account_probability": 0.4076234698295593, "hs_target_account_recommendation_snooze_time": null, "hs_target_account_recommendation_state": null, "hs_time_in_customer": null, "hs_time_in_evangelist": null, "hs_time_in_lead": null, "hs_time_in_marketingqualifiedlead": null, "hs_time_in_opportunity": null, "hs_time_in_other": null, "hs_time_in_salesqualifiedlead": null, "hs_time_in_subscriber": null, "hs_total_deal_value": null, "hs_unique_creation_key": null, "hs_updated_by_user_id": null, "hs_user_ids_of_all_notification_followers": null, "hs_user_ids_of_all_notification_unfollowers": null, "hs_user_ids_of_all_owners": null, "hubspot_owner_assigneddate": null, "hubspot_owner_id": null, "hubspot_team_id": null, "hubspotscore": null, "industry": "Airbyte Test", "is_public": true, "lifecyclestage": null, "linkedin_company_page": "https://www.linkedin.com/company/bd-domain", "linkedinbio": null, "name": "test company 93", "notes_last_contacted": null, "notes_last_updated": null, "notes_next_activity_date": null, "num_associated_contacts": 0.0, "num_associated_deals": null, "num_contacted_notes": null, "num_conversion_events": null, "num_conversion_events_cardinality_sum_d095f14b": null, "num_notes": null, "numberofemployees": null, "phone": "(877) 929-0687-00291", "recent_conversion_date": null, "recent_conversion_date_timestamp_latest_value_72856da1": null, "recent_conversion_event_name": null, "recent_conversion_event_name_timestamp_latest_value_66c820bf": null, "recent_deal_amount": null, "recent_deal_close_date": null, "state": "Massachusetts", "timezone": "Asia/Dhaka", "total_money_raised": null, "total_revenue": null, "twitterbio": null, "twitterfollowers": null, "twitterhandle": null, "type": null, "web_technologies": 
"apache;nginx;facebook_social_plugins;facebook_connect;google_analytics;facebook_like_button;centos", "website": "test.domain.net", "zip": "1216"}, "createdAt": "2021-10-12T12:57:44.302Z", "updatedAt": "2021-10-12T12:57:52.192Z", "archived": false}, "emitted_at": 1655280716906} +{"stream": "companies", "data": {"id": "7097755423", "properties": {"about_us": null, "address": null, "address2": null, "annualrevenue": null, "city": "Test City 41", "closedate": null, "closedate_timestamp_earliest_value_a2a17e6e": null, "country": "Bangladesh", "createdate": "2021-10-12T12:57:26.288000+00:00", "days_to_close": null, "description": "This is a placeholder page installed by the Ubuntu release of the Lighttpd server package.", "domain": "test.domain.net", "engagements_last_meeting_booked": null, "engagements_last_meeting_booked_campaign": null, "engagements_last_meeting_booked_medium": null, "engagements_last_meeting_booked_source": null, "facebook_company_page": null, "facebookfans": null, "first_contact_createdate": null, "first_contact_createdate_timestamp_earliest_value_78b50eea": null, "first_conversion_date": null, "first_conversion_date_timestamp_earliest_value_61f58f2c": null, "first_conversion_event_name": null, "first_conversion_event_name_timestamp_earliest_value_68ddae0a": null, "first_deal_created_date": null, "founded_year": null, "googleplus_page": null, "hs_additional_domains": null, "hs_all_accessible_team_ids": null, "hs_all_assigned_business_unit_ids": null, "hs_all_owner_ids": null, "hs_all_team_ids": null, "hs_analytics_first_timestamp": null, "hs_analytics_first_timestamp_timestamp_earliest_value_11e3a63a": null, "hs_analytics_first_touch_converting_campaign": null, "hs_analytics_first_touch_converting_campaign_timestamp_earliest_value_4757fe10": null, "hs_analytics_first_visit_timestamp": null, "hs_analytics_first_visit_timestamp_timestamp_earliest_value_accc17ae": null, "hs_analytics_last_timestamp": null, 
"hs_analytics_last_timestamp_timestamp_latest_value_4e16365a": null, "hs_analytics_last_touch_converting_campaign": null, "hs_analytics_last_touch_converting_campaign_timestamp_latest_value_81a64e30": null, "hs_analytics_last_visit_timestamp": null, "hs_analytics_last_visit_timestamp_timestamp_latest_value_999a0fce": null, "hs_analytics_num_page_views": null, "hs_analytics_num_page_views_cardinality_sum_e46e85b0": null, "hs_analytics_num_visits": null, "hs_analytics_num_visits_cardinality_sum_53d952a6": null, "hs_analytics_source": null, "hs_analytics_source_data_1": null, "hs_analytics_source_data_1_timestamp_earliest_value_9b2f1fa1": null, "hs_analytics_source_data_2": null, "hs_analytics_source_data_2_timestamp_earliest_value_9b2f9400": null, "hs_analytics_source_timestamp_earliest_value_25a3a52c": null, "hs_avatar_filemanager_key": null, "hs_created_by_user_id": null, "hs_createdate": null, "hs_date_entered_customer": null, "hs_date_entered_evangelist": null, "hs_date_entered_lead": null, "hs_date_entered_marketingqualifiedlead": null, "hs_date_entered_opportunity": null, "hs_date_entered_other": null, "hs_date_entered_salesqualifiedlead": null, "hs_date_entered_subscriber": null, "hs_date_exited_customer": null, "hs_date_exited_evangelist": null, "hs_date_exited_lead": null, "hs_date_exited_marketingqualifiedlead": null, "hs_date_exited_opportunity": null, "hs_date_exited_other": null, "hs_date_exited_salesqualifiedlead": null, "hs_date_exited_subscriber": null, "hs_ideal_customer_profile": null, "hs_is_target_account": null, "hs_last_booked_meeting_date": null, "hs_last_logged_call_date": null, "hs_last_open_task_date": null, "hs_last_sales_activity_date": null, "hs_last_sales_activity_timestamp": null, "hs_lastmodifieddate": "2021-10-12T12:57:42.688000+00:00", "hs_latest_createdate_of_active_subscriptions": null, "hs_latest_meeting_activity": null, "hs_lead_status": null, "hs_merged_object_ids": null, "hs_num_blockers": 0.0, "hs_num_child_companies": 0.0, 
"hs_num_contacts_with_buying_roles": 0.0, "hs_num_decision_makers": 0.0, "hs_num_open_deals": 0.0, "hs_object_id": 7097755423, "hs_parent_company_id": null, "hs_pipeline": null, "hs_predictivecontactscore_v2": null, "hs_predictivecontactscore_v2_next_max_max_d4e58c1e": null, "hs_sales_email_last_replied": null, "hs_target_account": null, "hs_target_account_probability": 0.4076234698295593, "hs_target_account_recommendation_snooze_time": null, "hs_target_account_recommendation_state": null, "hs_time_in_customer": null, "hs_time_in_evangelist": null, "hs_time_in_lead": null, "hs_time_in_marketingqualifiedlead": null, "hs_time_in_opportunity": null, "hs_time_in_other": null, "hs_time_in_salesqualifiedlead": null, "hs_time_in_subscriber": null, "hs_total_deal_value": null, "hs_unique_creation_key": null, "hs_updated_by_user_id": null, "hs_user_ids_of_all_notification_followers": null, "hs_user_ids_of_all_notification_unfollowers": null, "hs_user_ids_of_all_owners": null, "hubspot_owner_assigneddate": null, "hubspot_owner_id": null, "hubspot_team_id": null, "hubspotscore": null, "industry": "Airbyte Test", "is_public": true, "lifecyclestage": null, "linkedin_company_page": "https://www.linkedin.com/company/bd-domain", "linkedinbio": null, "name": "test company 41", "notes_last_contacted": null, "notes_last_updated": null, "notes_next_activity_date": null, "num_associated_contacts": 0.0, "num_associated_deals": null, "num_contacted_notes": null, "num_conversion_events": null, "num_conversion_events_cardinality_sum_d095f14b": null, "num_notes": null, "numberofemployees": null, "phone": "(877) 929-0687-00291", "recent_conversion_date": null, "recent_conversion_date_timestamp_latest_value_72856da1": null, "recent_conversion_event_name": null, "recent_conversion_event_name_timestamp_latest_value_66c820bf": null, "recent_deal_amount": null, "recent_deal_close_date": null, "state": "Massachusetts", "timezone": "Asia/Dhaka", "total_money_raised": null, "total_revenue": null, 
"twitterbio": null, "twitterfollowers": null, "twitterhandle": null, "type": null, "web_technologies": "apache;nginx;facebook_social_plugins;facebook_connect;google_analytics;facebook_like_button;centos", "website": "test.domain.net", "zip": "1216"}, "createdAt": "2021-10-12T12:57:26.288Z", "updatedAt": "2021-10-12T12:57:42.688Z", "archived": false}, "emitted_at": 1655280716907} +{"stream": "companies", "data": {"id": "7097755429", "properties": {"about_us": null, "address": null, "address2": null, "annualrevenue": null, "city": "Test City 62", "closedate": null, "closedate_timestamp_earliest_value_a2a17e6e": null, "country": "Bangladesh", "createdate": "2021-10-12T12:57:33.235000+00:00", "days_to_close": null, "description": "This is a placeholder page installed by the Ubuntu release of the Lighttpd server package.", "domain": "test.domain.net", "engagements_last_meeting_booked": null, "engagements_last_meeting_booked_campaign": null, "engagements_last_meeting_booked_medium": null, "engagements_last_meeting_booked_source": null, "facebook_company_page": null, "facebookfans": null, "first_contact_createdate": null, "first_contact_createdate_timestamp_earliest_value_78b50eea": null, "first_conversion_date": null, "first_conversion_date_timestamp_earliest_value_61f58f2c": null, "first_conversion_event_name": null, "first_conversion_event_name_timestamp_earliest_value_68ddae0a": null, "first_deal_created_date": null, "founded_year": null, "googleplus_page": null, "hs_additional_domains": null, "hs_all_accessible_team_ids": null, "hs_all_assigned_business_unit_ids": null, "hs_all_owner_ids": null, "hs_all_team_ids": null, "hs_analytics_first_timestamp": null, "hs_analytics_first_timestamp_timestamp_earliest_value_11e3a63a": null, "hs_analytics_first_touch_converting_campaign": null, "hs_analytics_first_touch_converting_campaign_timestamp_earliest_value_4757fe10": null, "hs_analytics_first_visit_timestamp": null, 
"hs_analytics_first_visit_timestamp_timestamp_earliest_value_accc17ae": null, "hs_analytics_last_timestamp": null, "hs_analytics_last_timestamp_timestamp_latest_value_4e16365a": null, "hs_analytics_last_touch_converting_campaign": null, "hs_analytics_last_touch_converting_campaign_timestamp_latest_value_81a64e30": null, "hs_analytics_last_visit_timestamp": null, "hs_analytics_last_visit_timestamp_timestamp_latest_value_999a0fce": null, "hs_analytics_num_page_views": null, "hs_analytics_num_page_views_cardinality_sum_e46e85b0": null, "hs_analytics_num_visits": null, "hs_analytics_num_visits_cardinality_sum_53d952a6": null, "hs_analytics_source": null, "hs_analytics_source_data_1": null, "hs_analytics_source_data_1_timestamp_earliest_value_9b2f1fa1": null, "hs_analytics_source_data_2": null, "hs_analytics_source_data_2_timestamp_earliest_value_9b2f9400": null, "hs_analytics_source_timestamp_earliest_value_25a3a52c": null, "hs_avatar_filemanager_key": null, "hs_created_by_user_id": null, "hs_createdate": null, "hs_date_entered_customer": null, "hs_date_entered_evangelist": null, "hs_date_entered_lead": null, "hs_date_entered_marketingqualifiedlead": null, "hs_date_entered_opportunity": null, "hs_date_entered_other": null, "hs_date_entered_salesqualifiedlead": null, "hs_date_entered_subscriber": null, "hs_date_exited_customer": null, "hs_date_exited_evangelist": null, "hs_date_exited_lead": null, "hs_date_exited_marketingqualifiedlead": null, "hs_date_exited_opportunity": null, "hs_date_exited_other": null, "hs_date_exited_salesqualifiedlead": null, "hs_date_exited_subscriber": null, "hs_ideal_customer_profile": null, "hs_is_target_account": null, "hs_last_booked_meeting_date": null, "hs_last_logged_call_date": null, "hs_last_open_task_date": null, "hs_last_sales_activity_date": null, "hs_last_sales_activity_timestamp": null, "hs_lastmodifieddate": "2021-10-12T12:57:38.262000+00:00", "hs_latest_createdate_of_active_subscriptions": null, "hs_latest_meeting_activity": 
null, "hs_lead_status": null, "hs_merged_object_ids": null, "hs_num_blockers": 0.0, "hs_num_child_companies": 0.0, "hs_num_contacts_with_buying_roles": 0.0, "hs_num_decision_makers": 0.0, "hs_num_open_deals": 0.0, "hs_object_id": 7097755429, "hs_parent_company_id": null, "hs_pipeline": null, "hs_predictivecontactscore_v2": null, "hs_predictivecontactscore_v2_next_max_max_d4e58c1e": null, "hs_sales_email_last_replied": null, "hs_target_account": null, "hs_target_account_probability": 0.4076234698295593, "hs_target_account_recommendation_snooze_time": null, "hs_target_account_recommendation_state": null, "hs_time_in_customer": null, "hs_time_in_evangelist": null, "hs_time_in_lead": null, "hs_time_in_marketingqualifiedlead": null, "hs_time_in_opportunity": null, "hs_time_in_other": null, "hs_time_in_salesqualifiedlead": null, "hs_time_in_subscriber": null, "hs_total_deal_value": null, "hs_unique_creation_key": null, "hs_updated_by_user_id": null, "hs_user_ids_of_all_notification_followers": null, "hs_user_ids_of_all_notification_unfollowers": null, "hs_user_ids_of_all_owners": null, "hubspot_owner_assigneddate": null, "hubspot_owner_id": null, "hubspot_team_id": null, "hubspotscore": null, "industry": "Airbyte Test", "is_public": true, "lifecyclestage": null, "linkedin_company_page": "https://www.linkedin.com/company/bd-domain", "linkedinbio": null, "name": "test company 62", "notes_last_contacted": null, "notes_last_updated": null, "notes_next_activity_date": null, "num_associated_contacts": 0.0, "num_associated_deals": null, "num_contacted_notes": null, "num_conversion_events": null, "num_conversion_events_cardinality_sum_d095f14b": null, "num_notes": null, "numberofemployees": null, "phone": "(877) 929-0687-00291", "recent_conversion_date": null, "recent_conversion_date_timestamp_latest_value_72856da1": null, "recent_conversion_event_name": null, "recent_conversion_event_name_timestamp_latest_value_66c820bf": null, "recent_deal_amount": null, 
"recent_deal_close_date": null, "state": "Massachusetts", "timezone": "Asia/Dhaka", "total_money_raised": null, "total_revenue": null, "twitterbio": null, "twitterfollowers": null, "twitterhandle": null, "type": null, "web_technologies": "apache;nginx;facebook_social_plugins;facebook_connect;google_analytics;facebook_like_button;centos", "website": "test.domain.net", "zip": "1216"}, "createdAt": "2021-10-12T12:57:33.235Z", "updatedAt": "2021-10-12T12:57:38.262Z", "archived": false}, "emitted_at": 1655280716907} +{"stream": "companies", "data": {"id": "7097760584", "properties": {"about_us": null, "address": null, "address2": null, "annualrevenue": null, "city": "Test City 14", "closedate": null, "closedate_timestamp_earliest_value_a2a17e6e": null, "country": "Bangladesh", "createdate": "2021-10-12T12:57:17.476000+00:00", "days_to_close": null, "description": "This is a placeholder page installed by the Ubuntu release of the Lighttpd server package.", "domain": "test.domain.net", "engagements_last_meeting_booked": null, "engagements_last_meeting_booked_campaign": null, "engagements_last_meeting_booked_medium": null, "engagements_last_meeting_booked_source": null, "facebook_company_page": null, "facebookfans": null, "first_contact_createdate": null, "first_contact_createdate_timestamp_earliest_value_78b50eea": null, "first_conversion_date": null, "first_conversion_date_timestamp_earliest_value_61f58f2c": null, "first_conversion_event_name": null, "first_conversion_event_name_timestamp_earliest_value_68ddae0a": null, "first_deal_created_date": null, "founded_year": null, "googleplus_page": null, "hs_additional_domains": null, "hs_all_accessible_team_ids": null, "hs_all_assigned_business_unit_ids": null, "hs_all_owner_ids": null, "hs_all_team_ids": null, "hs_analytics_first_timestamp": null, "hs_analytics_first_timestamp_timestamp_earliest_value_11e3a63a": null, "hs_analytics_first_touch_converting_campaign": null, 
"hs_analytics_first_touch_converting_campaign_timestamp_earliest_value_4757fe10": null, "hs_analytics_first_visit_timestamp": null, "hs_analytics_first_visit_timestamp_timestamp_earliest_value_accc17ae": null, "hs_analytics_last_timestamp": null, "hs_analytics_last_timestamp_timestamp_latest_value_4e16365a": null, "hs_analytics_last_touch_converting_campaign": null, "hs_analytics_last_touch_converting_campaign_timestamp_latest_value_81a64e30": null, "hs_analytics_last_visit_timestamp": null, "hs_analytics_last_visit_timestamp_timestamp_latest_value_999a0fce": null, "hs_analytics_num_page_views": null, "hs_analytics_num_page_views_cardinality_sum_e46e85b0": null, "hs_analytics_num_visits": null, "hs_analytics_num_visits_cardinality_sum_53d952a6": null, "hs_analytics_source": null, "hs_analytics_source_data_1": null, "hs_analytics_source_data_1_timestamp_earliest_value_9b2f1fa1": null, "hs_analytics_source_data_2": null, "hs_analytics_source_data_2_timestamp_earliest_value_9b2f9400": null, "hs_analytics_source_timestamp_earliest_value_25a3a52c": null, "hs_avatar_filemanager_key": null, "hs_created_by_user_id": null, "hs_createdate": null, "hs_date_entered_customer": null, "hs_date_entered_evangelist": null, "hs_date_entered_lead": null, "hs_date_entered_marketingqualifiedlead": null, "hs_date_entered_opportunity": null, "hs_date_entered_other": null, "hs_date_entered_salesqualifiedlead": null, "hs_date_entered_subscriber": null, "hs_date_exited_customer": null, "hs_date_exited_evangelist": null, "hs_date_exited_lead": null, "hs_date_exited_marketingqualifiedlead": null, "hs_date_exited_opportunity": null, "hs_date_exited_other": null, "hs_date_exited_salesqualifiedlead": null, "hs_date_exited_subscriber": null, "hs_ideal_customer_profile": null, "hs_is_target_account": null, "hs_last_booked_meeting_date": null, "hs_last_logged_call_date": null, "hs_last_open_task_date": null, "hs_last_sales_activity_date": null, "hs_last_sales_activity_timestamp": null, 
"hs_lastmodifieddate": "2021-10-12T12:57:34.679000+00:00", "hs_latest_createdate_of_active_subscriptions": null, "hs_latest_meeting_activity": null, "hs_lead_status": null, "hs_merged_object_ids": null, "hs_num_blockers": 0.0, "hs_num_child_companies": 0.0, "hs_num_contacts_with_buying_roles": 0.0, "hs_num_decision_makers": 0.0, "hs_num_open_deals": 0.0, "hs_object_id": 7097760584, "hs_parent_company_id": null, "hs_pipeline": null, "hs_predictivecontactscore_v2": null, "hs_predictivecontactscore_v2_next_max_max_d4e58c1e": null, "hs_sales_email_last_replied": null, "hs_target_account": null, "hs_target_account_probability": 0.4076234698295593, "hs_target_account_recommendation_snooze_time": null, "hs_target_account_recommendation_state": null, "hs_time_in_customer": null, "hs_time_in_evangelist": null, "hs_time_in_lead": null, "hs_time_in_marketingqualifiedlead": null, "hs_time_in_opportunity": null, "hs_time_in_other": null, "hs_time_in_salesqualifiedlead": null, "hs_time_in_subscriber": null, "hs_total_deal_value": null, "hs_unique_creation_key": null, "hs_updated_by_user_id": null, "hs_user_ids_of_all_notification_followers": null, "hs_user_ids_of_all_notification_unfollowers": null, "hs_user_ids_of_all_owners": null, "hubspot_owner_assigneddate": null, "hubspot_owner_id": null, "hubspot_team_id": null, "hubspotscore": null, "industry": "Airbyte Test", "is_public": true, "lifecyclestage": null, "linkedin_company_page": "https://www.linkedin.com/company/bd-domain", "linkedinbio": null, "name": "test company 14", "notes_last_contacted": null, "notes_last_updated": null, "notes_next_activity_date": null, "num_associated_contacts": 0.0, "num_associated_deals": null, "num_contacted_notes": null, "num_conversion_events": null, "num_conversion_events_cardinality_sum_d095f14b": null, "num_notes": null, "numberofemployees": null, "phone": "(877) 929-0687-00291", "recent_conversion_date": null, "recent_conversion_date_timestamp_latest_value_72856da1": null, 
"recent_conversion_event_name": null, "recent_conversion_event_name_timestamp_latest_value_66c820bf": null, "recent_deal_amount": null, "recent_deal_close_date": null, "state": "Massachusetts", "timezone": "Asia/Dhaka", "total_money_raised": null, "total_revenue": null, "twitterbio": null, "twitterfollowers": null, "twitterhandle": null, "type": null, "web_technologies": "apache;nginx;facebook_social_plugins;facebook_connect;google_analytics;facebook_like_button;centos", "website": "test.domain.net", "zip": "1216"}, "createdAt": "2021-10-12T12:57:17.476Z", "updatedAt": "2021-10-12T12:57:34.679Z", "archived": false}, "emitted_at": 1655280716908} +{"stream": "companies", "data": {"id": "7097760587", "properties": {"about_us": null, "address": null, "address2": null, "annualrevenue": null, "city": "Test City 19", "closedate": null, "closedate_timestamp_earliest_value_a2a17e6e": null, "country": "Bangladesh", "createdate": "2021-10-12T12:57:19.055000+00:00", "days_to_close": null, "description": "This is a placeholder page installed by the Ubuntu release of the Lighttpd server package.", "domain": "test.domain.net", "engagements_last_meeting_booked": null, "engagements_last_meeting_booked_campaign": null, "engagements_last_meeting_booked_medium": null, "engagements_last_meeting_booked_source": null, "facebook_company_page": null, "facebookfans": null, "first_contact_createdate": null, "first_contact_createdate_timestamp_earliest_value_78b50eea": null, "first_conversion_date": null, "first_conversion_date_timestamp_earliest_value_61f58f2c": null, "first_conversion_event_name": null, "first_conversion_event_name_timestamp_earliest_value_68ddae0a": null, "first_deal_created_date": null, "founded_year": null, "googleplus_page": null, "hs_additional_domains": null, "hs_all_accessible_team_ids": null, "hs_all_assigned_business_unit_ids": null, "hs_all_owner_ids": null, "hs_all_team_ids": null, "hs_analytics_first_timestamp": null, 
"hs_analytics_first_timestamp_timestamp_earliest_value_11e3a63a": null, "hs_analytics_first_touch_converting_campaign": null, "hs_analytics_first_touch_converting_campaign_timestamp_earliest_value_4757fe10": null, "hs_analytics_first_visit_timestamp": null, "hs_analytics_first_visit_timestamp_timestamp_earliest_value_accc17ae": null, "hs_analytics_last_timestamp": null, "hs_analytics_last_timestamp_timestamp_latest_value_4e16365a": null, "hs_analytics_last_touch_converting_campaign": null, "hs_analytics_last_touch_converting_campaign_timestamp_latest_value_81a64e30": null, "hs_analytics_last_visit_timestamp": null, "hs_analytics_last_visit_timestamp_timestamp_latest_value_999a0fce": null, "hs_analytics_num_page_views": null, "hs_analytics_num_page_views_cardinality_sum_e46e85b0": null, "hs_analytics_num_visits": null, "hs_analytics_num_visits_cardinality_sum_53d952a6": null, "hs_analytics_source": null, "hs_analytics_source_data_1": null, "hs_analytics_source_data_1_timestamp_earliest_value_9b2f1fa1": null, "hs_analytics_source_data_2": null, "hs_analytics_source_data_2_timestamp_earliest_value_9b2f9400": null, "hs_analytics_source_timestamp_earliest_value_25a3a52c": null, "hs_avatar_filemanager_key": null, "hs_created_by_user_id": null, "hs_createdate": null, "hs_date_entered_customer": null, "hs_date_entered_evangelist": null, "hs_date_entered_lead": null, "hs_date_entered_marketingqualifiedlead": null, "hs_date_entered_opportunity": null, "hs_date_entered_other": null, "hs_date_entered_salesqualifiedlead": null, "hs_date_entered_subscriber": null, "hs_date_exited_customer": null, "hs_date_exited_evangelist": null, "hs_date_exited_lead": null, "hs_date_exited_marketingqualifiedlead": null, "hs_date_exited_opportunity": null, "hs_date_exited_other": null, "hs_date_exited_salesqualifiedlead": null, "hs_date_exited_subscriber": null, "hs_ideal_customer_profile": null, "hs_is_target_account": null, "hs_last_booked_meeting_date": null, "hs_last_logged_call_date": 
null, "hs_last_open_task_date": null, "hs_last_sales_activity_date": null, "hs_last_sales_activity_timestamp": null, "hs_lastmodifieddate": "2021-10-12T12:57:27.327000+00:00", "hs_latest_createdate_of_active_subscriptions": null, "hs_latest_meeting_activity": null, "hs_lead_status": null, "hs_merged_object_ids": null, "hs_num_blockers": 0.0, "hs_num_child_companies": 0.0, "hs_num_contacts_with_buying_roles": 0.0, "hs_num_decision_makers": 0.0, "hs_num_open_deals": 0.0, "hs_object_id": 7097760587, "hs_parent_company_id": null, "hs_pipeline": null, "hs_predictivecontactscore_v2": null, "hs_predictivecontactscore_v2_next_max_max_d4e58c1e": null, "hs_sales_email_last_replied": null, "hs_target_account": null, "hs_target_account_probability": 0.4076234698295593, "hs_target_account_recommendation_snooze_time": null, "hs_target_account_recommendation_state": null, "hs_time_in_customer": null, "hs_time_in_evangelist": null, "hs_time_in_lead": null, "hs_time_in_marketingqualifiedlead": null, "hs_time_in_opportunity": null, "hs_time_in_other": null, "hs_time_in_salesqualifiedlead": null, "hs_time_in_subscriber": null, "hs_total_deal_value": null, "hs_unique_creation_key": null, "hs_updated_by_user_id": null, "hs_user_ids_of_all_notification_followers": null, "hs_user_ids_of_all_notification_unfollowers": null, "hs_user_ids_of_all_owners": null, "hubspot_owner_assigneddate": null, "hubspot_owner_id": null, "hubspot_team_id": null, "hubspotscore": null, "industry": "Airbyte Test", "is_public": true, "lifecyclestage": null, "linkedin_company_page": "https://www.linkedin.com/company/bd-domain", "linkedinbio": null, "name": "test company 19", "notes_last_contacted": null, "notes_last_updated": null, "notes_next_activity_date": null, "num_associated_contacts": 0.0, "num_associated_deals": null, "num_contacted_notes": null, "num_conversion_events": null, "num_conversion_events_cardinality_sum_d095f14b": null, "num_notes": null, "numberofemployees": null, "phone": "(877) 
929-0687-00291", "recent_conversion_date": null, "recent_conversion_date_timestamp_latest_value_72856da1": null, "recent_conversion_event_name": null, "recent_conversion_event_name_timestamp_latest_value_66c820bf": null, "recent_deal_amount": null, "recent_deal_close_date": null, "state": "Massachusetts", "timezone": "Asia/Dhaka", "total_money_raised": null, "total_revenue": null, "twitterbio": null, "twitterfollowers": null, "twitterhandle": null, "type": null, "web_technologies": "apache;nginx;facebook_social_plugins;facebook_connect;google_analytics;facebook_like_button;centos", "website": "test.domain.net", "zip": "1216"}, "createdAt": "2021-10-12T12:57:19.055Z", "updatedAt": "2021-10-12T12:57:27.327Z", "archived": false}, "emitted_at": 1655280716908} +{"stream": "companies", "data": {"id": "7097760597", "properties": {"about_us": null, "address": null, "address2": null, "annualrevenue": null, "city": "Test City 43", "closedate": null, "closedate_timestamp_earliest_value_a2a17e6e": null, "country": "Bangladesh", "createdate": "2021-10-12T12:57:26.905000+00:00", "days_to_close": null, "description": "This is a placeholder page installed by the Ubuntu release of the Lighttpd server package.", "domain": "test.domain.net", "engagements_last_meeting_booked": null, "engagements_last_meeting_booked_campaign": null, "engagements_last_meeting_booked_medium": null, "engagements_last_meeting_booked_source": null, "facebook_company_page": null, "facebookfans": null, "first_contact_createdate": null, "first_contact_createdate_timestamp_earliest_value_78b50eea": null, "first_conversion_date": null, "first_conversion_date_timestamp_earliest_value_61f58f2c": null, "first_conversion_event_name": null, "first_conversion_event_name_timestamp_earliest_value_68ddae0a": null, "first_deal_created_date": null, "founded_year": null, "googleplus_page": null, "hs_additional_domains": null, "hs_all_accessible_team_ids": null, "hs_all_assigned_business_unit_ids": null, "hs_all_owner_ids": 
null, "hs_all_team_ids": null, "hs_analytics_first_timestamp": null, "hs_analytics_first_timestamp_timestamp_earliest_value_11e3a63a": null, "hs_analytics_first_touch_converting_campaign": null, "hs_analytics_first_touch_converting_campaign_timestamp_earliest_value_4757fe10": null, "hs_analytics_first_visit_timestamp": null, "hs_analytics_first_visit_timestamp_timestamp_earliest_value_accc17ae": null, "hs_analytics_last_timestamp": null, "hs_analytics_last_timestamp_timestamp_latest_value_4e16365a": null, "hs_analytics_last_touch_converting_campaign": null, "hs_analytics_last_touch_converting_campaign_timestamp_latest_value_81a64e30": null, "hs_analytics_last_visit_timestamp": null, "hs_analytics_last_visit_timestamp_timestamp_latest_value_999a0fce": null, "hs_analytics_num_page_views": null, "hs_analytics_num_page_views_cardinality_sum_e46e85b0": null, "hs_analytics_num_visits": null, "hs_analytics_num_visits_cardinality_sum_53d952a6": null, "hs_analytics_source": null, "hs_analytics_source_data_1": null, "hs_analytics_source_data_1_timestamp_earliest_value_9b2f1fa1": null, "hs_analytics_source_data_2": null, "hs_analytics_source_data_2_timestamp_earliest_value_9b2f9400": null, "hs_analytics_source_timestamp_earliest_value_25a3a52c": null, "hs_avatar_filemanager_key": null, "hs_created_by_user_id": null, "hs_createdate": null, "hs_date_entered_customer": null, "hs_date_entered_evangelist": null, "hs_date_entered_lead": null, "hs_date_entered_marketingqualifiedlead": null, "hs_date_entered_opportunity": null, "hs_date_entered_other": null, "hs_date_entered_salesqualifiedlead": null, "hs_date_entered_subscriber": null, "hs_date_exited_customer": null, "hs_date_exited_evangelist": null, "hs_date_exited_lead": null, "hs_date_exited_marketingqualifiedlead": null, "hs_date_exited_opportunity": null, "hs_date_exited_other": null, "hs_date_exited_salesqualifiedlead": null, "hs_date_exited_subscriber": null, "hs_ideal_customer_profile": null, "hs_is_target_account": null, 
"hs_last_booked_meeting_date": null, "hs_last_logged_call_date": null, "hs_last_open_task_date": null, "hs_last_sales_activity_date": null, "hs_last_sales_activity_timestamp": null, "hs_lastmodifieddate": "2021-10-12T12:57:49.685000+00:00", "hs_latest_createdate_of_active_subscriptions": null, "hs_latest_meeting_activity": null, "hs_lead_status": null, "hs_merged_object_ids": null, "hs_num_blockers": 0.0, "hs_num_child_companies": 0.0, "hs_num_contacts_with_buying_roles": 0.0, "hs_num_decision_makers": 0.0, "hs_num_open_deals": 0.0, "hs_object_id": 7097760597, "hs_parent_company_id": null, "hs_pipeline": null, "hs_predictivecontactscore_v2": null, "hs_predictivecontactscore_v2_next_max_max_d4e58c1e": null, "hs_sales_email_last_replied": null, "hs_target_account": null, "hs_target_account_probability": 0.4076234698295593, "hs_target_account_recommendation_snooze_time": null, "hs_target_account_recommendation_state": null, "hs_time_in_customer": null, "hs_time_in_evangelist": null, "hs_time_in_lead": null, "hs_time_in_marketingqualifiedlead": null, "hs_time_in_opportunity": null, "hs_time_in_other": null, "hs_time_in_salesqualifiedlead": null, "hs_time_in_subscriber": null, "hs_total_deal_value": null, "hs_unique_creation_key": null, "hs_updated_by_user_id": null, "hs_user_ids_of_all_notification_followers": null, "hs_user_ids_of_all_notification_unfollowers": null, "hs_user_ids_of_all_owners": null, "hubspot_owner_assigneddate": null, "hubspot_owner_id": null, "hubspot_team_id": null, "hubspotscore": null, "industry": "Airbyte Test", "is_public": true, "lifecyclestage": null, "linkedin_company_page": "https://www.linkedin.com/company/bd-domain", "linkedinbio": null, "name": "test company 43", "notes_last_contacted": null, "notes_last_updated": null, "notes_next_activity_date": null, "num_associated_contacts": 0.0, "num_associated_deals": null, "num_contacted_notes": null, "num_conversion_events": null, "num_conversion_events_cardinality_sum_d095f14b": null, 
"num_notes": null, "numberofemployees": null, "phone": "(877) 929-0687-00291", "recent_conversion_date": null, "recent_conversion_date_timestamp_latest_value_72856da1": null, "recent_conversion_event_name": null, "recent_conversion_event_name_timestamp_latest_value_66c820bf": null, "recent_deal_amount": null, "recent_deal_close_date": null, "state": "Massachusetts", "timezone": "Asia/Dhaka", "total_money_raised": null, "total_revenue": null, "twitterbio": null, "twitterfollowers": null, "twitterhandle": null, "type": null, "web_technologies": "apache;nginx;facebook_social_plugins;facebook_connect;google_analytics;facebook_like_button;centos", "website": "test.domain.net", "zip": "1216"}, "createdAt": "2021-10-12T12:57:26.905Z", "updatedAt": "2021-10-12T12:57:49.685Z", "archived": false}, "emitted_at": 1655280716909} +{"stream": "companies", "data": {"id": "7097760601", "properties": {"about_us": null, "address": null, "address2": null, "annualrevenue": null, "city": "Test City 55", "closedate": null, "closedate_timestamp_earliest_value_a2a17e6e": null, "country": "Bangladesh", "createdate": "2021-10-12T12:57:31.106000+00:00", "days_to_close": null, "description": "This is a placeholder page installed by the Ubuntu release of the Lighttpd server package.", "domain": "test.domain.net", "engagements_last_meeting_booked": null, "engagements_last_meeting_booked_campaign": null, "engagements_last_meeting_booked_medium": null, "engagements_last_meeting_booked_source": null, "facebook_company_page": null, "facebookfans": null, "first_contact_createdate": null, "first_contact_createdate_timestamp_earliest_value_78b50eea": null, "first_conversion_date": null, "first_conversion_date_timestamp_earliest_value_61f58f2c": null, "first_conversion_event_name": null, "first_conversion_event_name_timestamp_earliest_value_68ddae0a": null, "first_deal_created_date": null, "founded_year": null, "googleplus_page": null, "hs_additional_domains": null, "hs_all_accessible_team_ids": null, 
"hs_all_assigned_business_unit_ids": null, "hs_all_owner_ids": null, "hs_all_team_ids": null, "hs_analytics_first_timestamp": null, "hs_analytics_first_timestamp_timestamp_earliest_value_11e3a63a": null, "hs_analytics_first_touch_converting_campaign": null, "hs_analytics_first_touch_converting_campaign_timestamp_earliest_value_4757fe10": null, "hs_analytics_first_visit_timestamp": null, "hs_analytics_first_visit_timestamp_timestamp_earliest_value_accc17ae": null, "hs_analytics_last_timestamp": null, "hs_analytics_last_timestamp_timestamp_latest_value_4e16365a": null, "hs_analytics_last_touch_converting_campaign": null, "hs_analytics_last_touch_converting_campaign_timestamp_latest_value_81a64e30": null, "hs_analytics_last_visit_timestamp": null, "hs_analytics_last_visit_timestamp_timestamp_latest_value_999a0fce": null, "hs_analytics_num_page_views": null, "hs_analytics_num_page_views_cardinality_sum_e46e85b0": null, "hs_analytics_num_visits": null, "hs_analytics_num_visits_cardinality_sum_53d952a6": null, "hs_analytics_source": null, "hs_analytics_source_data_1": null, "hs_analytics_source_data_1_timestamp_earliest_value_9b2f1fa1": null, "hs_analytics_source_data_2": null, "hs_analytics_source_data_2_timestamp_earliest_value_9b2f9400": null, "hs_analytics_source_timestamp_earliest_value_25a3a52c": null, "hs_avatar_filemanager_key": null, "hs_created_by_user_id": null, "hs_createdate": null, "hs_date_entered_customer": null, "hs_date_entered_evangelist": null, "hs_date_entered_lead": null, "hs_date_entered_marketingqualifiedlead": null, "hs_date_entered_opportunity": null, "hs_date_entered_other": null, "hs_date_entered_salesqualifiedlead": null, "hs_date_entered_subscriber": null, "hs_date_exited_customer": null, "hs_date_exited_evangelist": null, "hs_date_exited_lead": null, "hs_date_exited_marketingqualifiedlead": null, "hs_date_exited_opportunity": null, "hs_date_exited_other": null, "hs_date_exited_salesqualifiedlead": null, "hs_date_exited_subscriber": null, 
"hs_ideal_customer_profile": null, "hs_is_target_account": null, "hs_last_booked_meeting_date": null, "hs_last_logged_call_date": null, "hs_last_open_task_date": null, "hs_last_sales_activity_date": null, "hs_last_sales_activity_timestamp": null, "hs_lastmodifieddate": "2021-10-12T12:57:36.810000+00:00", "hs_latest_createdate_of_active_subscriptions": null, "hs_latest_meeting_activity": null, "hs_lead_status": null, "hs_merged_object_ids": null, "hs_num_blockers": 0.0, "hs_num_child_companies": 0.0, "hs_num_contacts_with_buying_roles": 0.0, "hs_num_decision_makers": 0.0, "hs_num_open_deals": 0.0, "hs_object_id": 7097760601, "hs_parent_company_id": null, "hs_pipeline": null, "hs_predictivecontactscore_v2": null, "hs_predictivecontactscore_v2_next_max_max_d4e58c1e": null, "hs_sales_email_last_replied": null, "hs_target_account": null, "hs_target_account_probability": 0.4076234698295593, "hs_target_account_recommendation_snooze_time": null, "hs_target_account_recommendation_state": null, "hs_time_in_customer": null, "hs_time_in_evangelist": null, "hs_time_in_lead": null, "hs_time_in_marketingqualifiedlead": null, "hs_time_in_opportunity": null, "hs_time_in_other": null, "hs_time_in_salesqualifiedlead": null, "hs_time_in_subscriber": null, "hs_total_deal_value": null, "hs_unique_creation_key": null, "hs_updated_by_user_id": null, "hs_user_ids_of_all_notification_followers": null, "hs_user_ids_of_all_notification_unfollowers": null, "hs_user_ids_of_all_owners": null, "hubspot_owner_assigneddate": null, "hubspot_owner_id": null, "hubspot_team_id": null, "hubspotscore": null, "industry": "Airbyte Test", "is_public": true, "lifecyclestage": null, "linkedin_company_page": "https://www.linkedin.com/company/bd-domain", "linkedinbio": null, "name": "test company 55", "notes_last_contacted": null, "notes_last_updated": null, "notes_next_activity_date": null, "num_associated_contacts": 0.0, "num_associated_deals": null, "num_contacted_notes": null, "num_conversion_events": null, 
"num_conversion_events_cardinality_sum_d095f14b": null, "num_notes": null, "numberofemployees": null, "phone": "(877) 929-0687-00291", "recent_conversion_date": null, "recent_conversion_date_timestamp_latest_value_72856da1": null, "recent_conversion_event_name": null, "recent_conversion_event_name_timestamp_latest_value_66c820bf": null, "recent_deal_amount": null, "recent_deal_close_date": null, "state": "Massachusetts", "timezone": "Asia/Dhaka", "total_money_raised": null, "total_revenue": null, "twitterbio": null, "twitterfollowers": null, "twitterhandle": null, "type": null, "web_technologies": "apache;nginx;facebook_social_plugins;facebook_connect;google_analytics;facebook_like_button;centos", "website": "test.domain.net", "zip": "1216"}, "createdAt": "2021-10-12T12:57:31.106Z", "updatedAt": "2021-10-12T12:57:36.810Z", "archived": false}, "emitted_at": 1655280716909} +{"stream": "companies", "data": {"id": "7097760603", "properties": {"about_us": null, "address": null, "address2": null, "annualrevenue": null, "city": "Test City 69", "closedate": null, "closedate_timestamp_earliest_value_a2a17e6e": null, "country": "Bangladesh", "createdate": "2021-10-12T12:57:35.399000+00:00", "days_to_close": null, "description": "This is a placeholder page installed by the Ubuntu release of the Lighttpd server package.", "domain": "test.domain.net", "engagements_last_meeting_booked": null, "engagements_last_meeting_booked_campaign": null, "engagements_last_meeting_booked_medium": null, "engagements_last_meeting_booked_source": null, "facebook_company_page": null, "facebookfans": null, "first_contact_createdate": null, "first_contact_createdate_timestamp_earliest_value_78b50eea": null, "first_conversion_date": null, "first_conversion_date_timestamp_earliest_value_61f58f2c": null, "first_conversion_event_name": null, "first_conversion_event_name_timestamp_earliest_value_68ddae0a": null, "first_deal_created_date": null, "founded_year": null, "googleplus_page": null, 
"hs_additional_domains": null, "hs_all_accessible_team_ids": null, "hs_all_assigned_business_unit_ids": null, "hs_all_owner_ids": null, "hs_all_team_ids": null, "hs_analytics_first_timestamp": null, "hs_analytics_first_timestamp_timestamp_earliest_value_11e3a63a": null, "hs_analytics_first_touch_converting_campaign": null, "hs_analytics_first_touch_converting_campaign_timestamp_earliest_value_4757fe10": null, "hs_analytics_first_visit_timestamp": null, "hs_analytics_first_visit_timestamp_timestamp_earliest_value_accc17ae": null, "hs_analytics_last_timestamp": null, "hs_analytics_last_timestamp_timestamp_latest_value_4e16365a": null, "hs_analytics_last_touch_converting_campaign": null, "hs_analytics_last_touch_converting_campaign_timestamp_latest_value_81a64e30": null, "hs_analytics_last_visit_timestamp": null, "hs_analytics_last_visit_timestamp_timestamp_latest_value_999a0fce": null, "hs_analytics_num_page_views": null, "hs_analytics_num_page_views_cardinality_sum_e46e85b0": null, "hs_analytics_num_visits": null, "hs_analytics_num_visits_cardinality_sum_53d952a6": null, "hs_analytics_source": null, "hs_analytics_source_data_1": null, "hs_analytics_source_data_1_timestamp_earliest_value_9b2f1fa1": null, "hs_analytics_source_data_2": null, "hs_analytics_source_data_2_timestamp_earliest_value_9b2f9400": null, "hs_analytics_source_timestamp_earliest_value_25a3a52c": null, "hs_avatar_filemanager_key": null, "hs_created_by_user_id": null, "hs_createdate": null, "hs_date_entered_customer": null, "hs_date_entered_evangelist": null, "hs_date_entered_lead": null, "hs_date_entered_marketingqualifiedlead": null, "hs_date_entered_opportunity": null, "hs_date_entered_other": null, "hs_date_entered_salesqualifiedlead": null, "hs_date_entered_subscriber": null, "hs_date_exited_customer": null, "hs_date_exited_evangelist": null, "hs_date_exited_lead": null, "hs_date_exited_marketingqualifiedlead": null, "hs_date_exited_opportunity": null, "hs_date_exited_other": null, 
"hs_date_exited_salesqualifiedlead": null, "hs_date_exited_subscriber": null, "hs_ideal_customer_profile": null, "hs_is_target_account": null, "hs_last_booked_meeting_date": null, "hs_last_logged_call_date": null, "hs_last_open_task_date": null, "hs_last_sales_activity_date": null, "hs_last_sales_activity_timestamp": null, "hs_lastmodifieddate": "2021-10-12T12:57:41.706000+00:00", "hs_latest_createdate_of_active_subscriptions": null, "hs_latest_meeting_activity": null, "hs_lead_status": null, "hs_merged_object_ids": null, "hs_num_blockers": 0.0, "hs_num_child_companies": 0.0, "hs_num_contacts_with_buying_roles": 0.0, "hs_num_decision_makers": 0.0, "hs_num_open_deals": 0.0, "hs_object_id": 7097760603, "hs_parent_company_id": null, "hs_pipeline": null, "hs_predictivecontactscore_v2": null, "hs_predictivecontactscore_v2_next_max_max_d4e58c1e": null, "hs_sales_email_last_replied": null, "hs_target_account": null, "hs_target_account_probability": 0.4076234698295593, "hs_target_account_recommendation_snooze_time": null, "hs_target_account_recommendation_state": null, "hs_time_in_customer": null, "hs_time_in_evangelist": null, "hs_time_in_lead": null, "hs_time_in_marketingqualifiedlead": null, "hs_time_in_opportunity": null, "hs_time_in_other": null, "hs_time_in_salesqualifiedlead": null, "hs_time_in_subscriber": null, "hs_total_deal_value": null, "hs_unique_creation_key": null, "hs_updated_by_user_id": null, "hs_user_ids_of_all_notification_followers": null, "hs_user_ids_of_all_notification_unfollowers": null, "hs_user_ids_of_all_owners": null, "hubspot_owner_assigneddate": null, "hubspot_owner_id": null, "hubspot_team_id": null, "hubspotscore": null, "industry": "Airbyte Test", "is_public": true, "lifecyclestage": null, "linkedin_company_page": "https://www.linkedin.com/company/bd-domain", "linkedinbio": null, "name": "test company 69", "notes_last_contacted": null, "notes_last_updated": null, "notes_next_activity_date": null, "num_associated_contacts": 0.0, 
"num_associated_deals": null, "num_contacted_notes": null, "num_conversion_events": null, "num_conversion_events_cardinality_sum_d095f14b": null, "num_notes": null, "numberofemployees": null, "phone": "(877) 929-0687-00291", "recent_conversion_date": null, "recent_conversion_date_timestamp_latest_value_72856da1": null, "recent_conversion_event_name": null, "recent_conversion_event_name_timestamp_latest_value_66c820bf": null, "recent_deal_amount": null, "recent_deal_close_date": null, "state": "Massachusetts", "timezone": "Asia/Dhaka", "total_money_raised": null, "total_revenue": null, "twitterbio": null, "twitterfollowers": null, "twitterhandle": null, "type": null, "web_technologies": "apache;nginx;facebook_social_plugins;facebook_connect;google_analytics;facebook_like_button;centos", "website": "test.domain.net", "zip": "1216"}, "createdAt": "2021-10-12T12:57:35.399Z", "updatedAt": "2021-10-12T12:57:41.706Z", "archived": false}, "emitted_at": 1655280716909} +{"stream": "companies", "data": {"id": "7097760610", "properties": {"about_us": null, "address": null, "address2": null, "annualrevenue": null, "city": "Test City 97", "closedate": null, "closedate_timestamp_earliest_value_a2a17e6e": null, "country": "Bangladesh", "createdate": "2021-10-12T12:57:45.832000+00:00", "days_to_close": null, "description": "This is a placeholder page installed by the Ubuntu release of the Lighttpd server package.", "domain": "test.domain.net", "engagements_last_meeting_booked": null, "engagements_last_meeting_booked_campaign": null, "engagements_last_meeting_booked_medium": null, "engagements_last_meeting_booked_source": null, "facebook_company_page": null, "facebookfans": null, "first_contact_createdate": null, "first_contact_createdate_timestamp_earliest_value_78b50eea": null, "first_conversion_date": null, "first_conversion_date_timestamp_earliest_value_61f58f2c": null, "first_conversion_event_name": null, "first_conversion_event_name_timestamp_earliest_value_68ddae0a": null, 
"first_deal_created_date": null, "founded_year": null, "googleplus_page": null, "hs_additional_domains": null, "hs_all_accessible_team_ids": null, "hs_all_assigned_business_unit_ids": null, "hs_all_owner_ids": null, "hs_all_team_ids": null, "hs_analytics_first_timestamp": null, "hs_analytics_first_timestamp_timestamp_earliest_value_11e3a63a": null, "hs_analytics_first_touch_converting_campaign": null, "hs_analytics_first_touch_converting_campaign_timestamp_earliest_value_4757fe10": null, "hs_analytics_first_visit_timestamp": null, "hs_analytics_first_visit_timestamp_timestamp_earliest_value_accc17ae": null, "hs_analytics_last_timestamp": null, "hs_analytics_last_timestamp_timestamp_latest_value_4e16365a": null, "hs_analytics_last_touch_converting_campaign": null, "hs_analytics_last_touch_converting_campaign_timestamp_latest_value_81a64e30": null, "hs_analytics_last_visit_timestamp": null, "hs_analytics_last_visit_timestamp_timestamp_latest_value_999a0fce": null, "hs_analytics_num_page_views": null, "hs_analytics_num_page_views_cardinality_sum_e46e85b0": null, "hs_analytics_num_visits": null, "hs_analytics_num_visits_cardinality_sum_53d952a6": null, "hs_analytics_source": null, "hs_analytics_source_data_1": null, "hs_analytics_source_data_1_timestamp_earliest_value_9b2f1fa1": null, "hs_analytics_source_data_2": null, "hs_analytics_source_data_2_timestamp_earliest_value_9b2f9400": null, "hs_analytics_source_timestamp_earliest_value_25a3a52c": null, "hs_avatar_filemanager_key": null, "hs_created_by_user_id": null, "hs_createdate": null, "hs_date_entered_customer": null, "hs_date_entered_evangelist": null, "hs_date_entered_lead": null, "hs_date_entered_marketingqualifiedlead": null, "hs_date_entered_opportunity": null, "hs_date_entered_other": null, "hs_date_entered_salesqualifiedlead": null, "hs_date_entered_subscriber": null, "hs_date_exited_customer": null, "hs_date_exited_evangelist": null, "hs_date_exited_lead": null, "hs_date_exited_marketingqualifiedlead": null, 
"hs_date_exited_opportunity": null, "hs_date_exited_other": null, "hs_date_exited_salesqualifiedlead": null, "hs_date_exited_subscriber": null, "hs_ideal_customer_profile": null, "hs_is_target_account": null, "hs_last_booked_meeting_date": null, "hs_last_logged_call_date": null, "hs_last_open_task_date": null, "hs_last_sales_activity_date": null, "hs_last_sales_activity_timestamp": null, "hs_lastmodifieddate": "2021-10-12T12:57:54.753000+00:00", "hs_latest_createdate_of_active_subscriptions": null, "hs_latest_meeting_activity": null, "hs_lead_status": null, "hs_merged_object_ids": null, "hs_num_blockers": 0.0, "hs_num_child_companies": 0.0, "hs_num_contacts_with_buying_roles": 0.0, "hs_num_decision_makers": 0.0, "hs_num_open_deals": 0.0, "hs_object_id": 7097760610, "hs_parent_company_id": null, "hs_pipeline": null, "hs_predictivecontactscore_v2": null, "hs_predictivecontactscore_v2_next_max_max_d4e58c1e": null, "hs_sales_email_last_replied": null, "hs_target_account": null, "hs_target_account_probability": 0.4076234698295593, "hs_target_account_recommendation_snooze_time": null, "hs_target_account_recommendation_state": null, "hs_time_in_customer": null, "hs_time_in_evangelist": null, "hs_time_in_lead": null, "hs_time_in_marketingqualifiedlead": null, "hs_time_in_opportunity": null, "hs_time_in_other": null, "hs_time_in_salesqualifiedlead": null, "hs_time_in_subscriber": null, "hs_total_deal_value": null, "hs_unique_creation_key": null, "hs_updated_by_user_id": null, "hs_user_ids_of_all_notification_followers": null, "hs_user_ids_of_all_notification_unfollowers": null, "hs_user_ids_of_all_owners": null, "hubspot_owner_assigneddate": null, "hubspot_owner_id": null, "hubspot_team_id": null, "hubspotscore": null, "industry": "Airbyte Test", "is_public": true, "lifecyclestage": null, "linkedin_company_page": "https://www.linkedin.com/company/bd-domain", "linkedinbio": null, "name": "test company 97", "notes_last_contacted": null, "notes_last_updated": null, 
"notes_next_activity_date": null, "num_associated_contacts": 0.0, "num_associated_deals": null, "num_contacted_notes": null, "num_conversion_events": null, "num_conversion_events_cardinality_sum_d095f14b": null, "num_notes": null, "numberofemployees": null, "phone": "(877) 929-0687-00291", "recent_conversion_date": null, "recent_conversion_date_timestamp_latest_value_72856da1": null, "recent_conversion_event_name": null, "recent_conversion_event_name_timestamp_latest_value_66c820bf": null, "recent_deal_amount": null, "recent_deal_close_date": null, "state": "Massachusetts", "timezone": "Asia/Dhaka", "total_money_raised": null, "total_revenue": null, "twitterbio": null, "twitterfollowers": null, "twitterhandle": null, "type": null, "web_technologies": "apache;nginx;facebook_social_plugins;facebook_connect;google_analytics;facebook_like_button;centos", "website": "test.domain.net", "zip": "1216"}, "createdAt": "2021-10-12T12:57:45.832Z", "updatedAt": "2021-10-12T12:57:54.753Z", "archived": false}, "emitted_at": 1655280716910} +{"stream": "companies", "data": {"id": "7097763880", "properties": {"about_us": null, "address": null, "address2": null, "annualrevenue": null, "city": "Test City 4", "closedate": null, "closedate_timestamp_earliest_value_a2a17e6e": null, "country": "Bangladesh", "createdate": "2021-10-12T12:57:13.584000+00:00", "days_to_close": null, "description": "This is a placeholder page installed by the Ubuntu release of the Lighttpd server package.", "domain": "test.domain.net", "engagements_last_meeting_booked": null, "engagements_last_meeting_booked_campaign": null, "engagements_last_meeting_booked_medium": null, "engagements_last_meeting_booked_source": null, "facebook_company_page": null, "facebookfans": null, "first_contact_createdate": null, "first_contact_createdate_timestamp_earliest_value_78b50eea": null, "first_conversion_date": null, "first_conversion_date_timestamp_earliest_value_61f58f2c": null, "first_conversion_event_name": null, 
"first_conversion_event_name_timestamp_earliest_value_68ddae0a": null, "first_deal_created_date": null, "founded_year": null, "googleplus_page": null, "hs_additional_domains": null, "hs_all_accessible_team_ids": null, "hs_all_assigned_business_unit_ids": null, "hs_all_owner_ids": null, "hs_all_team_ids": null, "hs_analytics_first_timestamp": null, "hs_analytics_first_timestamp_timestamp_earliest_value_11e3a63a": null, "hs_analytics_first_touch_converting_campaign": null, "hs_analytics_first_touch_converting_campaign_timestamp_earliest_value_4757fe10": null, "hs_analytics_first_visit_timestamp": null, "hs_analytics_first_visit_timestamp_timestamp_earliest_value_accc17ae": null, "hs_analytics_last_timestamp": null, "hs_analytics_last_timestamp_timestamp_latest_value_4e16365a": null, "hs_analytics_last_touch_converting_campaign": null, "hs_analytics_last_touch_converting_campaign_timestamp_latest_value_81a64e30": null, "hs_analytics_last_visit_timestamp": null, "hs_analytics_last_visit_timestamp_timestamp_latest_value_999a0fce": null, "hs_analytics_num_page_views": null, "hs_analytics_num_page_views_cardinality_sum_e46e85b0": null, "hs_analytics_num_visits": null, "hs_analytics_num_visits_cardinality_sum_53d952a6": null, "hs_analytics_source": null, "hs_analytics_source_data_1": null, "hs_analytics_source_data_1_timestamp_earliest_value_9b2f1fa1": null, "hs_analytics_source_data_2": null, "hs_analytics_source_data_2_timestamp_earliest_value_9b2f9400": null, "hs_analytics_source_timestamp_earliest_value_25a3a52c": null, "hs_avatar_filemanager_key": null, "hs_created_by_user_id": null, "hs_createdate": null, "hs_date_entered_customer": null, "hs_date_entered_evangelist": null, "hs_date_entered_lead": null, "hs_date_entered_marketingqualifiedlead": null, "hs_date_entered_opportunity": null, "hs_date_entered_other": null, "hs_date_entered_salesqualifiedlead": null, "hs_date_entered_subscriber": null, "hs_date_exited_customer": null, "hs_date_exited_evangelist": null, 
"hs_date_exited_lead": null, "hs_date_exited_marketingqualifiedlead": null, "hs_date_exited_opportunity": null, "hs_date_exited_other": null, "hs_date_exited_salesqualifiedlead": null, "hs_date_exited_subscriber": null, "hs_ideal_customer_profile": null, "hs_is_target_account": null, "hs_last_booked_meeting_date": null, "hs_last_logged_call_date": null, "hs_last_open_task_date": null, "hs_last_sales_activity_date": null, "hs_last_sales_activity_timestamp": null, "hs_lastmodifieddate": "2021-10-12T12:58:15.868000+00:00", "hs_latest_createdate_of_active_subscriptions": null, "hs_latest_meeting_activity": null, "hs_lead_status": null, "hs_merged_object_ids": null, "hs_num_blockers": 0.0, "hs_num_child_companies": 0.0, "hs_num_contacts_with_buying_roles": 0.0, "hs_num_decision_makers": 0.0, "hs_num_open_deals": 0.0, "hs_object_id": 7097763880, "hs_parent_company_id": null, "hs_pipeline": null, "hs_predictivecontactscore_v2": null, "hs_predictivecontactscore_v2_next_max_max_d4e58c1e": null, "hs_sales_email_last_replied": null, "hs_target_account": null, "hs_target_account_probability": 0.4076234698295593, "hs_target_account_recommendation_snooze_time": null, "hs_target_account_recommendation_state": null, "hs_time_in_customer": null, "hs_time_in_evangelist": null, "hs_time_in_lead": null, "hs_time_in_marketingqualifiedlead": null, "hs_time_in_opportunity": null, "hs_time_in_other": null, "hs_time_in_salesqualifiedlead": null, "hs_time_in_subscriber": null, "hs_total_deal_value": null, "hs_unique_creation_key": null, "hs_updated_by_user_id": null, "hs_user_ids_of_all_notification_followers": null, "hs_user_ids_of_all_notification_unfollowers": null, "hs_user_ids_of_all_owners": null, "hubspot_owner_assigneddate": null, "hubspot_owner_id": null, "hubspot_team_id": null, "hubspotscore": null, "industry": "Airbyte Test", "is_public": true, "lifecyclestage": null, "linkedin_company_page": "https://www.linkedin.com/company/bd-domain", "linkedinbio": null, "name": "test 
company 4", "notes_last_contacted": null, "notes_last_updated": null, "notes_next_activity_date": null, "num_associated_contacts": 0.0, "num_associated_deals": null, "num_contacted_notes": null, "num_conversion_events": null, "num_conversion_events_cardinality_sum_d095f14b": null, "num_notes": null, "numberofemployees": null, "phone": "(877) 929-0687-00291", "recent_conversion_date": null, "recent_conversion_date_timestamp_latest_value_72856da1": null, "recent_conversion_event_name": null, "recent_conversion_event_name_timestamp_latest_value_66c820bf": null, "recent_deal_amount": null, "recent_deal_close_date": null, "state": "Massachusetts", "timezone": "Asia/Dhaka", "total_money_raised": null, "total_revenue": null, "twitterbio": null, "twitterfollowers": null, "twitterhandle": null, "type": null, "web_technologies": "apache;nginx;facebook_social_plugins;facebook_connect;google_analytics;facebook_like_button;centos", "website": "test.domain.net", "zip": "1216"}, "createdAt": "2021-10-12T12:57:13.584Z", "updatedAt": "2021-10-12T12:58:15.868Z", "archived": false}, "emitted_at": 1655280716910} +{"stream": "companies", "data": {"id": "7097763893", "properties": {"about_us": null, "address": null, "address2": null, "annualrevenue": null, "city": "Test City 30", "closedate": null, "closedate_timestamp_earliest_value_a2a17e6e": null, "country": "Bangladesh", "createdate": "2021-10-12T12:57:22.543000+00:00", "days_to_close": null, "description": "This is a placeholder page installed by the Ubuntu release of the Lighttpd server package.", "domain": "test.domain.net", "engagements_last_meeting_booked": null, "engagements_last_meeting_booked_campaign": null, "engagements_last_meeting_booked_medium": null, "engagements_last_meeting_booked_source": null, "facebook_company_page": null, "facebookfans": null, "first_contact_createdate": null, "first_contact_createdate_timestamp_earliest_value_78b50eea": null, "first_conversion_date": null, 
"first_conversion_date_timestamp_earliest_value_61f58f2c": null, "first_conversion_event_name": null, "first_conversion_event_name_timestamp_earliest_value_68ddae0a": null, "first_deal_created_date": null, "founded_year": null, "googleplus_page": null, "hs_additional_domains": null, "hs_all_accessible_team_ids": null, "hs_all_assigned_business_unit_ids": null, "hs_all_owner_ids": null, "hs_all_team_ids": null, "hs_analytics_first_timestamp": null, "hs_analytics_first_timestamp_timestamp_earliest_value_11e3a63a": null, "hs_analytics_first_touch_converting_campaign": null, "hs_analytics_first_touch_converting_campaign_timestamp_earliest_value_4757fe10": null, "hs_analytics_first_visit_timestamp": null, "hs_analytics_first_visit_timestamp_timestamp_earliest_value_accc17ae": null, "hs_analytics_last_timestamp": null, "hs_analytics_last_timestamp_timestamp_latest_value_4e16365a": null, "hs_analytics_last_touch_converting_campaign": null, "hs_analytics_last_touch_converting_campaign_timestamp_latest_value_81a64e30": null, "hs_analytics_last_visit_timestamp": null, "hs_analytics_last_visit_timestamp_timestamp_latest_value_999a0fce": null, "hs_analytics_num_page_views": null, "hs_analytics_num_page_views_cardinality_sum_e46e85b0": null, "hs_analytics_num_visits": null, "hs_analytics_num_visits_cardinality_sum_53d952a6": null, "hs_analytics_source": null, "hs_analytics_source_data_1": null, "hs_analytics_source_data_1_timestamp_earliest_value_9b2f1fa1": null, "hs_analytics_source_data_2": null, "hs_analytics_source_data_2_timestamp_earliest_value_9b2f9400": null, "hs_analytics_source_timestamp_earliest_value_25a3a52c": null, "hs_avatar_filemanager_key": null, "hs_created_by_user_id": null, "hs_createdate": null, "hs_date_entered_customer": null, "hs_date_entered_evangelist": null, "hs_date_entered_lead": null, "hs_date_entered_marketingqualifiedlead": null, "hs_date_entered_opportunity": null, "hs_date_entered_other": null, "hs_date_entered_salesqualifiedlead": null, 
"hs_date_entered_subscriber": null, "hs_date_exited_customer": null, "hs_date_exited_evangelist": null, "hs_date_exited_lead": null, "hs_date_exited_marketingqualifiedlead": null, "hs_date_exited_opportunity": null, "hs_date_exited_other": null, "hs_date_exited_salesqualifiedlead": null, "hs_date_exited_subscriber": null, "hs_ideal_customer_profile": null, "hs_is_target_account": null, "hs_last_booked_meeting_date": null, "hs_last_logged_call_date": null, "hs_last_open_task_date": null, "hs_last_sales_activity_date": null, "hs_last_sales_activity_timestamp": null, "hs_lastmodifieddate": "2021-10-12T12:57:41.899000+00:00", "hs_latest_createdate_of_active_subscriptions": null, "hs_latest_meeting_activity": null, "hs_lead_status": null, "hs_merged_object_ids": null, "hs_num_blockers": 0.0, "hs_num_child_companies": 0.0, "hs_num_contacts_with_buying_roles": 0.0, "hs_num_decision_makers": 0.0, "hs_num_open_deals": 0.0, "hs_object_id": 7097763893, "hs_parent_company_id": null, "hs_pipeline": null, "hs_predictivecontactscore_v2": null, "hs_predictivecontactscore_v2_next_max_max_d4e58c1e": null, "hs_sales_email_last_replied": null, "hs_target_account": null, "hs_target_account_probability": 0.4076234698295593, "hs_target_account_recommendation_snooze_time": null, "hs_target_account_recommendation_state": null, "hs_time_in_customer": null, "hs_time_in_evangelist": null, "hs_time_in_lead": null, "hs_time_in_marketingqualifiedlead": null, "hs_time_in_opportunity": null, "hs_time_in_other": null, "hs_time_in_salesqualifiedlead": null, "hs_time_in_subscriber": null, "hs_total_deal_value": null, "hs_unique_creation_key": null, "hs_updated_by_user_id": null, "hs_user_ids_of_all_notification_followers": null, "hs_user_ids_of_all_notification_unfollowers": null, "hs_user_ids_of_all_owners": null, "hubspot_owner_assigneddate": null, "hubspot_owner_id": null, "hubspot_team_id": null, "hubspotscore": null, "industry": "Airbyte Test", "is_public": true, "lifecyclestage": null, 
"linkedin_company_page": "https://www.linkedin.com/company/bd-domain", "linkedinbio": null, "name": "test company 30", "notes_last_contacted": null, "notes_last_updated": null, "notes_next_activity_date": null, "num_associated_contacts": 0.0, "num_associated_deals": null, "num_contacted_notes": null, "num_conversion_events": null, "num_conversion_events_cardinality_sum_d095f14b": null, "num_notes": null, "numberofemployees": null, "phone": "(877) 929-0687-00291", "recent_conversion_date": null, "recent_conversion_date_timestamp_latest_value_72856da1": null, "recent_conversion_event_name": null, "recent_conversion_event_name_timestamp_latest_value_66c820bf": null, "recent_deal_amount": null, "recent_deal_close_date": null, "state": "Massachusetts", "timezone": "Asia/Dhaka", "total_money_raised": null, "total_revenue": null, "twitterbio": null, "twitterfollowers": null, "twitterhandle": null, "type": null, "web_technologies": "apache;nginx;facebook_social_plugins;facebook_connect;google_analytics;facebook_like_button;centos", "website": "test.domain.net", "zip": "1216"}, "createdAt": "2021-10-12T12:57:22.543Z", "updatedAt": "2021-10-12T12:57:41.899Z", "archived": false}, "emitted_at": 1655280716911} +{"stream": "companies", "data": {"id": "7097763894", "properties": {"about_us": null, "address": null, "address2": null, "annualrevenue": null, "city": "Test City 31", "closedate": null, "closedate_timestamp_earliest_value_a2a17e6e": null, "country": "Bangladesh", "createdate": "2021-10-12T12:57:22.897000+00:00", "days_to_close": null, "description": "This is a placeholder page installed by the Ubuntu release of the Lighttpd server package.", "domain": "test.domain.net", "engagements_last_meeting_booked": null, "engagements_last_meeting_booked_campaign": null, "engagements_last_meeting_booked_medium": null, "engagements_last_meeting_booked_source": null, "facebook_company_page": null, "facebookfans": null, "first_contact_createdate": null, 
"first_contact_createdate_timestamp_earliest_value_78b50eea": null, "first_conversion_date": null, "first_conversion_date_timestamp_earliest_value_61f58f2c": null, "first_conversion_event_name": null, "first_conversion_event_name_timestamp_earliest_value_68ddae0a": null, "first_deal_created_date": null, "founded_year": null, "googleplus_page": null, "hs_additional_domains": null, "hs_all_accessible_team_ids": null, "hs_all_assigned_business_unit_ids": null, "hs_all_owner_ids": null, "hs_all_team_ids": null, "hs_analytics_first_timestamp": null, "hs_analytics_first_timestamp_timestamp_earliest_value_11e3a63a": null, "hs_analytics_first_touch_converting_campaign": null, "hs_analytics_first_touch_converting_campaign_timestamp_earliest_value_4757fe10": null, "hs_analytics_first_visit_timestamp": null, "hs_analytics_first_visit_timestamp_timestamp_earliest_value_accc17ae": null, "hs_analytics_last_timestamp": null, "hs_analytics_last_timestamp_timestamp_latest_value_4e16365a": null, "hs_analytics_last_touch_converting_campaign": null, "hs_analytics_last_touch_converting_campaign_timestamp_latest_value_81a64e30": null, "hs_analytics_last_visit_timestamp": null, "hs_analytics_last_visit_timestamp_timestamp_latest_value_999a0fce": null, "hs_analytics_num_page_views": null, "hs_analytics_num_page_views_cardinality_sum_e46e85b0": null, "hs_analytics_num_visits": null, "hs_analytics_num_visits_cardinality_sum_53d952a6": null, "hs_analytics_source": null, "hs_analytics_source_data_1": null, "hs_analytics_source_data_1_timestamp_earliest_value_9b2f1fa1": null, "hs_analytics_source_data_2": null, "hs_analytics_source_data_2_timestamp_earliest_value_9b2f9400": null, "hs_analytics_source_timestamp_earliest_value_25a3a52c": null, "hs_avatar_filemanager_key": null, "hs_created_by_user_id": null, "hs_createdate": null, "hs_date_entered_customer": null, "hs_date_entered_evangelist": null, "hs_date_entered_lead": null, "hs_date_entered_marketingqualifiedlead": null, 
"hs_date_entered_opportunity": null, "hs_date_entered_other": null, "hs_date_entered_salesqualifiedlead": null, "hs_date_entered_subscriber": null, "hs_date_exited_customer": null, "hs_date_exited_evangelist": null, "hs_date_exited_lead": null, "hs_date_exited_marketingqualifiedlead": null, "hs_date_exited_opportunity": null, "hs_date_exited_other": null, "hs_date_exited_salesqualifiedlead": null, "hs_date_exited_subscriber": null, "hs_ideal_customer_profile": null, "hs_is_target_account": null, "hs_last_booked_meeting_date": null, "hs_last_logged_call_date": null, "hs_last_open_task_date": null, "hs_last_sales_activity_date": null, "hs_last_sales_activity_timestamp": null, "hs_lastmodifieddate": "2021-10-12T12:57:31.969000+00:00", "hs_latest_createdate_of_active_subscriptions": null, "hs_latest_meeting_activity": null, "hs_lead_status": null, "hs_merged_object_ids": null, "hs_num_blockers": 0.0, "hs_num_child_companies": 0.0, "hs_num_contacts_with_buying_roles": 0.0, "hs_num_decision_makers": 0.0, "hs_num_open_deals": 0.0, "hs_object_id": 7097763894, "hs_parent_company_id": null, "hs_pipeline": null, "hs_predictivecontactscore_v2": null, "hs_predictivecontactscore_v2_next_max_max_d4e58c1e": null, "hs_sales_email_last_replied": null, "hs_target_account": null, "hs_target_account_probability": 0.4076234698295593, "hs_target_account_recommendation_snooze_time": null, "hs_target_account_recommendation_state": null, "hs_time_in_customer": null, "hs_time_in_evangelist": null, "hs_time_in_lead": null, "hs_time_in_marketingqualifiedlead": null, "hs_time_in_opportunity": null, "hs_time_in_other": null, "hs_time_in_salesqualifiedlead": null, "hs_time_in_subscriber": null, "hs_total_deal_value": null, "hs_unique_creation_key": null, "hs_updated_by_user_id": null, "hs_user_ids_of_all_notification_followers": null, "hs_user_ids_of_all_notification_unfollowers": null, "hs_user_ids_of_all_owners": null, "hubspot_owner_assigneddate": null, "hubspot_owner_id": null, 
"hubspot_team_id": null, "hubspotscore": null, "industry": "Airbyte Test", "is_public": true, "lifecyclestage": null, "linkedin_company_page": "https://www.linkedin.com/company/bd-domain", "linkedinbio": null, "name": "test company 31", "notes_last_contacted": null, "notes_last_updated": null, "notes_next_activity_date": null, "num_associated_contacts": 0.0, "num_associated_deals": null, "num_contacted_notes": null, "num_conversion_events": null, "num_conversion_events_cardinality_sum_d095f14b": null, "num_notes": null, "numberofemployees": null, "phone": "(877) 929-0687-00291", "recent_conversion_date": null, "recent_conversion_date_timestamp_latest_value_72856da1": null, "recent_conversion_event_name": null, "recent_conversion_event_name_timestamp_latest_value_66c820bf": null, "recent_deal_amount": null, "recent_deal_close_date": null, "state": "Massachusetts", "timezone": "Asia/Dhaka", "total_money_raised": null, "total_revenue": null, "twitterbio": null, "twitterfollowers": null, "twitterhandle": null, "type": null, "web_technologies": "apache;nginx;facebook_social_plugins;facebook_connect;google_analytics;facebook_like_button;centos", "website": "test.domain.net", "zip": "1216"}, "createdAt": "2021-10-12T12:57:22.897Z", "updatedAt": "2021-10-12T12:57:31.969Z", "archived": false}, "emitted_at": 1655280716911} +{"stream": "companies", "data": {"id": "7097763908", "properties": {"about_us": null, "address": null, "address2": null, "annualrevenue": null, "city": "Test City 65", "closedate": null, "closedate_timestamp_earliest_value_a2a17e6e": null, "country": "Bangladesh", "createdate": "2021-10-12T12:57:34.177000+00:00", "days_to_close": null, "description": "This is a placeholder page installed by the Ubuntu release of the Lighttpd server package.", "domain": "test.domain.net", "engagements_last_meeting_booked": null, "engagements_last_meeting_booked_campaign": null, "engagements_last_meeting_booked_medium": null, "engagements_last_meeting_booked_source": null, 
"facebook_company_page": null, "facebookfans": null, "first_contact_createdate": null, "first_contact_createdate_timestamp_earliest_value_78b50eea": null, "first_conversion_date": null, "first_conversion_date_timestamp_earliest_value_61f58f2c": null, "first_conversion_event_name": null, "first_conversion_event_name_timestamp_earliest_value_68ddae0a": null, "first_deal_created_date": null, "founded_year": null, "googleplus_page": null, "hs_additional_domains": null, "hs_all_accessible_team_ids": null, "hs_all_assigned_business_unit_ids": null, "hs_all_owner_ids": null, "hs_all_team_ids": null, "hs_analytics_first_timestamp": null, "hs_analytics_first_timestamp_timestamp_earliest_value_11e3a63a": null, "hs_analytics_first_touch_converting_campaign": null, "hs_analytics_first_touch_converting_campaign_timestamp_earliest_value_4757fe10": null, "hs_analytics_first_visit_timestamp": null, "hs_analytics_first_visit_timestamp_timestamp_earliest_value_accc17ae": null, "hs_analytics_last_timestamp": null, "hs_analytics_last_timestamp_timestamp_latest_value_4e16365a": null, "hs_analytics_last_touch_converting_campaign": null, "hs_analytics_last_touch_converting_campaign_timestamp_latest_value_81a64e30": null, "hs_analytics_last_visit_timestamp": null, "hs_analytics_last_visit_timestamp_timestamp_latest_value_999a0fce": null, "hs_analytics_num_page_views": null, "hs_analytics_num_page_views_cardinality_sum_e46e85b0": null, "hs_analytics_num_visits": null, "hs_analytics_num_visits_cardinality_sum_53d952a6": null, "hs_analytics_source": null, "hs_analytics_source_data_1": null, "hs_analytics_source_data_1_timestamp_earliest_value_9b2f1fa1": null, "hs_analytics_source_data_2": null, "hs_analytics_source_data_2_timestamp_earliest_value_9b2f9400": null, "hs_analytics_source_timestamp_earliest_value_25a3a52c": null, "hs_avatar_filemanager_key": null, "hs_created_by_user_id": null, "hs_createdate": null, "hs_date_entered_customer": null, "hs_date_entered_evangelist": null, 
"hs_date_entered_lead": null, "hs_date_entered_marketingqualifiedlead": null, "hs_date_entered_opportunity": null, "hs_date_entered_other": null, "hs_date_entered_salesqualifiedlead": null, "hs_date_entered_subscriber": null, "hs_date_exited_customer": null, "hs_date_exited_evangelist": null, "hs_date_exited_lead": null, "hs_date_exited_marketingqualifiedlead": null, "hs_date_exited_opportunity": null, "hs_date_exited_other": null, "hs_date_exited_salesqualifiedlead": null, "hs_date_exited_subscriber": null, "hs_ideal_customer_profile": null, "hs_is_target_account": null, "hs_last_booked_meeting_date": null, "hs_last_logged_call_date": null, "hs_last_open_task_date": null, "hs_last_sales_activity_date": null, "hs_last_sales_activity_timestamp": null, "hs_lastmodifieddate": "2021-10-12T12:57:51.068000+00:00", "hs_latest_createdate_of_active_subscriptions": null, "hs_latest_meeting_activity": null, "hs_lead_status": null, "hs_merged_object_ids": null, "hs_num_blockers": 0.0, "hs_num_child_companies": 0.0, "hs_num_contacts_with_buying_roles": 0.0, "hs_num_decision_makers": 0.0, "hs_num_open_deals": 0.0, "hs_object_id": 7097763908, "hs_parent_company_id": null, "hs_pipeline": null, "hs_predictivecontactscore_v2": null, "hs_predictivecontactscore_v2_next_max_max_d4e58c1e": null, "hs_sales_email_last_replied": null, "hs_target_account": null, "hs_target_account_probability": 0.4076234698295593, "hs_target_account_recommendation_snooze_time": null, "hs_target_account_recommendation_state": null, "hs_time_in_customer": null, "hs_time_in_evangelist": null, "hs_time_in_lead": null, "hs_time_in_marketingqualifiedlead": null, "hs_time_in_opportunity": null, "hs_time_in_other": null, "hs_time_in_salesqualifiedlead": null, "hs_time_in_subscriber": null, "hs_total_deal_value": null, "hs_unique_creation_key": null, "hs_updated_by_user_id": null, "hs_user_ids_of_all_notification_followers": null, "hs_user_ids_of_all_notification_unfollowers": null, "hs_user_ids_of_all_owners": 
null, "hubspot_owner_assigneddate": null, "hubspot_owner_id": null, "hubspot_team_id": null, "hubspotscore": null, "industry": "Airbyte Test", "is_public": true, "lifecyclestage": null, "linkedin_company_page": "https://www.linkedin.com/company/bd-domain", "linkedinbio": null, "name": "test company 65", "notes_last_contacted": null, "notes_last_updated": null, "notes_next_activity_date": null, "num_associated_contacts": 0.0, "num_associated_deals": null, "num_contacted_notes": null, "num_conversion_events": null, "num_conversion_events_cardinality_sum_d095f14b": null, "num_notes": null, "numberofemployees": null, "phone": "(877) 929-0687-00291", "recent_conversion_date": null, "recent_conversion_date_timestamp_latest_value_72856da1": null, "recent_conversion_event_name": null, "recent_conversion_event_name_timestamp_latest_value_66c820bf": null, "recent_deal_amount": null, "recent_deal_close_date": null, "state": "Massachusetts", "timezone": "Asia/Dhaka", "total_money_raised": null, "total_revenue": null, "twitterbio": null, "twitterfollowers": null, "twitterhandle": null, "type": null, "web_technologies": "apache;nginx;facebook_social_plugins;facebook_connect;google_analytics;facebook_like_button;centos", "website": "test.domain.net", "zip": "1216"}, "createdAt": "2021-10-12T12:57:34.177Z", "updatedAt": "2021-10-12T12:57:51.068Z", "archived": false}, "emitted_at": 1655280716912} +{"stream": "companies", "data": {"id": "7097763912", "properties": {"about_us": null, "address": null, "address2": null, "annualrevenue": null, "city": "Test City 79", "closedate": null, "closedate_timestamp_earliest_value_a2a17e6e": null, "country": "Bangladesh", "createdate": "2021-10-12T12:57:39.261000+00:00", "days_to_close": null, "description": "This is a placeholder page installed by the Ubuntu release of the Lighttpd server package.", "domain": "test.domain.net", "engagements_last_meeting_booked": null, "engagements_last_meeting_booked_campaign": null, 
"engagements_last_meeting_booked_medium": null, "engagements_last_meeting_booked_source": null, "facebook_company_page": null, "facebookfans": null, "first_contact_createdate": null, "first_contact_createdate_timestamp_earliest_value_78b50eea": null, "first_conversion_date": null, "first_conversion_date_timestamp_earliest_value_61f58f2c": null, "first_conversion_event_name": null, "first_conversion_event_name_timestamp_earliest_value_68ddae0a": null, "first_deal_created_date": null, "founded_year": null, "googleplus_page": null, "hs_additional_domains": null, "hs_all_accessible_team_ids": null, "hs_all_assigned_business_unit_ids": null, "hs_all_owner_ids": null, "hs_all_team_ids": null, "hs_analytics_first_timestamp": null, "hs_analytics_first_timestamp_timestamp_earliest_value_11e3a63a": null, "hs_analytics_first_touch_converting_campaign": null, "hs_analytics_first_touch_converting_campaign_timestamp_earliest_value_4757fe10": null, "hs_analytics_first_visit_timestamp": null, "hs_analytics_first_visit_timestamp_timestamp_earliest_value_accc17ae": null, "hs_analytics_last_timestamp": null, "hs_analytics_last_timestamp_timestamp_latest_value_4e16365a": null, "hs_analytics_last_touch_converting_campaign": null, "hs_analytics_last_touch_converting_campaign_timestamp_latest_value_81a64e30": null, "hs_analytics_last_visit_timestamp": null, "hs_analytics_last_visit_timestamp_timestamp_latest_value_999a0fce": null, "hs_analytics_num_page_views": null, "hs_analytics_num_page_views_cardinality_sum_e46e85b0": null, "hs_analytics_num_visits": null, "hs_analytics_num_visits_cardinality_sum_53d952a6": null, "hs_analytics_source": null, "hs_analytics_source_data_1": null, "hs_analytics_source_data_1_timestamp_earliest_value_9b2f1fa1": null, "hs_analytics_source_data_2": null, "hs_analytics_source_data_2_timestamp_earliest_value_9b2f9400": null, "hs_analytics_source_timestamp_earliest_value_25a3a52c": null, "hs_avatar_filemanager_key": null, "hs_created_by_user_id": null, 
"hs_createdate": null, "hs_date_entered_customer": null, "hs_date_entered_evangelist": null, "hs_date_entered_lead": null, "hs_date_entered_marketingqualifiedlead": null, "hs_date_entered_opportunity": null, "hs_date_entered_other": null, "hs_date_entered_salesqualifiedlead": null, "hs_date_entered_subscriber": null, "hs_date_exited_customer": null, "hs_date_exited_evangelist": null, "hs_date_exited_lead": null, "hs_date_exited_marketingqualifiedlead": null, "hs_date_exited_opportunity": null, "hs_date_exited_other": null, "hs_date_exited_salesqualifiedlead": null, "hs_date_exited_subscriber": null, "hs_ideal_customer_profile": null, "hs_is_target_account": null, "hs_last_booked_meeting_date": null, "hs_last_logged_call_date": null, "hs_last_open_task_date": null, "hs_last_sales_activity_date": null, "hs_last_sales_activity_timestamp": null, "hs_lastmodifieddate": "2021-10-12T12:57:52.196000+00:00", "hs_latest_createdate_of_active_subscriptions": null, "hs_latest_meeting_activity": null, "hs_lead_status": null, "hs_merged_object_ids": null, "hs_num_blockers": 0.0, "hs_num_child_companies": 0.0, "hs_num_contacts_with_buying_roles": 0.0, "hs_num_decision_makers": 0.0, "hs_num_open_deals": 0.0, "hs_object_id": 7097763912, "hs_parent_company_id": null, "hs_pipeline": null, "hs_predictivecontactscore_v2": null, "hs_predictivecontactscore_v2_next_max_max_d4e58c1e": null, "hs_sales_email_last_replied": null, "hs_target_account": null, "hs_target_account_probability": 0.4076234698295593, "hs_target_account_recommendation_snooze_time": null, "hs_target_account_recommendation_state": null, "hs_time_in_customer": null, "hs_time_in_evangelist": null, "hs_time_in_lead": null, "hs_time_in_marketingqualifiedlead": null, "hs_time_in_opportunity": null, "hs_time_in_other": null, "hs_time_in_salesqualifiedlead": null, "hs_time_in_subscriber": null, "hs_total_deal_value": null, "hs_unique_creation_key": null, "hs_updated_by_user_id": null, "hs_user_ids_of_all_notification_followers": 
null, "hs_user_ids_of_all_notification_unfollowers": null, "hs_user_ids_of_all_owners": null, "hubspot_owner_assigneddate": null, "hubspot_owner_id": null, "hubspot_team_id": null, "hubspotscore": null, "industry": "Airbyte Test", "is_public": true, "lifecyclestage": null, "linkedin_company_page": "https://www.linkedin.com/company/bd-domain", "linkedinbio": null, "name": "test company 79", "notes_last_contacted": null, "notes_last_updated": null, "notes_next_activity_date": null, "num_associated_contacts": 0.0, "num_associated_deals": null, "num_contacted_notes": null, "num_conversion_events": null, "num_conversion_events_cardinality_sum_d095f14b": null, "num_notes": null, "numberofemployees": null, "phone": "(877) 929-0687-00291", "recent_conversion_date": null, "recent_conversion_date_timestamp_latest_value_72856da1": null, "recent_conversion_event_name": null, "recent_conversion_event_name_timestamp_latest_value_66c820bf": null, "recent_deal_amount": null, "recent_deal_close_date": null, "state": "Massachusetts", "timezone": "Asia/Dhaka", "total_money_raised": null, "total_revenue": null, "twitterbio": null, "twitterfollowers": null, "twitterhandle": null, "type": null, "web_technologies": "apache;nginx;facebook_social_plugins;facebook_connect;google_analytics;facebook_like_button;centos", "website": "test.domain.net", "zip": "1216"}, "createdAt": "2021-10-12T12:57:39.261Z", "updatedAt": "2021-10-12T12:57:52.196Z", "archived": false}, "emitted_at": 1655280716912} +{"stream": "companies", "data": {"id": "7097767570", "properties": {"about_us": null, "address": null, "address2": null, "annualrevenue": null, "city": "Test City 51", "closedate": null, "closedate_timestamp_earliest_value_a2a17e6e": null, "country": "Bangladesh", "createdate": "2021-10-12T12:57:29.860000+00:00", "days_to_close": null, "description": "This is a placeholder page installed by the Ubuntu release of the Lighttpd server package.", "domain": "test.domain.net", 
"engagements_last_meeting_booked": null, "engagements_last_meeting_booked_campaign": null, "engagements_last_meeting_booked_medium": null, "engagements_last_meeting_booked_source": null, "facebook_company_page": null, "facebookfans": null, "first_contact_createdate": null, "first_contact_createdate_timestamp_earliest_value_78b50eea": null, "first_conversion_date": null, "first_conversion_date_timestamp_earliest_value_61f58f2c": null, "first_conversion_event_name": null, "first_conversion_event_name_timestamp_earliest_value_68ddae0a": null, "first_deal_created_date": null, "founded_year": null, "googleplus_page": null, "hs_additional_domains": null, "hs_all_accessible_team_ids": null, "hs_all_assigned_business_unit_ids": null, "hs_all_owner_ids": null, "hs_all_team_ids": null, "hs_analytics_first_timestamp": null, "hs_analytics_first_timestamp_timestamp_earliest_value_11e3a63a": null, "hs_analytics_first_touch_converting_campaign": null, "hs_analytics_first_touch_converting_campaign_timestamp_earliest_value_4757fe10": null, "hs_analytics_first_visit_timestamp": null, "hs_analytics_first_visit_timestamp_timestamp_earliest_value_accc17ae": null, "hs_analytics_last_timestamp": null, "hs_analytics_last_timestamp_timestamp_latest_value_4e16365a": null, "hs_analytics_last_touch_converting_campaign": null, "hs_analytics_last_touch_converting_campaign_timestamp_latest_value_81a64e30": null, "hs_analytics_last_visit_timestamp": null, "hs_analytics_last_visit_timestamp_timestamp_latest_value_999a0fce": null, "hs_analytics_num_page_views": null, "hs_analytics_num_page_views_cardinality_sum_e46e85b0": null, "hs_analytics_num_visits": null, "hs_analytics_num_visits_cardinality_sum_53d952a6": null, "hs_analytics_source": null, "hs_analytics_source_data_1": null, "hs_analytics_source_data_1_timestamp_earliest_value_9b2f1fa1": null, "hs_analytics_source_data_2": null, "hs_analytics_source_data_2_timestamp_earliest_value_9b2f9400": null, 
"hs_analytics_source_timestamp_earliest_value_25a3a52c": null, "hs_avatar_filemanager_key": null, "hs_created_by_user_id": null, "hs_createdate": null, "hs_date_entered_customer": null, "hs_date_entered_evangelist": null, "hs_date_entered_lead": null, "hs_date_entered_marketingqualifiedlead": null, "hs_date_entered_opportunity": null, "hs_date_entered_other": null, "hs_date_entered_salesqualifiedlead": null, "hs_date_entered_subscriber": null, "hs_date_exited_customer": null, "hs_date_exited_evangelist": null, "hs_date_exited_lead": null, "hs_date_exited_marketingqualifiedlead": null, "hs_date_exited_opportunity": null, "hs_date_exited_other": null, "hs_date_exited_salesqualifiedlead": null, "hs_date_exited_subscriber": null, "hs_ideal_customer_profile": null, "hs_is_target_account": null, "hs_last_booked_meeting_date": null, "hs_last_logged_call_date": null, "hs_last_open_task_date": null, "hs_last_sales_activity_date": null, "hs_last_sales_activity_timestamp": null, "hs_lastmodifieddate": "2021-10-12T12:57:34.654000+00:00", "hs_latest_createdate_of_active_subscriptions": null, "hs_latest_meeting_activity": null, "hs_lead_status": null, "hs_merged_object_ids": null, "hs_num_blockers": 0.0, "hs_num_child_companies": 0.0, "hs_num_contacts_with_buying_roles": 0.0, "hs_num_decision_makers": 0.0, "hs_num_open_deals": 0.0, "hs_object_id": 7097767570, "hs_parent_company_id": null, "hs_pipeline": null, "hs_predictivecontactscore_v2": null, "hs_predictivecontactscore_v2_next_max_max_d4e58c1e": null, "hs_sales_email_last_replied": null, "hs_target_account": null, "hs_target_account_probability": 0.4076234698295593, "hs_target_account_recommendation_snooze_time": null, "hs_target_account_recommendation_state": null, "hs_time_in_customer": null, "hs_time_in_evangelist": null, "hs_time_in_lead": null, "hs_time_in_marketingqualifiedlead": null, "hs_time_in_opportunity": null, "hs_time_in_other": null, "hs_time_in_salesqualifiedlead": null, "hs_time_in_subscriber": null, 
"hs_total_deal_value": null, "hs_unique_creation_key": null, "hs_updated_by_user_id": null, "hs_user_ids_of_all_notification_followers": null, "hs_user_ids_of_all_notification_unfollowers": null, "hs_user_ids_of_all_owners": null, "hubspot_owner_assigneddate": null, "hubspot_owner_id": null, "hubspot_team_id": null, "hubspotscore": null, "industry": "Airbyte Test", "is_public": true, "lifecyclestage": null, "linkedin_company_page": "https://www.linkedin.com/company/bd-domain", "linkedinbio": null, "name": "test company 51", "notes_last_contacted": null, "notes_last_updated": null, "notes_next_activity_date": null, "num_associated_contacts": 0.0, "num_associated_deals": null, "num_contacted_notes": null, "num_conversion_events": null, "num_conversion_events_cardinality_sum_d095f14b": null, "num_notes": null, "numberofemployees": null, "phone": "(877) 929-0687-00291", "recent_conversion_date": null, "recent_conversion_date_timestamp_latest_value_72856da1": null, "recent_conversion_event_name": null, "recent_conversion_event_name_timestamp_latest_value_66c820bf": null, "recent_deal_amount": null, "recent_deal_close_date": null, "state": "Massachusetts", "timezone": "Asia/Dhaka", "total_money_raised": null, "total_revenue": null, "twitterbio": null, "twitterfollowers": null, "twitterhandle": null, "type": null, "web_technologies": "apache;nginx;facebook_social_plugins;facebook_connect;google_analytics;facebook_like_button;centos", "website": "test.domain.net", "zip": "1216"}, "createdAt": "2021-10-12T12:57:29.860Z", "updatedAt": "2021-10-12T12:57:34.654Z", "archived": false}, "emitted_at": 1655280716912} +{"stream": "companies", "data": {"id": "7097767572", "properties": {"about_us": null, "address": null, "address2": null, "annualrevenue": null, "city": "Test City 52", "closedate": null, "closedate_timestamp_earliest_value_a2a17e6e": null, "country": "Bangladesh", "createdate": "2021-10-12T12:57:30.196000+00:00", "days_to_close": null, "description": "This is a 
placeholder page installed by the Ubuntu release of the Lighttpd server package.", "domain": "test.domain.net", "engagements_last_meeting_booked": null, "engagements_last_meeting_booked_campaign": null, "engagements_last_meeting_booked_medium": null, "engagements_last_meeting_booked_source": null, "facebook_company_page": null, "facebookfans": null, "first_contact_createdate": null, "first_contact_createdate_timestamp_earliest_value_78b50eea": null, "first_conversion_date": null, "first_conversion_date_timestamp_earliest_value_61f58f2c": null, "first_conversion_event_name": null, "first_conversion_event_name_timestamp_earliest_value_68ddae0a": null, "first_deal_created_date": null, "founded_year": null, "googleplus_page": null, "hs_additional_domains": null, "hs_all_accessible_team_ids": null, "hs_all_assigned_business_unit_ids": null, "hs_all_owner_ids": null, "hs_all_team_ids": null, "hs_analytics_first_timestamp": null, "hs_analytics_first_timestamp_timestamp_earliest_value_11e3a63a": null, "hs_analytics_first_touch_converting_campaign": null, "hs_analytics_first_touch_converting_campaign_timestamp_earliest_value_4757fe10": null, "hs_analytics_first_visit_timestamp": null, "hs_analytics_first_visit_timestamp_timestamp_earliest_value_accc17ae": null, "hs_analytics_last_timestamp": null, "hs_analytics_last_timestamp_timestamp_latest_value_4e16365a": null, "hs_analytics_last_touch_converting_campaign": null, "hs_analytics_last_touch_converting_campaign_timestamp_latest_value_81a64e30": null, "hs_analytics_last_visit_timestamp": null, "hs_analytics_last_visit_timestamp_timestamp_latest_value_999a0fce": null, "hs_analytics_num_page_views": null, "hs_analytics_num_page_views_cardinality_sum_e46e85b0": null, "hs_analytics_num_visits": null, "hs_analytics_num_visits_cardinality_sum_53d952a6": null, "hs_analytics_source": null, "hs_analytics_source_data_1": null, "hs_analytics_source_data_1_timestamp_earliest_value_9b2f1fa1": null, "hs_analytics_source_data_2": null, 
"hs_analytics_source_data_2_timestamp_earliest_value_9b2f9400": null, "hs_analytics_source_timestamp_earliest_value_25a3a52c": null, "hs_avatar_filemanager_key": null, "hs_created_by_user_id": null, "hs_createdate": null, "hs_date_entered_customer": null, "hs_date_entered_evangelist": null, "hs_date_entered_lead": null, "hs_date_entered_marketingqualifiedlead": null, "hs_date_entered_opportunity": null, "hs_date_entered_other": null, "hs_date_entered_salesqualifiedlead": null, "hs_date_entered_subscriber": null, "hs_date_exited_customer": null, "hs_date_exited_evangelist": null, "hs_date_exited_lead": null, "hs_date_exited_marketingqualifiedlead": null, "hs_date_exited_opportunity": null, "hs_date_exited_other": null, "hs_date_exited_salesqualifiedlead": null, "hs_date_exited_subscriber": null, "hs_ideal_customer_profile": null, "hs_is_target_account": null, "hs_last_booked_meeting_date": null, "hs_last_logged_call_date": null, "hs_last_open_task_date": null, "hs_last_sales_activity_date": null, "hs_last_sales_activity_timestamp": null, "hs_lastmodifieddate": "2021-10-12T12:57:51.121000+00:00", "hs_latest_createdate_of_active_subscriptions": null, "hs_latest_meeting_activity": null, "hs_lead_status": null, "hs_merged_object_ids": null, "hs_num_blockers": 0.0, "hs_num_child_companies": 0.0, "hs_num_contacts_with_buying_roles": 0.0, "hs_num_decision_makers": 0.0, "hs_num_open_deals": 0.0, "hs_object_id": 7097767572, "hs_parent_company_id": null, "hs_pipeline": null, "hs_predictivecontactscore_v2": null, "hs_predictivecontactscore_v2_next_max_max_d4e58c1e": null, "hs_sales_email_last_replied": null, "hs_target_account": null, "hs_target_account_probability": 0.4076234698295593, "hs_target_account_recommendation_snooze_time": null, "hs_target_account_recommendation_state": null, "hs_time_in_customer": null, "hs_time_in_evangelist": null, "hs_time_in_lead": null, "hs_time_in_marketingqualifiedlead": null, "hs_time_in_opportunity": null, "hs_time_in_other": null, 
"hs_time_in_salesqualifiedlead": null, "hs_time_in_subscriber": null, "hs_total_deal_value": null, "hs_unique_creation_key": null, "hs_updated_by_user_id": null, "hs_user_ids_of_all_notification_followers": null, "hs_user_ids_of_all_notification_unfollowers": null, "hs_user_ids_of_all_owners": null, "hubspot_owner_assigneddate": null, "hubspot_owner_id": null, "hubspot_team_id": null, "hubspotscore": null, "industry": "Airbyte Test", "is_public": true, "lifecyclestage": null, "linkedin_company_page": "https://www.linkedin.com/company/bd-domain", "linkedinbio": null, "name": "test company 52", "notes_last_contacted": null, "notes_last_updated": null, "notes_next_activity_date": null, "num_associated_contacts": 0.0, "num_associated_deals": null, "num_contacted_notes": null, "num_conversion_events": null, "num_conversion_events_cardinality_sum_d095f14b": null, "num_notes": null, "numberofemployees": null, "phone": "(877) 929-0687-00291", "recent_conversion_date": null, "recent_conversion_date_timestamp_latest_value_72856da1": null, "recent_conversion_event_name": null, "recent_conversion_event_name_timestamp_latest_value_66c820bf": null, "recent_deal_amount": null, "recent_deal_close_date": null, "state": "Massachusetts", "timezone": "Asia/Dhaka", "total_money_raised": null, "total_revenue": null, "twitterbio": null, "twitterfollowers": null, "twitterhandle": null, "type": null, "web_technologies": "apache;nginx;facebook_social_plugins;facebook_connect;google_analytics;facebook_like_button;centos", "website": "test.domain.net", "zip": "1216"}, "createdAt": "2021-10-12T12:57:30.196Z", "updatedAt": "2021-10-12T12:57:51.121Z", "archived": false}, "emitted_at": 1655280716913} +{"stream": "companies", "data": {"id": "7097767580", "properties": {"about_us": null, "address": null, "address2": null, "annualrevenue": null, "city": "Test City 84", "closedate": null, "closedate_timestamp_earliest_value_a2a17e6e": null, "country": "Bangladesh", "createdate": 
"2021-10-12T12:57:41.126000+00:00", "days_to_close": null, "description": "This is a placeholder page installed by the Ubuntu release of the Lighttpd server package.", "domain": "test.domain.net", "engagements_last_meeting_booked": null, "engagements_last_meeting_booked_campaign": null, "engagements_last_meeting_booked_medium": null, "engagements_last_meeting_booked_source": null, "facebook_company_page": null, "facebookfans": null, "first_contact_createdate": null, "first_contact_createdate_timestamp_earliest_value_78b50eea": null, "first_conversion_date": null, "first_conversion_date_timestamp_earliest_value_61f58f2c": null, "first_conversion_event_name": null, "first_conversion_event_name_timestamp_earliest_value_68ddae0a": null, "first_deal_created_date": null, "founded_year": null, "googleplus_page": null, "hs_additional_domains": null, "hs_all_accessible_team_ids": null, "hs_all_assigned_business_unit_ids": null, "hs_all_owner_ids": null, "hs_all_team_ids": null, "hs_analytics_first_timestamp": null, "hs_analytics_first_timestamp_timestamp_earliest_value_11e3a63a": null, "hs_analytics_first_touch_converting_campaign": null, "hs_analytics_first_touch_converting_campaign_timestamp_earliest_value_4757fe10": null, "hs_analytics_first_visit_timestamp": null, "hs_analytics_first_visit_timestamp_timestamp_earliest_value_accc17ae": null, "hs_analytics_last_timestamp": null, "hs_analytics_last_timestamp_timestamp_latest_value_4e16365a": null, "hs_analytics_last_touch_converting_campaign": null, "hs_analytics_last_touch_converting_campaign_timestamp_latest_value_81a64e30": null, "hs_analytics_last_visit_timestamp": null, "hs_analytics_last_visit_timestamp_timestamp_latest_value_999a0fce": null, "hs_analytics_num_page_views": null, "hs_analytics_num_page_views_cardinality_sum_e46e85b0": null, "hs_analytics_num_visits": null, "hs_analytics_num_visits_cardinality_sum_53d952a6": null, "hs_analytics_source": null, "hs_analytics_source_data_1": null, 
"hs_analytics_source_data_1_timestamp_earliest_value_9b2f1fa1": null, "hs_analytics_source_data_2": null, "hs_analytics_source_data_2_timestamp_earliest_value_9b2f9400": null, "hs_analytics_source_timestamp_earliest_value_25a3a52c": null, "hs_avatar_filemanager_key": null, "hs_created_by_user_id": null, "hs_createdate": null, "hs_date_entered_customer": null, "hs_date_entered_evangelist": null, "hs_date_entered_lead": null, "hs_date_entered_marketingqualifiedlead": null, "hs_date_entered_opportunity": null, "hs_date_entered_other": null, "hs_date_entered_salesqualifiedlead": null, "hs_date_entered_subscriber": null, "hs_date_exited_customer": null, "hs_date_exited_evangelist": null, "hs_date_exited_lead": null, "hs_date_exited_marketingqualifiedlead": null, "hs_date_exited_opportunity": null, "hs_date_exited_other": null, "hs_date_exited_salesqualifiedlead": null, "hs_date_exited_subscriber": null, "hs_ideal_customer_profile": null, "hs_is_target_account": null, "hs_last_booked_meeting_date": null, "hs_last_logged_call_date": null, "hs_last_open_task_date": null, "hs_last_sales_activity_date": null, "hs_last_sales_activity_timestamp": null, "hs_lastmodifieddate": "2021-10-12T12:57:53.455000+00:00", "hs_latest_createdate_of_active_subscriptions": null, "hs_latest_meeting_activity": null, "hs_lead_status": null, "hs_merged_object_ids": null, "hs_num_blockers": 0.0, "hs_num_child_companies": 0.0, "hs_num_contacts_with_buying_roles": 0.0, "hs_num_decision_makers": 0.0, "hs_num_open_deals": 0.0, "hs_object_id": 7097767580, "hs_parent_company_id": null, "hs_pipeline": null, "hs_predictivecontactscore_v2": null, "hs_predictivecontactscore_v2_next_max_max_d4e58c1e": null, "hs_sales_email_last_replied": null, "hs_target_account": null, "hs_target_account_probability": 0.4076234698295593, "hs_target_account_recommendation_snooze_time": null, "hs_target_account_recommendation_state": null, "hs_time_in_customer": null, "hs_time_in_evangelist": null, "hs_time_in_lead": null, 
"hs_time_in_marketingqualifiedlead": null, "hs_time_in_opportunity": null, "hs_time_in_other": null, "hs_time_in_salesqualifiedlead": null, "hs_time_in_subscriber": null, "hs_total_deal_value": null, "hs_unique_creation_key": null, "hs_updated_by_user_id": null, "hs_user_ids_of_all_notification_followers": null, "hs_user_ids_of_all_notification_unfollowers": null, "hs_user_ids_of_all_owners": null, "hubspot_owner_assigneddate": null, "hubspot_owner_id": null, "hubspot_team_id": null, "hubspotscore": null, "industry": "Airbyte Test", "is_public": true, "lifecyclestage": null, "linkedin_company_page": "https://www.linkedin.com/company/bd-domain", "linkedinbio": null, "name": "test company 84", "notes_last_contacted": null, "notes_last_updated": null, "notes_next_activity_date": null, "num_associated_contacts": 0.0, "num_associated_deals": null, "num_contacted_notes": null, "num_conversion_events": null, "num_conversion_events_cardinality_sum_d095f14b": null, "num_notes": null, "numberofemployees": null, "phone": "(877) 929-0687-00291", "recent_conversion_date": null, "recent_conversion_date_timestamp_latest_value_72856da1": null, "recent_conversion_event_name": null, "recent_conversion_event_name_timestamp_latest_value_66c820bf": null, "recent_deal_amount": null, "recent_deal_close_date": null, "state": "Massachusetts", "timezone": "Asia/Dhaka", "total_money_raised": null, "total_revenue": null, "twitterbio": null, "twitterfollowers": null, "twitterhandle": null, "type": null, "web_technologies": "apache;nginx;facebook_social_plugins;facebook_connect;google_analytics;facebook_like_button;centos", "website": "test.domain.net", "zip": "1216"}, "createdAt": "2021-10-12T12:57:41.126Z", "updatedAt": "2021-10-12T12:57:53.455Z", "archived": false}, "emitted_at": 1655280716913} +{"stream": "companies", "data": {"id": "7097767581", "properties": {"about_us": null, "address": null, "address2": null, "annualrevenue": null, "city": "Test City 95", "closedate": null, 
"closedate_timestamp_earliest_value_a2a17e6e": null, "country": "Bangladesh", "createdate": "2021-10-12T12:57:45.236000+00:00", "days_to_close": null, "description": "This is a placeholder page installed by the Ubuntu release of the Lighttpd server package.", "domain": "test.domain.net", "engagements_last_meeting_booked": null, "engagements_last_meeting_booked_campaign": null, "engagements_last_meeting_booked_medium": null, "engagements_last_meeting_booked_source": null, "facebook_company_page": null, "facebookfans": null, "first_contact_createdate": null, "first_contact_createdate_timestamp_earliest_value_78b50eea": null, "first_conversion_date": null, "first_conversion_date_timestamp_earliest_value_61f58f2c": null, "first_conversion_event_name": null, "first_conversion_event_name_timestamp_earliest_value_68ddae0a": null, "first_deal_created_date": null, "founded_year": null, "googleplus_page": null, "hs_additional_domains": null, "hs_all_accessible_team_ids": null, "hs_all_assigned_business_unit_ids": null, "hs_all_owner_ids": null, "hs_all_team_ids": null, "hs_analytics_first_timestamp": null, "hs_analytics_first_timestamp_timestamp_earliest_value_11e3a63a": null, "hs_analytics_first_touch_converting_campaign": null, "hs_analytics_first_touch_converting_campaign_timestamp_earliest_value_4757fe10": null, "hs_analytics_first_visit_timestamp": null, "hs_analytics_first_visit_timestamp_timestamp_earliest_value_accc17ae": null, "hs_analytics_last_timestamp": null, "hs_analytics_last_timestamp_timestamp_latest_value_4e16365a": null, "hs_analytics_last_touch_converting_campaign": null, "hs_analytics_last_touch_converting_campaign_timestamp_latest_value_81a64e30": null, "hs_analytics_last_visit_timestamp": null, "hs_analytics_last_visit_timestamp_timestamp_latest_value_999a0fce": null, "hs_analytics_num_page_views": null, "hs_analytics_num_page_views_cardinality_sum_e46e85b0": null, "hs_analytics_num_visits": null, "hs_analytics_num_visits_cardinality_sum_53d952a6": 
null, "hs_analytics_source": null, "hs_analytics_source_data_1": null, "hs_analytics_source_data_1_timestamp_earliest_value_9b2f1fa1": null, "hs_analytics_source_data_2": null, "hs_analytics_source_data_2_timestamp_earliest_value_9b2f9400": null, "hs_analytics_source_timestamp_earliest_value_25a3a52c": null, "hs_avatar_filemanager_key": null, "hs_created_by_user_id": null, "hs_createdate": null, "hs_date_entered_customer": null, "hs_date_entered_evangelist": null, "hs_date_entered_lead": null, "hs_date_entered_marketingqualifiedlead": null, "hs_date_entered_opportunity": null, "hs_date_entered_other": null, "hs_date_entered_salesqualifiedlead": null, "hs_date_entered_subscriber": null, "hs_date_exited_customer": null, "hs_date_exited_evangelist": null, "hs_date_exited_lead": null, "hs_date_exited_marketingqualifiedlead": null, "hs_date_exited_opportunity": null, "hs_date_exited_other": null, "hs_date_exited_salesqualifiedlead": null, "hs_date_exited_subscriber": null, "hs_ideal_customer_profile": null, "hs_is_target_account": null, "hs_last_booked_meeting_date": null, "hs_last_logged_call_date": null, "hs_last_open_task_date": null, "hs_last_sales_activity_date": null, "hs_last_sales_activity_timestamp": null, "hs_lastmodifieddate": "2021-10-12T12:57:56.251000+00:00", "hs_latest_createdate_of_active_subscriptions": null, "hs_latest_meeting_activity": null, "hs_lead_status": null, "hs_merged_object_ids": null, "hs_num_blockers": 0.0, "hs_num_child_companies": 0.0, "hs_num_contacts_with_buying_roles": 0.0, "hs_num_decision_makers": 0.0, "hs_num_open_deals": 0.0, "hs_object_id": 7097767581, "hs_parent_company_id": null, "hs_pipeline": null, "hs_predictivecontactscore_v2": null, "hs_predictivecontactscore_v2_next_max_max_d4e58c1e": null, "hs_sales_email_last_replied": null, "hs_target_account": null, "hs_target_account_probability": 0.4076234698295593, "hs_target_account_recommendation_snooze_time": null, "hs_target_account_recommendation_state": null, 
"hs_time_in_customer": null, "hs_time_in_evangelist": null, "hs_time_in_lead": null, "hs_time_in_marketingqualifiedlead": null, "hs_time_in_opportunity": null, "hs_time_in_other": null, "hs_time_in_salesqualifiedlead": null, "hs_time_in_subscriber": null, "hs_total_deal_value": null, "hs_unique_creation_key": null, "hs_updated_by_user_id": null, "hs_user_ids_of_all_notification_followers": null, "hs_user_ids_of_all_notification_unfollowers": null, "hs_user_ids_of_all_owners": null, "hubspot_owner_assigneddate": null, "hubspot_owner_id": null, "hubspot_team_id": null, "hubspotscore": null, "industry": "Airbyte Test", "is_public": true, "lifecyclestage": null, "linkedin_company_page": "https://www.linkedin.com/company/bd-domain", "linkedinbio": null, "name": "test company 95", "notes_last_contacted": null, "notes_last_updated": null, "notes_next_activity_date": null, "num_associated_contacts": 0.0, "num_associated_deals": null, "num_contacted_notes": null, "num_conversion_events": null, "num_conversion_events_cardinality_sum_d095f14b": null, "num_notes": null, "numberofemployees": null, "phone": "(877) 929-0687-00291", "recent_conversion_date": null, "recent_conversion_date_timestamp_latest_value_72856da1": null, "recent_conversion_event_name": null, "recent_conversion_event_name_timestamp_latest_value_66c820bf": null, "recent_deal_amount": null, "recent_deal_close_date": null, "state": "Massachusetts", "timezone": "Asia/Dhaka", "total_money_raised": null, "total_revenue": null, "twitterbio": null, "twitterfollowers": null, "twitterhandle": null, "type": null, "web_technologies": "apache;nginx;facebook_social_plugins;facebook_connect;google_analytics;facebook_like_button;centos", "website": "test.domain.net", "zip": "1216"}, "createdAt": "2021-10-12T12:57:45.236Z", "updatedAt": "2021-10-12T12:57:56.251Z", "archived": false}, "emitted_at": 1655280716914} +{"stream": "companies", "data": {"id": "7097772626", "properties": {"about_us": null, "address": null, "address2": 
null, "annualrevenue": null, "city": "Test City 11", "closedate": null, "closedate_timestamp_earliest_value_a2a17e6e": null, "country": "Bangladesh", "createdate": "2021-10-12T12:57:16.220000+00:00", "days_to_close": null, "description": "This is a placeholder page installed by the Ubuntu release of the Lighttpd server package.", "domain": "test.domain.net", "engagements_last_meeting_booked": null, "engagements_last_meeting_booked_campaign": null, "engagements_last_meeting_booked_medium": null, "engagements_last_meeting_booked_source": null, "facebook_company_page": null, "facebookfans": null, "first_contact_createdate": null, "first_contact_createdate_timestamp_earliest_value_78b50eea": null, "first_conversion_date": null, "first_conversion_date_timestamp_earliest_value_61f58f2c": null, "first_conversion_event_name": null, "first_conversion_event_name_timestamp_earliest_value_68ddae0a": null, "first_deal_created_date": null, "founded_year": null, "googleplus_page": null, "hs_additional_domains": null, "hs_all_accessible_team_ids": null, "hs_all_assigned_business_unit_ids": null, "hs_all_owner_ids": null, "hs_all_team_ids": null, "hs_analytics_first_timestamp": null, "hs_analytics_first_timestamp_timestamp_earliest_value_11e3a63a": null, "hs_analytics_first_touch_converting_campaign": null, "hs_analytics_first_touch_converting_campaign_timestamp_earliest_value_4757fe10": null, "hs_analytics_first_visit_timestamp": null, "hs_analytics_first_visit_timestamp_timestamp_earliest_value_accc17ae": null, "hs_analytics_last_timestamp": null, "hs_analytics_last_timestamp_timestamp_latest_value_4e16365a": null, "hs_analytics_last_touch_converting_campaign": null, "hs_analytics_last_touch_converting_campaign_timestamp_latest_value_81a64e30": null, "hs_analytics_last_visit_timestamp": null, "hs_analytics_last_visit_timestamp_timestamp_latest_value_999a0fce": null, "hs_analytics_num_page_views": null, "hs_analytics_num_page_views_cardinality_sum_e46e85b0": null, 
"hs_analytics_num_visits": null, "hs_analytics_num_visits_cardinality_sum_53d952a6": null, "hs_analytics_source": null, "hs_analytics_source_data_1": null, "hs_analytics_source_data_1_timestamp_earliest_value_9b2f1fa1": null, "hs_analytics_source_data_2": null, "hs_analytics_source_data_2_timestamp_earliest_value_9b2f9400": null, "hs_analytics_source_timestamp_earliest_value_25a3a52c": null, "hs_avatar_filemanager_key": null, "hs_created_by_user_id": null, "hs_createdate": null, "hs_date_entered_customer": null, "hs_date_entered_evangelist": null, "hs_date_entered_lead": null, "hs_date_entered_marketingqualifiedlead": null, "hs_date_entered_opportunity": null, "hs_date_entered_other": null, "hs_date_entered_salesqualifiedlead": null, "hs_date_entered_subscriber": null, "hs_date_exited_customer": null, "hs_date_exited_evangelist": null, "hs_date_exited_lead": null, "hs_date_exited_marketingqualifiedlead": null, "hs_date_exited_opportunity": null, "hs_date_exited_other": null, "hs_date_exited_salesqualifiedlead": null, "hs_date_exited_subscriber": null, "hs_ideal_customer_profile": null, "hs_is_target_account": null, "hs_last_booked_meeting_date": null, "hs_last_logged_call_date": null, "hs_last_open_task_date": null, "hs_last_sales_activity_date": null, "hs_last_sales_activity_timestamp": null, "hs_lastmodifieddate": "2021-10-12T12:57:33.359000+00:00", "hs_latest_createdate_of_active_subscriptions": null, "hs_latest_meeting_activity": null, "hs_lead_status": null, "hs_merged_object_ids": null, "hs_num_blockers": 0.0, "hs_num_child_companies": 0.0, "hs_num_contacts_with_buying_roles": 0.0, "hs_num_decision_makers": 0.0, "hs_num_open_deals": 0.0, "hs_object_id": 7097772626, "hs_parent_company_id": null, "hs_pipeline": null, "hs_predictivecontactscore_v2": null, "hs_predictivecontactscore_v2_next_max_max_d4e58c1e": null, "hs_sales_email_last_replied": null, "hs_target_account": null, "hs_target_account_probability": 0.4076234698295593, 
"hs_target_account_recommendation_snooze_time": null, "hs_target_account_recommendation_state": null, "hs_time_in_customer": null, "hs_time_in_evangelist": null, "hs_time_in_lead": null, "hs_time_in_marketingqualifiedlead": null, "hs_time_in_opportunity": null, "hs_time_in_other": null, "hs_time_in_salesqualifiedlead": null, "hs_time_in_subscriber": null, "hs_total_deal_value": null, "hs_unique_creation_key": null, "hs_updated_by_user_id": null, "hs_user_ids_of_all_notification_followers": null, "hs_user_ids_of_all_notification_unfollowers": null, "hs_user_ids_of_all_owners": null, "hubspot_owner_assigneddate": null, "hubspot_owner_id": null, "hubspot_team_id": null, "hubspotscore": null, "industry": "Airbyte Test", "is_public": true, "lifecyclestage": null, "linkedin_company_page": "https://www.linkedin.com/company/bd-domain", "linkedinbio": null, "name": "test company 11", "notes_last_contacted": null, "notes_last_updated": null, "notes_next_activity_date": null, "num_associated_contacts": 0.0, "num_associated_deals": null, "num_contacted_notes": null, "num_conversion_events": null, "num_conversion_events_cardinality_sum_d095f14b": null, "num_notes": null, "numberofemployees": null, "phone": "(877) 929-0687-00291", "recent_conversion_date": null, "recent_conversion_date_timestamp_latest_value_72856da1": null, "recent_conversion_event_name": null, "recent_conversion_event_name_timestamp_latest_value_66c820bf": null, "recent_deal_amount": null, "recent_deal_close_date": null, "state": "Massachusetts", "timezone": "Asia/Dhaka", "total_money_raised": null, "total_revenue": null, "twitterbio": null, "twitterfollowers": null, "twitterhandle": null, "type": null, "web_technologies": "apache;nginx;facebook_social_plugins;facebook_connect;google_analytics;facebook_like_button;centos", "website": "test.domain.net", "zip": "1216"}, "createdAt": "2021-10-12T12:57:16.220Z", "updatedAt": "2021-10-12T12:57:33.359Z", "archived": false}, "emitted_at": 1655280716914} +{"stream": 
"companies", "data": {"id": "7097772633", "properties": {"about_us": null, "address": null, "address2": null, "annualrevenue": null, "city": "Test City 28", "closedate": null, "closedate_timestamp_earliest_value_a2a17e6e": null, "country": "Bangladesh", "createdate": "2021-10-12T12:57:21.892000+00:00", "days_to_close": null, "description": "This is a placeholder page installed by the Ubuntu release of the Lighttpd server package.", "domain": "test.domain.net", "engagements_last_meeting_booked": null, "engagements_last_meeting_booked_campaign": null, "engagements_last_meeting_booked_medium": null, "engagements_last_meeting_booked_source": null, "facebook_company_page": null, "facebookfans": null, "first_contact_createdate": null, "first_contact_createdate_timestamp_earliest_value_78b50eea": null, "first_conversion_date": null, "first_conversion_date_timestamp_earliest_value_61f58f2c": null, "first_conversion_event_name": null, "first_conversion_event_name_timestamp_earliest_value_68ddae0a": null, "first_deal_created_date": null, "founded_year": null, "googleplus_page": null, "hs_additional_domains": null, "hs_all_accessible_team_ids": null, "hs_all_assigned_business_unit_ids": null, "hs_all_owner_ids": null, "hs_all_team_ids": null, "hs_analytics_first_timestamp": null, "hs_analytics_first_timestamp_timestamp_earliest_value_11e3a63a": null, "hs_analytics_first_touch_converting_campaign": null, "hs_analytics_first_touch_converting_campaign_timestamp_earliest_value_4757fe10": null, "hs_analytics_first_visit_timestamp": null, "hs_analytics_first_visit_timestamp_timestamp_earliest_value_accc17ae": null, "hs_analytics_last_timestamp": null, "hs_analytics_last_timestamp_timestamp_latest_value_4e16365a": null, "hs_analytics_last_touch_converting_campaign": null, "hs_analytics_last_touch_converting_campaign_timestamp_latest_value_81a64e30": null, "hs_analytics_last_visit_timestamp": null, "hs_analytics_last_visit_timestamp_timestamp_latest_value_999a0fce": null, 
"hs_analytics_num_page_views": null, "hs_analytics_num_page_views_cardinality_sum_e46e85b0": null, "hs_analytics_num_visits": null, "hs_analytics_num_visits_cardinality_sum_53d952a6": null, "hs_analytics_source": null, "hs_analytics_source_data_1": null, "hs_analytics_source_data_1_timestamp_earliest_value_9b2f1fa1": null, "hs_analytics_source_data_2": null, "hs_analytics_source_data_2_timestamp_earliest_value_9b2f9400": null, "hs_analytics_source_timestamp_earliest_value_25a3a52c": null, "hs_avatar_filemanager_key": null, "hs_created_by_user_id": null, "hs_createdate": null, "hs_date_entered_customer": null, "hs_date_entered_evangelist": null, "hs_date_entered_lead": null, "hs_date_entered_marketingqualifiedlead": null, "hs_date_entered_opportunity": null, "hs_date_entered_other": null, "hs_date_entered_salesqualifiedlead": null, "hs_date_entered_subscriber": null, "hs_date_exited_customer": null, "hs_date_exited_evangelist": null, "hs_date_exited_lead": null, "hs_date_exited_marketingqualifiedlead": null, "hs_date_exited_opportunity": null, "hs_date_exited_other": null, "hs_date_exited_salesqualifiedlead": null, "hs_date_exited_subscriber": null, "hs_ideal_customer_profile": null, "hs_is_target_account": null, "hs_last_booked_meeting_date": null, "hs_last_logged_call_date": null, "hs_last_open_task_date": null, "hs_last_sales_activity_date": null, "hs_last_sales_activity_timestamp": null, "hs_lastmodifieddate": "2021-10-12T12:58:27.588000+00:00", "hs_latest_createdate_of_active_subscriptions": null, "hs_latest_meeting_activity": null, "hs_lead_status": null, "hs_merged_object_ids": null, "hs_num_blockers": 0.0, "hs_num_child_companies": 0.0, "hs_num_contacts_with_buying_roles": 0.0, "hs_num_decision_makers": 0.0, "hs_num_open_deals": 0.0, "hs_object_id": 7097772633, "hs_parent_company_id": null, "hs_pipeline": null, "hs_predictivecontactscore_v2": null, "hs_predictivecontactscore_v2_next_max_max_d4e58c1e": null, "hs_sales_email_last_replied": null, 
"hs_target_account": null, "hs_target_account_probability": 0.4076234698295593, "hs_target_account_recommendation_snooze_time": null, "hs_target_account_recommendation_state": null, "hs_time_in_customer": null, "hs_time_in_evangelist": null, "hs_time_in_lead": null, "hs_time_in_marketingqualifiedlead": null, "hs_time_in_opportunity": null, "hs_time_in_other": null, "hs_time_in_salesqualifiedlead": null, "hs_time_in_subscriber": null, "hs_total_deal_value": null, "hs_unique_creation_key": null, "hs_updated_by_user_id": null, "hs_user_ids_of_all_notification_followers": null, "hs_user_ids_of_all_notification_unfollowers": null, "hs_user_ids_of_all_owners": null, "hubspot_owner_assigneddate": null, "hubspot_owner_id": null, "hubspot_team_id": null, "hubspotscore": null, "industry": "Airbyte Test", "is_public": true, "lifecyclestage": null, "linkedin_company_page": "https://www.linkedin.com/company/bd-domain", "linkedinbio": null, "name": "test company 28", "notes_last_contacted": null, "notes_last_updated": null, "notes_next_activity_date": null, "num_associated_contacts": 0.0, "num_associated_deals": null, "num_contacted_notes": null, "num_conversion_events": null, "num_conversion_events_cardinality_sum_d095f14b": null, "num_notes": null, "numberofemployees": null, "phone": "(877) 929-0687-00291", "recent_conversion_date": null, "recent_conversion_date_timestamp_latest_value_72856da1": null, "recent_conversion_event_name": null, "recent_conversion_event_name_timestamp_latest_value_66c820bf": null, "recent_deal_amount": null, "recent_deal_close_date": null, "state": "Massachusetts", "timezone": "Asia/Dhaka", "total_money_raised": null, "total_revenue": null, "twitterbio": null, "twitterfollowers": null, "twitterhandle": null, "type": null, "web_technologies": "apache;nginx;facebook_social_plugins;facebook_connect;google_analytics;facebook_like_button;centos", "website": "test.domain.net", "zip": "1216"}, "createdAt": "2021-10-12T12:57:21.892Z", "updatedAt": 
"2021-10-12T12:58:27.588Z", "archived": false}, "emitted_at": 1655280716915} +{"stream": "companies", "data": {"id": "7097772649", "properties": {"about_us": null, "address": null, "address2": null, "annualrevenue": null, "city": "Test City 71", "closedate": null, "closedate_timestamp_earliest_value_a2a17e6e": null, "country": "Bangladesh", "createdate": "2021-10-12T12:57:36.067000+00:00", "days_to_close": null, "description": "This is a placeholder page installed by the Ubuntu release of the Lighttpd server package.", "domain": "test.domain.net", "engagements_last_meeting_booked": null, "engagements_last_meeting_booked_campaign": null, "engagements_last_meeting_booked_medium": null, "engagements_last_meeting_booked_source": null, "facebook_company_page": null, "facebookfans": null, "first_contact_createdate": null, "first_contact_createdate_timestamp_earliest_value_78b50eea": null, "first_conversion_date": null, "first_conversion_date_timestamp_earliest_value_61f58f2c": null, "first_conversion_event_name": null, "first_conversion_event_name_timestamp_earliest_value_68ddae0a": null, "first_deal_created_date": null, "founded_year": null, "googleplus_page": null, "hs_additional_domains": null, "hs_all_accessible_team_ids": null, "hs_all_assigned_business_unit_ids": null, "hs_all_owner_ids": null, "hs_all_team_ids": null, "hs_analytics_first_timestamp": null, "hs_analytics_first_timestamp_timestamp_earliest_value_11e3a63a": null, "hs_analytics_first_touch_converting_campaign": null, "hs_analytics_first_touch_converting_campaign_timestamp_earliest_value_4757fe10": null, "hs_analytics_first_visit_timestamp": null, "hs_analytics_first_visit_timestamp_timestamp_earliest_value_accc17ae": null, "hs_analytics_last_timestamp": null, "hs_analytics_last_timestamp_timestamp_latest_value_4e16365a": null, "hs_analytics_last_touch_converting_campaign": null, "hs_analytics_last_touch_converting_campaign_timestamp_latest_value_81a64e30": null, "hs_analytics_last_visit_timestamp": 
null, "hs_analytics_last_visit_timestamp_timestamp_latest_value_999a0fce": null, "hs_analytics_num_page_views": null, "hs_analytics_num_page_views_cardinality_sum_e46e85b0": null, "hs_analytics_num_visits": null, "hs_analytics_num_visits_cardinality_sum_53d952a6": null, "hs_analytics_source": null, "hs_analytics_source_data_1": null, "hs_analytics_source_data_1_timestamp_earliest_value_9b2f1fa1": null, "hs_analytics_source_data_2": null, "hs_analytics_source_data_2_timestamp_earliest_value_9b2f9400": null, "hs_analytics_source_timestamp_earliest_value_25a3a52c": null, "hs_avatar_filemanager_key": null, "hs_created_by_user_id": null, "hs_createdate": null, "hs_date_entered_customer": null, "hs_date_entered_evangelist": null, "hs_date_entered_lead": null, "hs_date_entered_marketingqualifiedlead": null, "hs_date_entered_opportunity": null, "hs_date_entered_other": null, "hs_date_entered_salesqualifiedlead": null, "hs_date_entered_subscriber": null, "hs_date_exited_customer": null, "hs_date_exited_evangelist": null, "hs_date_exited_lead": null, "hs_date_exited_marketingqualifiedlead": null, "hs_date_exited_opportunity": null, "hs_date_exited_other": null, "hs_date_exited_salesqualifiedlead": null, "hs_date_exited_subscriber": null, "hs_ideal_customer_profile": null, "hs_is_target_account": null, "hs_last_booked_meeting_date": null, "hs_last_logged_call_date": null, "hs_last_open_task_date": null, "hs_last_sales_activity_date": null, "hs_last_sales_activity_timestamp": null, "hs_lastmodifieddate": "2021-10-12T12:57:49.542000+00:00", "hs_latest_createdate_of_active_subscriptions": null, "hs_latest_meeting_activity": null, "hs_lead_status": null, "hs_merged_object_ids": null, "hs_num_blockers": 0.0, "hs_num_child_companies": 0.0, "hs_num_contacts_with_buying_roles": 0.0, "hs_num_decision_makers": 0.0, "hs_num_open_deals": 0.0, "hs_object_id": 7097772649, "hs_parent_company_id": null, "hs_pipeline": null, "hs_predictivecontactscore_v2": null, 
"hs_predictivecontactscore_v2_next_max_max_d4e58c1e": null, "hs_sales_email_last_replied": null, "hs_target_account": null, "hs_target_account_probability": 0.4076234698295593, "hs_target_account_recommendation_snooze_time": null, "hs_target_account_recommendation_state": null, "hs_time_in_customer": null, "hs_time_in_evangelist": null, "hs_time_in_lead": null, "hs_time_in_marketingqualifiedlead": null, "hs_time_in_opportunity": null, "hs_time_in_other": null, "hs_time_in_salesqualifiedlead": null, "hs_time_in_subscriber": null, "hs_total_deal_value": null, "hs_unique_creation_key": null, "hs_updated_by_user_id": null, "hs_user_ids_of_all_notification_followers": null, "hs_user_ids_of_all_notification_unfollowers": null, "hs_user_ids_of_all_owners": null, "hubspot_owner_assigneddate": null, "hubspot_owner_id": null, "hubspot_team_id": null, "hubspotscore": null, "industry": "Airbyte Test", "is_public": true, "lifecyclestage": null, "linkedin_company_page": "https://www.linkedin.com/company/bd-domain", "linkedinbio": null, "name": "test company 71", "notes_last_contacted": null, "notes_last_updated": null, "notes_next_activity_date": null, "num_associated_contacts": 0.0, "num_associated_deals": null, "num_contacted_notes": null, "num_conversion_events": null, "num_conversion_events_cardinality_sum_d095f14b": null, "num_notes": null, "numberofemployees": null, "phone": "(877) 929-0687-00291", "recent_conversion_date": null, "recent_conversion_date_timestamp_latest_value_72856da1": null, "recent_conversion_event_name": null, "recent_conversion_event_name_timestamp_latest_value_66c820bf": null, "recent_deal_amount": null, "recent_deal_close_date": null, "state": "Massachusetts", "timezone": "Asia/Dhaka", "total_money_raised": null, "total_revenue": null, "twitterbio": null, "twitterfollowers": null, "twitterhandle": null, "type": null, "web_technologies": "apache;nginx;facebook_social_plugins;facebook_connect;google_analytics;facebook_like_button;centos", "website": 
"test.domain.net", "zip": "1216"}, "createdAt": "2021-10-12T12:57:36.067Z", "updatedAt": "2021-10-12T12:57:49.542Z", "archived": false}, "emitted_at": 1655280716915} +{"stream": "companies", "data": {"id": "7097772652", "properties": {"about_us": null, "address": null, "address2": null, "annualrevenue": null, "city": "Test City 80", "closedate": null, "closedate_timestamp_earliest_value_a2a17e6e": null, "country": "Bangladesh", "createdate": "2021-10-12T12:57:39.562000+00:00", "days_to_close": null, "description": "This is a placeholder page installed by the Ubuntu release of the Lighttpd server package.", "domain": "test.domain.net", "engagements_last_meeting_booked": null, "engagements_last_meeting_booked_campaign": null, "engagements_last_meeting_booked_medium": null, "engagements_last_meeting_booked_source": null, "facebook_company_page": null, "facebookfans": null, "first_contact_createdate": null, "first_contact_createdate_timestamp_earliest_value_78b50eea": null, "first_conversion_date": null, "first_conversion_date_timestamp_earliest_value_61f58f2c": null, "first_conversion_event_name": null, "first_conversion_event_name_timestamp_earliest_value_68ddae0a": null, "first_deal_created_date": null, "founded_year": null, "googleplus_page": null, "hs_additional_domains": null, "hs_all_accessible_team_ids": null, "hs_all_assigned_business_unit_ids": null, "hs_all_owner_ids": null, "hs_all_team_ids": null, "hs_analytics_first_timestamp": null, "hs_analytics_first_timestamp_timestamp_earliest_value_11e3a63a": null, "hs_analytics_first_touch_converting_campaign": null, "hs_analytics_first_touch_converting_campaign_timestamp_earliest_value_4757fe10": null, "hs_analytics_first_visit_timestamp": null, "hs_analytics_first_visit_timestamp_timestamp_earliest_value_accc17ae": null, "hs_analytics_last_timestamp": null, "hs_analytics_last_timestamp_timestamp_latest_value_4e16365a": null, "hs_analytics_last_touch_converting_campaign": null, 
"hs_analytics_last_touch_converting_campaign_timestamp_latest_value_81a64e30": null, "hs_analytics_last_visit_timestamp": null, "hs_analytics_last_visit_timestamp_timestamp_latest_value_999a0fce": null, "hs_analytics_num_page_views": null, "hs_analytics_num_page_views_cardinality_sum_e46e85b0": null, "hs_analytics_num_visits": null, "hs_analytics_num_visits_cardinality_sum_53d952a6": null, "hs_analytics_source": null, "hs_analytics_source_data_1": null, "hs_analytics_source_data_1_timestamp_earliest_value_9b2f1fa1": null, "hs_analytics_source_data_2": null, "hs_analytics_source_data_2_timestamp_earliest_value_9b2f9400": null, "hs_analytics_source_timestamp_earliest_value_25a3a52c": null, "hs_avatar_filemanager_key": null, "hs_created_by_user_id": null, "hs_createdate": null, "hs_date_entered_customer": null, "hs_date_entered_evangelist": null, "hs_date_entered_lead": null, "hs_date_entered_marketingqualifiedlead": null, "hs_date_entered_opportunity": null, "hs_date_entered_other": null, "hs_date_entered_salesqualifiedlead": null, "hs_date_entered_subscriber": null, "hs_date_exited_customer": null, "hs_date_exited_evangelist": null, "hs_date_exited_lead": null, "hs_date_exited_marketingqualifiedlead": null, "hs_date_exited_opportunity": null, "hs_date_exited_other": null, "hs_date_exited_salesqualifiedlead": null, "hs_date_exited_subscriber": null, "hs_ideal_customer_profile": null, "hs_is_target_account": null, "hs_last_booked_meeting_date": null, "hs_last_logged_call_date": null, "hs_last_open_task_date": null, "hs_last_sales_activity_date": null, "hs_last_sales_activity_timestamp": null, "hs_lastmodifieddate": "2021-10-12T12:57:43.852000+00:00", "hs_latest_createdate_of_active_subscriptions": null, "hs_latest_meeting_activity": null, "hs_lead_status": null, "hs_merged_object_ids": null, "hs_num_blockers": 0.0, "hs_num_child_companies": 0.0, "hs_num_contacts_with_buying_roles": 0.0, "hs_num_decision_makers": 0.0, "hs_num_open_deals": 0.0, "hs_object_id": 
7097772652, "hs_parent_company_id": null, "hs_pipeline": null, "hs_predictivecontactscore_v2": null, "hs_predictivecontactscore_v2_next_max_max_d4e58c1e": null, "hs_sales_email_last_replied": null, "hs_target_account": null, "hs_target_account_probability": 0.4076234698295593, "hs_target_account_recommendation_snooze_time": null, "hs_target_account_recommendation_state": null, "hs_time_in_customer": null, "hs_time_in_evangelist": null, "hs_time_in_lead": null, "hs_time_in_marketingqualifiedlead": null, "hs_time_in_opportunity": null, "hs_time_in_other": null, "hs_time_in_salesqualifiedlead": null, "hs_time_in_subscriber": null, "hs_total_deal_value": null, "hs_unique_creation_key": null, "hs_updated_by_user_id": null, "hs_user_ids_of_all_notification_followers": null, "hs_user_ids_of_all_notification_unfollowers": null, "hs_user_ids_of_all_owners": null, "hubspot_owner_assigneddate": null, "hubspot_owner_id": null, "hubspot_team_id": null, "hubspotscore": null, "industry": "Airbyte Test", "is_public": true, "lifecyclestage": null, "linkedin_company_page": "https://www.linkedin.com/company/bd-domain", "linkedinbio": null, "name": "test company 80", "notes_last_contacted": null, "notes_last_updated": null, "notes_next_activity_date": null, "num_associated_contacts": 0.0, "num_associated_deals": null, "num_contacted_notes": null, "num_conversion_events": null, "num_conversion_events_cardinality_sum_d095f14b": null, "num_notes": null, "numberofemployees": null, "phone": "(877) 929-0687-00291", "recent_conversion_date": null, "recent_conversion_date_timestamp_latest_value_72856da1": null, "recent_conversion_event_name": null, "recent_conversion_event_name_timestamp_latest_value_66c820bf": null, "recent_deal_amount": null, "recent_deal_close_date": null, "state": "Massachusetts", "timezone": "Asia/Dhaka", "total_money_raised": null, "total_revenue": null, "twitterbio": null, "twitterfollowers": null, "twitterhandle": null, "type": null, "web_technologies": 
"apache;nginx;facebook_social_plugins;facebook_connect;google_analytics;facebook_like_button;centos", "website": "test.domain.net", "zip": "1216"}, "createdAt": "2021-10-12T12:57:39.562Z", "updatedAt": "2021-10-12T12:57:43.852Z", "archived": false}, "emitted_at": 1655280716915} +{"stream": "companies", "data": {"id": "7097772664", "properties": {"about_us": null, "address": null, "address2": null, "annualrevenue": null, "city": "Test City 98", "closedate": null, "closedate_timestamp_earliest_value_a2a17e6e": null, "country": "Bangladesh", "createdate": "2021-10-12T12:57:46.129000+00:00", "days_to_close": null, "description": "This is a placeholder page installed by the Ubuntu release of the Lighttpd server package.", "domain": "test.domain.net", "engagements_last_meeting_booked": null, "engagements_last_meeting_booked_campaign": null, "engagements_last_meeting_booked_medium": null, "engagements_last_meeting_booked_source": null, "facebook_company_page": null, "facebookfans": null, "first_contact_createdate": null, "first_contact_createdate_timestamp_earliest_value_78b50eea": null, "first_conversion_date": null, "first_conversion_date_timestamp_earliest_value_61f58f2c": null, "first_conversion_event_name": null, "first_conversion_event_name_timestamp_earliest_value_68ddae0a": null, "first_deal_created_date": null, "founded_year": null, "googleplus_page": null, "hs_additional_domains": null, "hs_all_accessible_team_ids": null, "hs_all_assigned_business_unit_ids": null, "hs_all_owner_ids": null, "hs_all_team_ids": null, "hs_analytics_first_timestamp": null, "hs_analytics_first_timestamp_timestamp_earliest_value_11e3a63a": null, "hs_analytics_first_touch_converting_campaign": null, "hs_analytics_first_touch_converting_campaign_timestamp_earliest_value_4757fe10": null, "hs_analytics_first_visit_timestamp": null, "hs_analytics_first_visit_timestamp_timestamp_earliest_value_accc17ae": null, "hs_analytics_last_timestamp": null, 
"hs_analytics_last_timestamp_timestamp_latest_value_4e16365a": null, "hs_analytics_last_touch_converting_campaign": null, "hs_analytics_last_touch_converting_campaign_timestamp_latest_value_81a64e30": null, "hs_analytics_last_visit_timestamp": null, "hs_analytics_last_visit_timestamp_timestamp_latest_value_999a0fce": null, "hs_analytics_num_page_views": null, "hs_analytics_num_page_views_cardinality_sum_e46e85b0": null, "hs_analytics_num_visits": null, "hs_analytics_num_visits_cardinality_sum_53d952a6": null, "hs_analytics_source": null, "hs_analytics_source_data_1": null, "hs_analytics_source_data_1_timestamp_earliest_value_9b2f1fa1": null, "hs_analytics_source_data_2": null, "hs_analytics_source_data_2_timestamp_earliest_value_9b2f9400": null, "hs_analytics_source_timestamp_earliest_value_25a3a52c": null, "hs_avatar_filemanager_key": null, "hs_created_by_user_id": null, "hs_createdate": null, "hs_date_entered_customer": null, "hs_date_entered_evangelist": null, "hs_date_entered_lead": null, "hs_date_entered_marketingqualifiedlead": null, "hs_date_entered_opportunity": null, "hs_date_entered_other": null, "hs_date_entered_salesqualifiedlead": null, "hs_date_entered_subscriber": null, "hs_date_exited_customer": null, "hs_date_exited_evangelist": null, "hs_date_exited_lead": null, "hs_date_exited_marketingqualifiedlead": null, "hs_date_exited_opportunity": null, "hs_date_exited_other": null, "hs_date_exited_salesqualifiedlead": null, "hs_date_exited_subscriber": null, "hs_ideal_customer_profile": null, "hs_is_target_account": null, "hs_last_booked_meeting_date": null, "hs_last_logged_call_date": null, "hs_last_open_task_date": null, "hs_last_sales_activity_date": null, "hs_last_sales_activity_timestamp": null, "hs_lastmodifieddate": "2021-10-12T12:57:52.184000+00:00", "hs_latest_createdate_of_active_subscriptions": null, "hs_latest_meeting_activity": null, "hs_lead_status": null, "hs_merged_object_ids": null, "hs_num_blockers": 0.0, "hs_num_child_companies": 0.0, 
"hs_num_contacts_with_buying_roles": 0.0, "hs_num_decision_makers": 0.0, "hs_num_open_deals": 0.0, "hs_object_id": 7097772664, "hs_parent_company_id": null, "hs_pipeline": null, "hs_predictivecontactscore_v2": null, "hs_predictivecontactscore_v2_next_max_max_d4e58c1e": null, "hs_sales_email_last_replied": null, "hs_target_account": null, "hs_target_account_probability": 0.4076234698295593, "hs_target_account_recommendation_snooze_time": null, "hs_target_account_recommendation_state": null, "hs_time_in_customer": null, "hs_time_in_evangelist": null, "hs_time_in_lead": null, "hs_time_in_marketingqualifiedlead": null, "hs_time_in_opportunity": null, "hs_time_in_other": null, "hs_time_in_salesqualifiedlead": null, "hs_time_in_subscriber": null, "hs_total_deal_value": null, "hs_unique_creation_key": null, "hs_updated_by_user_id": null, "hs_user_ids_of_all_notification_followers": null, "hs_user_ids_of_all_notification_unfollowers": null, "hs_user_ids_of_all_owners": null, "hubspot_owner_assigneddate": null, "hubspot_owner_id": null, "hubspot_team_id": null, "hubspotscore": null, "industry": "Airbyte Test", "is_public": true, "lifecyclestage": null, "linkedin_company_page": "https://www.linkedin.com/company/bd-domain", "linkedinbio": null, "name": "test company 98", "notes_last_contacted": null, "notes_last_updated": null, "notes_next_activity_date": null, "num_associated_contacts": 0.0, "num_associated_deals": null, "num_contacted_notes": null, "num_conversion_events": null, "num_conversion_events_cardinality_sum_d095f14b": null, "num_notes": null, "numberofemployees": null, "phone": "(877) 929-0687-00291", "recent_conversion_date": null, "recent_conversion_date_timestamp_latest_value_72856da1": null, "recent_conversion_event_name": null, "recent_conversion_event_name_timestamp_latest_value_66c820bf": null, "recent_deal_amount": null, "recent_deal_close_date": null, "state": "Massachusetts", "timezone": "Asia/Dhaka", "total_money_raised": null, "total_revenue": null, 
"twitterbio": null, "twitterfollowers": null, "twitterhandle": null, "type": null, "web_technologies": "apache;nginx;facebook_social_plugins;facebook_connect;google_analytics;facebook_like_button;centos", "website": "test.domain.net", "zip": "1216"}, "createdAt": "2021-10-12T12:57:46.129Z", "updatedAt": "2021-10-12T12:57:52.184Z", "archived": false}, "emitted_at": 1655280716916} +{"stream": "companies", "data": {"id": "7097776150", "properties": {"about_us": null, "address": null, "address2": null, "annualrevenue": null, "city": "Test City 0", "closedate": null, "closedate_timestamp_earliest_value_a2a17e6e": null, "country": "Bangladesh", "createdate": "2021-10-12T12:57:11.881000+00:00", "days_to_close": null, "description": "This is a placeholder page installed by the Ubuntu release of the Lighttpd server package.", "domain": "test.domain.net", "engagements_last_meeting_booked": null, "engagements_last_meeting_booked_campaign": null, "engagements_last_meeting_booked_medium": null, "engagements_last_meeting_booked_source": null, "facebook_company_page": null, "facebookfans": null, "first_contact_createdate": null, "first_contact_createdate_timestamp_earliest_value_78b50eea": null, "first_conversion_date": null, "first_conversion_date_timestamp_earliest_value_61f58f2c": null, "first_conversion_event_name": null, "first_conversion_event_name_timestamp_earliest_value_68ddae0a": null, "first_deal_created_date": null, "founded_year": null, "googleplus_page": null, "hs_additional_domains": null, "hs_all_accessible_team_ids": null, "hs_all_assigned_business_unit_ids": null, "hs_all_owner_ids": null, "hs_all_team_ids": null, "hs_analytics_first_timestamp": null, "hs_analytics_first_timestamp_timestamp_earliest_value_11e3a63a": null, "hs_analytics_first_touch_converting_campaign": null, "hs_analytics_first_touch_converting_campaign_timestamp_earliest_value_4757fe10": null, "hs_analytics_first_visit_timestamp": null, 
"hs_analytics_first_visit_timestamp_timestamp_earliest_value_accc17ae": null, "hs_analytics_last_timestamp": null, "hs_analytics_last_timestamp_timestamp_latest_value_4e16365a": null, "hs_analytics_last_touch_converting_campaign": null, "hs_analytics_last_touch_converting_campaign_timestamp_latest_value_81a64e30": null, "hs_analytics_last_visit_timestamp": null, "hs_analytics_last_visit_timestamp_timestamp_latest_value_999a0fce": null, "hs_analytics_num_page_views": null, "hs_analytics_num_page_views_cardinality_sum_e46e85b0": null, "hs_analytics_num_visits": null, "hs_analytics_num_visits_cardinality_sum_53d952a6": null, "hs_analytics_source": null, "hs_analytics_source_data_1": null, "hs_analytics_source_data_1_timestamp_earliest_value_9b2f1fa1": null, "hs_analytics_source_data_2": null, "hs_analytics_source_data_2_timestamp_earliest_value_9b2f9400": null, "hs_analytics_source_timestamp_earliest_value_25a3a52c": null, "hs_avatar_filemanager_key": null, "hs_created_by_user_id": null, "hs_createdate": null, "hs_date_entered_customer": null, "hs_date_entered_evangelist": null, "hs_date_entered_lead": null, "hs_date_entered_marketingqualifiedlead": null, "hs_date_entered_opportunity": null, "hs_date_entered_other": null, "hs_date_entered_salesqualifiedlead": null, "hs_date_entered_subscriber": null, "hs_date_exited_customer": null, "hs_date_exited_evangelist": null, "hs_date_exited_lead": null, "hs_date_exited_marketingqualifiedlead": null, "hs_date_exited_opportunity": null, "hs_date_exited_other": null, "hs_date_exited_salesqualifiedlead": null, "hs_date_exited_subscriber": null, "hs_ideal_customer_profile": null, "hs_is_target_account": null, "hs_last_booked_meeting_date": null, "hs_last_logged_call_date": null, "hs_last_open_task_date": null, "hs_last_sales_activity_date": null, "hs_last_sales_activity_timestamp": null, "hs_lastmodifieddate": "2021-10-12T12:57:17.929000+00:00", "hs_latest_createdate_of_active_subscriptions": null, "hs_latest_meeting_activity": 
null, "hs_lead_status": null, "hs_merged_object_ids": null, "hs_num_blockers": 0.0, "hs_num_child_companies": 0.0, "hs_num_contacts_with_buying_roles": 0.0, "hs_num_decision_makers": 0.0, "hs_num_open_deals": 0.0, "hs_object_id": 7097776150, "hs_parent_company_id": null, "hs_pipeline": null, "hs_predictivecontactscore_v2": null, "hs_predictivecontactscore_v2_next_max_max_d4e58c1e": null, "hs_sales_email_last_replied": null, "hs_target_account": null, "hs_target_account_probability": 0.4076234698295593, "hs_target_account_recommendation_snooze_time": null, "hs_target_account_recommendation_state": null, "hs_time_in_customer": null, "hs_time_in_evangelist": null, "hs_time_in_lead": null, "hs_time_in_marketingqualifiedlead": null, "hs_time_in_opportunity": null, "hs_time_in_other": null, "hs_time_in_salesqualifiedlead": null, "hs_time_in_subscriber": null, "hs_total_deal_value": null, "hs_unique_creation_key": null, "hs_updated_by_user_id": null, "hs_user_ids_of_all_notification_followers": null, "hs_user_ids_of_all_notification_unfollowers": null, "hs_user_ids_of_all_owners": null, "hubspot_owner_assigneddate": null, "hubspot_owner_id": null, "hubspot_team_id": null, "hubspotscore": null, "industry": "Airbyte Test", "is_public": true, "lifecyclestage": null, "linkedin_company_page": "https://www.linkedin.com/company/bd-domain", "linkedinbio": null, "name": "test company 0", "notes_last_contacted": null, "notes_last_updated": null, "notes_next_activity_date": null, "num_associated_contacts": 0.0, "num_associated_deals": null, "num_contacted_notes": null, "num_conversion_events": null, "num_conversion_events_cardinality_sum_d095f14b": null, "num_notes": null, "numberofemployees": null, "phone": "(877) 929-0687-00291", "recent_conversion_date": null, "recent_conversion_date_timestamp_latest_value_72856da1": null, "recent_conversion_event_name": null, "recent_conversion_event_name_timestamp_latest_value_66c820bf": null, "recent_deal_amount": null, 
"recent_deal_close_date": null, "state": "Massachusetts", "timezone": "Asia/Dhaka", "total_money_raised": null, "total_revenue": null, "twitterbio": null, "twitterfollowers": null, "twitterhandle": null, "type": null, "web_technologies": "apache;nginx;facebook_social_plugins;facebook_connect;google_analytics;facebook_like_button;centos", "website": "test.domain.net", "zip": "1216"}, "createdAt": "2021-10-12T12:57:11.881Z", "updatedAt": "2021-10-12T12:57:17.929Z", "archived": false}, "emitted_at": 1655280716916} +{"stream": "companies", "data": {"id": "7097776166", "properties": {"about_us": null, "address": null, "address2": null, "annualrevenue": null, "city": "Test City 13", "closedate": null, "closedate_timestamp_earliest_value_a2a17e6e": null, "country": "Bangladesh", "createdate": "2021-10-12T12:57:17.173000+00:00", "days_to_close": null, "description": "This is a placeholder page installed by the Ubuntu release of the Lighttpd server package.", "domain": "test.domain.net", "engagements_last_meeting_booked": null, "engagements_last_meeting_booked_campaign": null, "engagements_last_meeting_booked_medium": null, "engagements_last_meeting_booked_source": null, "facebook_company_page": null, "facebookfans": null, "first_contact_createdate": null, "first_contact_createdate_timestamp_earliest_value_78b50eea": null, "first_conversion_date": null, "first_conversion_date_timestamp_earliest_value_61f58f2c": null, "first_conversion_event_name": null, "first_conversion_event_name_timestamp_earliest_value_68ddae0a": null, "first_deal_created_date": null, "founded_year": null, "googleplus_page": null, "hs_additional_domains": null, "hs_all_accessible_team_ids": null, "hs_all_assigned_business_unit_ids": null, "hs_all_owner_ids": null, "hs_all_team_ids": null, "hs_analytics_first_timestamp": null, "hs_analytics_first_timestamp_timestamp_earliest_value_11e3a63a": null, "hs_analytics_first_touch_converting_campaign": null, 
"hs_analytics_first_touch_converting_campaign_timestamp_earliest_value_4757fe10": null, "hs_analytics_first_visit_timestamp": null, "hs_analytics_first_visit_timestamp_timestamp_earliest_value_accc17ae": null, "hs_analytics_last_timestamp": null, "hs_analytics_last_timestamp_timestamp_latest_value_4e16365a": null, "hs_analytics_last_touch_converting_campaign": null, "hs_analytics_last_touch_converting_campaign_timestamp_latest_value_81a64e30": null, "hs_analytics_last_visit_timestamp": null, "hs_analytics_last_visit_timestamp_timestamp_latest_value_999a0fce": null, "hs_analytics_num_page_views": null, "hs_analytics_num_page_views_cardinality_sum_e46e85b0": null, "hs_analytics_num_visits": null, "hs_analytics_num_visits_cardinality_sum_53d952a6": null, "hs_analytics_source": null, "hs_analytics_source_data_1": null, "hs_analytics_source_data_1_timestamp_earliest_value_9b2f1fa1": null, "hs_analytics_source_data_2": null, "hs_analytics_source_data_2_timestamp_earliest_value_9b2f9400": null, "hs_analytics_source_timestamp_earliest_value_25a3a52c": null, "hs_avatar_filemanager_key": null, "hs_created_by_user_id": null, "hs_createdate": null, "hs_date_entered_customer": null, "hs_date_entered_evangelist": null, "hs_date_entered_lead": null, "hs_date_entered_marketingqualifiedlead": null, "hs_date_entered_opportunity": null, "hs_date_entered_other": null, "hs_date_entered_salesqualifiedlead": null, "hs_date_entered_subscriber": null, "hs_date_exited_customer": null, "hs_date_exited_evangelist": null, "hs_date_exited_lead": null, "hs_date_exited_marketingqualifiedlead": null, "hs_date_exited_opportunity": null, "hs_date_exited_other": null, "hs_date_exited_salesqualifiedlead": null, "hs_date_exited_subscriber": null, "hs_ideal_customer_profile": null, "hs_is_target_account": null, "hs_last_booked_meeting_date": null, "hs_last_logged_call_date": null, "hs_last_open_task_date": null, "hs_last_sales_activity_date": null, "hs_last_sales_activity_timestamp": null, 
"hs_lastmodifieddate": "2021-10-12T12:57:34.270000+00:00", "hs_latest_createdate_of_active_subscriptions": null, "hs_latest_meeting_activity": null, "hs_lead_status": null, "hs_merged_object_ids": null, "hs_num_blockers": 0.0, "hs_num_child_companies": 0.0, "hs_num_contacts_with_buying_roles": 0.0, "hs_num_decision_makers": 0.0, "hs_num_open_deals": 0.0, "hs_object_id": 7097776166, "hs_parent_company_id": null, "hs_pipeline": null, "hs_predictivecontactscore_v2": null, "hs_predictivecontactscore_v2_next_max_max_d4e58c1e": null, "hs_sales_email_last_replied": null, "hs_target_account": null, "hs_target_account_probability": 0.4076234698295593, "hs_target_account_recommendation_snooze_time": null, "hs_target_account_recommendation_state": null, "hs_time_in_customer": null, "hs_time_in_evangelist": null, "hs_time_in_lead": null, "hs_time_in_marketingqualifiedlead": null, "hs_time_in_opportunity": null, "hs_time_in_other": null, "hs_time_in_salesqualifiedlead": null, "hs_time_in_subscriber": null, "hs_total_deal_value": null, "hs_unique_creation_key": null, "hs_updated_by_user_id": null, "hs_user_ids_of_all_notification_followers": null, "hs_user_ids_of_all_notification_unfollowers": null, "hs_user_ids_of_all_owners": null, "hubspot_owner_assigneddate": null, "hubspot_owner_id": null, "hubspot_team_id": null, "hubspotscore": null, "industry": "Airbyte Test", "is_public": true, "lifecyclestage": null, "linkedin_company_page": "https://www.linkedin.com/company/bd-domain", "linkedinbio": null, "name": "test company 13", "notes_last_contacted": null, "notes_last_updated": null, "notes_next_activity_date": null, "num_associated_contacts": 0.0, "num_associated_deals": null, "num_contacted_notes": null, "num_conversion_events": null, "num_conversion_events_cardinality_sum_d095f14b": null, "num_notes": null, "numberofemployees": null, "phone": "(877) 929-0687-00291", "recent_conversion_date": null, "recent_conversion_date_timestamp_latest_value_72856da1": null, 
"recent_conversion_event_name": null, "recent_conversion_event_name_timestamp_latest_value_66c820bf": null, "recent_deal_amount": null, "recent_deal_close_date": null, "state": "Massachusetts", "timezone": "Asia/Dhaka", "total_money_raised": null, "total_revenue": null, "twitterbio": null, "twitterfollowers": null, "twitterhandle": null, "type": null, "web_technologies": "apache;nginx;facebook_social_plugins;facebook_connect;google_analytics;facebook_like_button;centos", "website": "test.domain.net", "zip": "1216"}, "createdAt": "2021-10-12T12:57:17.173Z", "updatedAt": "2021-10-12T12:57:34.270Z", "archived": false}, "emitted_at": 1655280716917} +{"stream": "companies", "data": {"id": "7097776168", "properties": {"about_us": null, "address": null, "address2": null, "annualrevenue": null, "city": "Test City 25", "closedate": null, "closedate_timestamp_earliest_value_a2a17e6e": null, "country": "Bangladesh", "createdate": "2021-10-12T12:57:20.841000+00:00", "days_to_close": null, "description": "This is a placeholder page installed by the Ubuntu release of the Lighttpd server package.", "domain": "test.domain.net", "engagements_last_meeting_booked": null, "engagements_last_meeting_booked_campaign": null, "engagements_last_meeting_booked_medium": null, "engagements_last_meeting_booked_source": null, "facebook_company_page": null, "facebookfans": null, "first_contact_createdate": null, "first_contact_createdate_timestamp_earliest_value_78b50eea": null, "first_conversion_date": null, "first_conversion_date_timestamp_earliest_value_61f58f2c": null, "first_conversion_event_name": null, "first_conversion_event_name_timestamp_earliest_value_68ddae0a": null, "first_deal_created_date": null, "founded_year": null, "googleplus_page": null, "hs_additional_domains": null, "hs_all_accessible_team_ids": null, "hs_all_assigned_business_unit_ids": null, "hs_all_owner_ids": null, "hs_all_team_ids": null, "hs_analytics_first_timestamp": null, 
"hs_analytics_first_timestamp_timestamp_earliest_value_11e3a63a": null, "hs_analytics_first_touch_converting_campaign": null, "hs_analytics_first_touch_converting_campaign_timestamp_earliest_value_4757fe10": null, "hs_analytics_first_visit_timestamp": null, "hs_analytics_first_visit_timestamp_timestamp_earliest_value_accc17ae": null, "hs_analytics_last_timestamp": null, "hs_analytics_last_timestamp_timestamp_latest_value_4e16365a": null, "hs_analytics_last_touch_converting_campaign": null, "hs_analytics_last_touch_converting_campaign_timestamp_latest_value_81a64e30": null, "hs_analytics_last_visit_timestamp": null, "hs_analytics_last_visit_timestamp_timestamp_latest_value_999a0fce": null, "hs_analytics_num_page_views": null, "hs_analytics_num_page_views_cardinality_sum_e46e85b0": null, "hs_analytics_num_visits": null, "hs_analytics_num_visits_cardinality_sum_53d952a6": null, "hs_analytics_source": null, "hs_analytics_source_data_1": null, "hs_analytics_source_data_1_timestamp_earliest_value_9b2f1fa1": null, "hs_analytics_source_data_2": null, "hs_analytics_source_data_2_timestamp_earliest_value_9b2f9400": null, "hs_analytics_source_timestamp_earliest_value_25a3a52c": null, "hs_avatar_filemanager_key": null, "hs_created_by_user_id": null, "hs_createdate": null, "hs_date_entered_customer": null, "hs_date_entered_evangelist": null, "hs_date_entered_lead": null, "hs_date_entered_marketingqualifiedlead": null, "hs_date_entered_opportunity": null, "hs_date_entered_other": null, "hs_date_entered_salesqualifiedlead": null, "hs_date_entered_subscriber": null, "hs_date_exited_customer": null, "hs_date_exited_evangelist": null, "hs_date_exited_lead": null, "hs_date_exited_marketingqualifiedlead": null, "hs_date_exited_opportunity": null, "hs_date_exited_other": null, "hs_date_exited_salesqualifiedlead": null, "hs_date_exited_subscriber": null, "hs_ideal_customer_profile": null, "hs_is_target_account": null, "hs_last_booked_meeting_date": null, "hs_last_logged_call_date": 
null, "hs_last_open_task_date": null, "hs_last_sales_activity_date": null, "hs_last_sales_activity_timestamp": null, "hs_lastmodifieddate": "2021-10-12T12:58:26.095000+00:00", "hs_latest_createdate_of_active_subscriptions": null, "hs_latest_meeting_activity": null, "hs_lead_status": null, "hs_merged_object_ids": null, "hs_num_blockers": 0.0, "hs_num_child_companies": 0.0, "hs_num_contacts_with_buying_roles": 0.0, "hs_num_decision_makers": 0.0, "hs_num_open_deals": 0.0, "hs_object_id": 7097776168, "hs_parent_company_id": null, "hs_pipeline": null, "hs_predictivecontactscore_v2": null, "hs_predictivecontactscore_v2_next_max_max_d4e58c1e": null, "hs_sales_email_last_replied": null, "hs_target_account": null, "hs_target_account_probability": 0.4076234698295593, "hs_target_account_recommendation_snooze_time": null, "hs_target_account_recommendation_state": null, "hs_time_in_customer": null, "hs_time_in_evangelist": null, "hs_time_in_lead": null, "hs_time_in_marketingqualifiedlead": null, "hs_time_in_opportunity": null, "hs_time_in_other": null, "hs_time_in_salesqualifiedlead": null, "hs_time_in_subscriber": null, "hs_total_deal_value": null, "hs_unique_creation_key": null, "hs_updated_by_user_id": null, "hs_user_ids_of_all_notification_followers": null, "hs_user_ids_of_all_notification_unfollowers": null, "hs_user_ids_of_all_owners": null, "hubspot_owner_assigneddate": null, "hubspot_owner_id": null, "hubspot_team_id": null, "hubspotscore": null, "industry": "Airbyte Test", "is_public": true, "lifecyclestage": null, "linkedin_company_page": "https://www.linkedin.com/company/bd-domain", "linkedinbio": null, "name": "test company 25", "notes_last_contacted": null, "notes_last_updated": null, "notes_next_activity_date": null, "num_associated_contacts": 0.0, "num_associated_deals": null, "num_contacted_notes": null, "num_conversion_events": null, "num_conversion_events_cardinality_sum_d095f14b": null, "num_notes": null, "numberofemployees": null, "phone": "(877) 
929-0687-00291", "recent_conversion_date": null, "recent_conversion_date_timestamp_latest_value_72856da1": null, "recent_conversion_event_name": null, "recent_conversion_event_name_timestamp_latest_value_66c820bf": null, "recent_deal_amount": null, "recent_deal_close_date": null, "state": "Massachusetts", "timezone": "Asia/Dhaka", "total_money_raised": null, "total_revenue": null, "twitterbio": null, "twitterfollowers": null, "twitterhandle": null, "type": null, "web_technologies": "apache;nginx;facebook_social_plugins;facebook_connect;google_analytics;facebook_like_button;centos", "website": "test.domain.net", "zip": "1216"}, "createdAt": "2021-10-12T12:57:20.841Z", "updatedAt": "2021-10-12T12:58:26.095Z", "archived": false}, "emitted_at": 1655280716917} +{"stream": "companies", "data": {"id": "7097776173", "properties": {"about_us": null, "address": null, "address2": null, "annualrevenue": null, "city": "Test City 34", "closedate": null, "closedate_timestamp_earliest_value_a2a17e6e": null, "country": "Bangladesh", "createdate": "2021-10-12T12:57:23.788000+00:00", "days_to_close": null, "description": "This is a placeholder page installed by the Ubuntu release of the Lighttpd server package.", "domain": "test.domain.net", "engagements_last_meeting_booked": null, "engagements_last_meeting_booked_campaign": null, "engagements_last_meeting_booked_medium": null, "engagements_last_meeting_booked_source": null, "facebook_company_page": null, "facebookfans": null, "first_contact_createdate": null, "first_contact_createdate_timestamp_earliest_value_78b50eea": null, "first_conversion_date": null, "first_conversion_date_timestamp_earliest_value_61f58f2c": null, "first_conversion_event_name": null, "first_conversion_event_name_timestamp_earliest_value_68ddae0a": null, "first_deal_created_date": null, "founded_year": null, "googleplus_page": null, "hs_additional_domains": null, "hs_all_accessible_team_ids": null, "hs_all_assigned_business_unit_ids": null, "hs_all_owner_ids": 
null, "hs_all_team_ids": null, "hs_analytics_first_timestamp": null, "hs_analytics_first_timestamp_timestamp_earliest_value_11e3a63a": null, "hs_analytics_first_touch_converting_campaign": null, "hs_analytics_first_touch_converting_campaign_timestamp_earliest_value_4757fe10": null, "hs_analytics_first_visit_timestamp": null, "hs_analytics_first_visit_timestamp_timestamp_earliest_value_accc17ae": null, "hs_analytics_last_timestamp": null, "hs_analytics_last_timestamp_timestamp_latest_value_4e16365a": null, "hs_analytics_last_touch_converting_campaign": null, "hs_analytics_last_touch_converting_campaign_timestamp_latest_value_81a64e30": null, "hs_analytics_last_visit_timestamp": null, "hs_analytics_last_visit_timestamp_timestamp_latest_value_999a0fce": null, "hs_analytics_num_page_views": null, "hs_analytics_num_page_views_cardinality_sum_e46e85b0": null, "hs_analytics_num_visits": null, "hs_analytics_num_visits_cardinality_sum_53d952a6": null, "hs_analytics_source": null, "hs_analytics_source_data_1": null, "hs_analytics_source_data_1_timestamp_earliest_value_9b2f1fa1": null, "hs_analytics_source_data_2": null, "hs_analytics_source_data_2_timestamp_earliest_value_9b2f9400": null, "hs_analytics_source_timestamp_earliest_value_25a3a52c": null, "hs_avatar_filemanager_key": null, "hs_created_by_user_id": null, "hs_createdate": null, "hs_date_entered_customer": null, "hs_date_entered_evangelist": null, "hs_date_entered_lead": null, "hs_date_entered_marketingqualifiedlead": null, "hs_date_entered_opportunity": null, "hs_date_entered_other": null, "hs_date_entered_salesqualifiedlead": null, "hs_date_entered_subscriber": null, "hs_date_exited_customer": null, "hs_date_exited_evangelist": null, "hs_date_exited_lead": null, "hs_date_exited_marketingqualifiedlead": null, "hs_date_exited_opportunity": null, "hs_date_exited_other": null, "hs_date_exited_salesqualifiedlead": null, "hs_date_exited_subscriber": null, "hs_ideal_customer_profile": null, "hs_is_target_account": null, 
"hs_last_booked_meeting_date": null, "hs_last_logged_call_date": null, "hs_last_open_task_date": null, "hs_last_sales_activity_date": null, "hs_last_sales_activity_timestamp": null, "hs_lastmodifieddate": "2021-10-12T12:58:29.802000+00:00", "hs_latest_createdate_of_active_subscriptions": null, "hs_latest_meeting_activity": null, "hs_lead_status": null, "hs_merged_object_ids": null, "hs_num_blockers": 0.0, "hs_num_child_companies": 0.0, "hs_num_contacts_with_buying_roles": 0.0, "hs_num_decision_makers": 0.0, "hs_num_open_deals": 0.0, "hs_object_id": 7097776173, "hs_parent_company_id": null, "hs_pipeline": null, "hs_predictivecontactscore_v2": null, "hs_predictivecontactscore_v2_next_max_max_d4e58c1e": null, "hs_sales_email_last_replied": null, "hs_target_account": null, "hs_target_account_probability": 0.4076234698295593, "hs_target_account_recommendation_snooze_time": null, "hs_target_account_recommendation_state": null, "hs_time_in_customer": null, "hs_time_in_evangelist": null, "hs_time_in_lead": null, "hs_time_in_marketingqualifiedlead": null, "hs_time_in_opportunity": null, "hs_time_in_other": null, "hs_time_in_salesqualifiedlead": null, "hs_time_in_subscriber": null, "hs_total_deal_value": null, "hs_unique_creation_key": null, "hs_updated_by_user_id": null, "hs_user_ids_of_all_notification_followers": null, "hs_user_ids_of_all_notification_unfollowers": null, "hs_user_ids_of_all_owners": null, "hubspot_owner_assigneddate": null, "hubspot_owner_id": null, "hubspot_team_id": null, "hubspotscore": null, "industry": "Airbyte Test", "is_public": true, "lifecyclestage": null, "linkedin_company_page": "https://www.linkedin.com/company/bd-domain", "linkedinbio": null, "name": "test company 34", "notes_last_contacted": null, "notes_last_updated": null, "notes_next_activity_date": null, "num_associated_contacts": 0.0, "num_associated_deals": null, "num_contacted_notes": null, "num_conversion_events": null, "num_conversion_events_cardinality_sum_d095f14b": null, 
"num_notes": null, "numberofemployees": null, "phone": "(877) 929-0687-00291", "recent_conversion_date": null, "recent_conversion_date_timestamp_latest_value_72856da1": null, "recent_conversion_event_name": null, "recent_conversion_event_name_timestamp_latest_value_66c820bf": null, "recent_deal_amount": null, "recent_deal_close_date": null, "state": "Massachusetts", "timezone": "Asia/Dhaka", "total_money_raised": null, "total_revenue": null, "twitterbio": null, "twitterfollowers": null, "twitterhandle": null, "type": null, "web_technologies": "apache;nginx;facebook_social_plugins;facebook_connect;google_analytics;facebook_like_button;centos", "website": "test.domain.net", "zip": "1216"}, "createdAt": "2021-10-12T12:57:23.788Z", "updatedAt": "2021-10-12T12:58:29.802Z", "archived": false}, "emitted_at": 1655280716918} +{"stream": "companies", "data": {"id": "7097776179", "properties": {"about_us": null, "address": null, "address2": null, "annualrevenue": null, "city": "Test City 45", "closedate": null, "closedate_timestamp_earliest_value_a2a17e6e": null, "country": "Bangladesh", "createdate": "2021-10-12T12:57:27.833000+00:00", "days_to_close": null, "description": "This is a placeholder page installed by the Ubuntu release of the Lighttpd server package.", "domain": "test.domain.net", "engagements_last_meeting_booked": null, "engagements_last_meeting_booked_campaign": null, "engagements_last_meeting_booked_medium": null, "engagements_last_meeting_booked_source": null, "facebook_company_page": null, "facebookfans": null, "first_contact_createdate": null, "first_contact_createdate_timestamp_earliest_value_78b50eea": null, "first_conversion_date": null, "first_conversion_date_timestamp_earliest_value_61f58f2c": null, "first_conversion_event_name": null, "first_conversion_event_name_timestamp_earliest_value_68ddae0a": null, "first_deal_created_date": null, "founded_year": null, "googleplus_page": null, "hs_additional_domains": null, "hs_all_accessible_team_ids": null, 
"hs_all_assigned_business_unit_ids": null, "hs_all_owner_ids": null, "hs_all_team_ids": null, "hs_analytics_first_timestamp": null, "hs_analytics_first_timestamp_timestamp_earliest_value_11e3a63a": null, "hs_analytics_first_touch_converting_campaign": null, "hs_analytics_first_touch_converting_campaign_timestamp_earliest_value_4757fe10": null, "hs_analytics_first_visit_timestamp": null, "hs_analytics_first_visit_timestamp_timestamp_earliest_value_accc17ae": null, "hs_analytics_last_timestamp": null, "hs_analytics_last_timestamp_timestamp_latest_value_4e16365a": null, "hs_analytics_last_touch_converting_campaign": null, "hs_analytics_last_touch_converting_campaign_timestamp_latest_value_81a64e30": null, "hs_analytics_last_visit_timestamp": null, "hs_analytics_last_visit_timestamp_timestamp_latest_value_999a0fce": null, "hs_analytics_num_page_views": null, "hs_analytics_num_page_views_cardinality_sum_e46e85b0": null, "hs_analytics_num_visits": null, "hs_analytics_num_visits_cardinality_sum_53d952a6": null, "hs_analytics_source": null, "hs_analytics_source_data_1": null, "hs_analytics_source_data_1_timestamp_earliest_value_9b2f1fa1": null, "hs_analytics_source_data_2": null, "hs_analytics_source_data_2_timestamp_earliest_value_9b2f9400": null, "hs_analytics_source_timestamp_earliest_value_25a3a52c": null, "hs_avatar_filemanager_key": null, "hs_created_by_user_id": null, "hs_createdate": null, "hs_date_entered_customer": null, "hs_date_entered_evangelist": null, "hs_date_entered_lead": null, "hs_date_entered_marketingqualifiedlead": null, "hs_date_entered_opportunity": null, "hs_date_entered_other": null, "hs_date_entered_salesqualifiedlead": null, "hs_date_entered_subscriber": null, "hs_date_exited_customer": null, "hs_date_exited_evangelist": null, "hs_date_exited_lead": null, "hs_date_exited_marketingqualifiedlead": null, "hs_date_exited_opportunity": null, "hs_date_exited_other": null, "hs_date_exited_salesqualifiedlead": null, "hs_date_exited_subscriber": null, 
"hs_ideal_customer_profile": null, "hs_is_target_account": null, "hs_last_booked_meeting_date": null, "hs_last_logged_call_date": null, "hs_last_open_task_date": null, "hs_last_sales_activity_date": null, "hs_last_sales_activity_timestamp": null, "hs_lastmodifieddate": "2021-10-12T12:58:27.754000+00:00", "hs_latest_createdate_of_active_subscriptions": null, "hs_latest_meeting_activity": null, "hs_lead_status": null, "hs_merged_object_ids": null, "hs_num_blockers": 0.0, "hs_num_child_companies": 0.0, "hs_num_contacts_with_buying_roles": 0.0, "hs_num_decision_makers": 0.0, "hs_num_open_deals": 0.0, "hs_object_id": 7097776179, "hs_parent_company_id": null, "hs_pipeline": null, "hs_predictivecontactscore_v2": null, "hs_predictivecontactscore_v2_next_max_max_d4e58c1e": null, "hs_sales_email_last_replied": null, "hs_target_account": null, "hs_target_account_probability": 0.4076234698295593, "hs_target_account_recommendation_snooze_time": null, "hs_target_account_recommendation_state": null, "hs_time_in_customer": null, "hs_time_in_evangelist": null, "hs_time_in_lead": null, "hs_time_in_marketingqualifiedlead": null, "hs_time_in_opportunity": null, "hs_time_in_other": null, "hs_time_in_salesqualifiedlead": null, "hs_time_in_subscriber": null, "hs_total_deal_value": null, "hs_unique_creation_key": null, "hs_updated_by_user_id": null, "hs_user_ids_of_all_notification_followers": null, "hs_user_ids_of_all_notification_unfollowers": null, "hs_user_ids_of_all_owners": null, "hubspot_owner_assigneddate": null, "hubspot_owner_id": null, "hubspot_team_id": null, "hubspotscore": null, "industry": "Airbyte Test", "is_public": true, "lifecyclestage": null, "linkedin_company_page": "https://www.linkedin.com/company/bd-domain", "linkedinbio": null, "name": "test company 45", "notes_last_contacted": null, "notes_last_updated": null, "notes_next_activity_date": null, "num_associated_contacts": 0.0, "num_associated_deals": null, "num_contacted_notes": null, "num_conversion_events": null, 
"num_conversion_events_cardinality_sum_d095f14b": null, "num_notes": null, "numberofemployees": null, "phone": "(877) 929-0687-00291", "recent_conversion_date": null, "recent_conversion_date_timestamp_latest_value_72856da1": null, "recent_conversion_event_name": null, "recent_conversion_event_name_timestamp_latest_value_66c820bf": null, "recent_deal_amount": null, "recent_deal_close_date": null, "state": "Massachusetts", "timezone": "Asia/Dhaka", "total_money_raised": null, "total_revenue": null, "twitterbio": null, "twitterfollowers": null, "twitterhandle": null, "type": null, "web_technologies": "apache;nginx;facebook_social_plugins;facebook_connect;google_analytics;facebook_like_button;centos", "website": "test.domain.net", "zip": "1216"}, "createdAt": "2021-10-12T12:57:27.833Z", "updatedAt": "2021-10-12T12:58:27.754Z", "archived": false}, "emitted_at": 1655280716918} +{"stream": "companies", "data": {"id": "7097776185", "properties": {"about_us": null, "address": null, "address2": null, "annualrevenue": null, "city": "Test City 68", "closedate": null, "closedate_timestamp_earliest_value_a2a17e6e": null, "country": "Bangladesh", "createdate": "2021-10-12T12:57:35.100000+00:00", "days_to_close": null, "description": "This is a placeholder page installed by the Ubuntu release of the Lighttpd server package.", "domain": "test.domain.net", "engagements_last_meeting_booked": null, "engagements_last_meeting_booked_campaign": null, "engagements_last_meeting_booked_medium": null, "engagements_last_meeting_booked_source": null, "facebook_company_page": null, "facebookfans": null, "first_contact_createdate": null, "first_contact_createdate_timestamp_earliest_value_78b50eea": null, "first_conversion_date": null, "first_conversion_date_timestamp_earliest_value_61f58f2c": null, "first_conversion_event_name": null, "first_conversion_event_name_timestamp_earliest_value_68ddae0a": null, "first_deal_created_date": null, "founded_year": null, "googleplus_page": null, 
"hs_additional_domains": null, "hs_all_accessible_team_ids": null, "hs_all_assigned_business_unit_ids": null, "hs_all_owner_ids": null, "hs_all_team_ids": null, "hs_analytics_first_timestamp": null, "hs_analytics_first_timestamp_timestamp_earliest_value_11e3a63a": null, "hs_analytics_first_touch_converting_campaign": null, "hs_analytics_first_touch_converting_campaign_timestamp_earliest_value_4757fe10": null, "hs_analytics_first_visit_timestamp": null, "hs_analytics_first_visit_timestamp_timestamp_earliest_value_accc17ae": null, "hs_analytics_last_timestamp": null, "hs_analytics_last_timestamp_timestamp_latest_value_4e16365a": null, "hs_analytics_last_touch_converting_campaign": null, "hs_analytics_last_touch_converting_campaign_timestamp_latest_value_81a64e30": null, "hs_analytics_last_visit_timestamp": null, "hs_analytics_last_visit_timestamp_timestamp_latest_value_999a0fce": null, "hs_analytics_num_page_views": null, "hs_analytics_num_page_views_cardinality_sum_e46e85b0": null, "hs_analytics_num_visits": null, "hs_analytics_num_visits_cardinality_sum_53d952a6": null, "hs_analytics_source": null, "hs_analytics_source_data_1": null, "hs_analytics_source_data_1_timestamp_earliest_value_9b2f1fa1": null, "hs_analytics_source_data_2": null, "hs_analytics_source_data_2_timestamp_earliest_value_9b2f9400": null, "hs_analytics_source_timestamp_earliest_value_25a3a52c": null, "hs_avatar_filemanager_key": null, "hs_created_by_user_id": null, "hs_createdate": null, "hs_date_entered_customer": null, "hs_date_entered_evangelist": null, "hs_date_entered_lead": null, "hs_date_entered_marketingqualifiedlead": null, "hs_date_entered_opportunity": null, "hs_date_entered_other": null, "hs_date_entered_salesqualifiedlead": null, "hs_date_entered_subscriber": null, "hs_date_exited_customer": null, "hs_date_exited_evangelist": null, "hs_date_exited_lead": null, "hs_date_exited_marketingqualifiedlead": null, "hs_date_exited_opportunity": null, "hs_date_exited_other": null, 
"hs_date_exited_salesqualifiedlead": null, "hs_date_exited_subscriber": null, "hs_ideal_customer_profile": null, "hs_is_target_account": null, "hs_last_booked_meeting_date": null, "hs_last_logged_call_date": null, "hs_last_open_task_date": null, "hs_last_sales_activity_date": null, "hs_last_sales_activity_timestamp": null, "hs_lastmodifieddate": "2021-10-12T12:57:41.708000+00:00", "hs_latest_createdate_of_active_subscriptions": null, "hs_latest_meeting_activity": null, "hs_lead_status": null, "hs_merged_object_ids": null, "hs_num_blockers": 0.0, "hs_num_child_companies": 0.0, "hs_num_contacts_with_buying_roles": 0.0, "hs_num_decision_makers": 0.0, "hs_num_open_deals": 0.0, "hs_object_id": 7097776185, "hs_parent_company_id": null, "hs_pipeline": null, "hs_predictivecontactscore_v2": null, "hs_predictivecontactscore_v2_next_max_max_d4e58c1e": null, "hs_sales_email_last_replied": null, "hs_target_account": null, "hs_target_account_probability": 0.4076234698295593, "hs_target_account_recommendation_snooze_time": null, "hs_target_account_recommendation_state": null, "hs_time_in_customer": null, "hs_time_in_evangelist": null, "hs_time_in_lead": null, "hs_time_in_marketingqualifiedlead": null, "hs_time_in_opportunity": null, "hs_time_in_other": null, "hs_time_in_salesqualifiedlead": null, "hs_time_in_subscriber": null, "hs_total_deal_value": null, "hs_unique_creation_key": null, "hs_updated_by_user_id": null, "hs_user_ids_of_all_notification_followers": null, "hs_user_ids_of_all_notification_unfollowers": null, "hs_user_ids_of_all_owners": null, "hubspot_owner_assigneddate": null, "hubspot_owner_id": null, "hubspot_team_id": null, "hubspotscore": null, "industry": "Airbyte Test", "is_public": true, "lifecyclestage": null, "linkedin_company_page": "https://www.linkedin.com/company/bd-domain", "linkedinbio": null, "name": "test company 68", "notes_last_contacted": null, "notes_last_updated": null, "notes_next_activity_date": null, "num_associated_contacts": 0.0, 
"num_associated_deals": null, "num_contacted_notes": null, "num_conversion_events": null, "num_conversion_events_cardinality_sum_d095f14b": null, "num_notes": null, "numberofemployees": null, "phone": "(877) 929-0687-00291", "recent_conversion_date": null, "recent_conversion_date_timestamp_latest_value_72856da1": null, "recent_conversion_event_name": null, "recent_conversion_event_name_timestamp_latest_value_66c820bf": null, "recent_deal_amount": null, "recent_deal_close_date": null, "state": "Massachusetts", "timezone": "Asia/Dhaka", "total_money_raised": null, "total_revenue": null, "twitterbio": null, "twitterfollowers": null, "twitterhandle": null, "type": null, "web_technologies": "apache;nginx;facebook_social_plugins;facebook_connect;google_analytics;facebook_like_button;centos", "website": "test.domain.net", "zip": "1216"}, "createdAt": "2021-10-12T12:57:35.100Z", "updatedAt": "2021-10-12T12:57:41.708Z", "archived": false}, "emitted_at": 1655280716919} +{"stream": "companies", "data": {"id": "7097779831", "properties": {"about_us": null, "address": null, "address2": null, "annualrevenue": null, "city": "Test City 54", "closedate": null, "closedate_timestamp_earliest_value_a2a17e6e": null, "country": "Bangladesh", "createdate": "2021-10-12T12:57:30.826000+00:00", "days_to_close": null, "description": "This is a placeholder page installed by the Ubuntu release of the Lighttpd server package.", "domain": "test.domain.net", "engagements_last_meeting_booked": null, "engagements_last_meeting_booked_campaign": null, "engagements_last_meeting_booked_medium": null, "engagements_last_meeting_booked_source": null, "facebook_company_page": null, "facebookfans": null, "first_contact_createdate": null, "first_contact_createdate_timestamp_earliest_value_78b50eea": null, "first_conversion_date": null, "first_conversion_date_timestamp_earliest_value_61f58f2c": null, "first_conversion_event_name": null, "first_conversion_event_name_timestamp_earliest_value_68ddae0a": null, 
"first_deal_created_date": null, "founded_year": null, "googleplus_page": null, "hs_additional_domains": null, "hs_all_accessible_team_ids": null, "hs_all_assigned_business_unit_ids": null, "hs_all_owner_ids": null, "hs_all_team_ids": null, "hs_analytics_first_timestamp": null, "hs_analytics_first_timestamp_timestamp_earliest_value_11e3a63a": null, "hs_analytics_first_touch_converting_campaign": null, "hs_analytics_first_touch_converting_campaign_timestamp_earliest_value_4757fe10": null, "hs_analytics_first_visit_timestamp": null, "hs_analytics_first_visit_timestamp_timestamp_earliest_value_accc17ae": null, "hs_analytics_last_timestamp": null, "hs_analytics_last_timestamp_timestamp_latest_value_4e16365a": null, "hs_analytics_last_touch_converting_campaign": null, "hs_analytics_last_touch_converting_campaign_timestamp_latest_value_81a64e30": null, "hs_analytics_last_visit_timestamp": null, "hs_analytics_last_visit_timestamp_timestamp_latest_value_999a0fce": null, "hs_analytics_num_page_views": null, "hs_analytics_num_page_views_cardinality_sum_e46e85b0": null, "hs_analytics_num_visits": null, "hs_analytics_num_visits_cardinality_sum_53d952a6": null, "hs_analytics_source": null, "hs_analytics_source_data_1": null, "hs_analytics_source_data_1_timestamp_earliest_value_9b2f1fa1": null, "hs_analytics_source_data_2": null, "hs_analytics_source_data_2_timestamp_earliest_value_9b2f9400": null, "hs_analytics_source_timestamp_earliest_value_25a3a52c": null, "hs_avatar_filemanager_key": null, "hs_created_by_user_id": null, "hs_createdate": null, "hs_date_entered_customer": null, "hs_date_entered_evangelist": null, "hs_date_entered_lead": null, "hs_date_entered_marketingqualifiedlead": null, "hs_date_entered_opportunity": null, "hs_date_entered_other": null, "hs_date_entered_salesqualifiedlead": null, "hs_date_entered_subscriber": null, "hs_date_exited_customer": null, "hs_date_exited_evangelist": null, "hs_date_exited_lead": null, "hs_date_exited_marketingqualifiedlead": null, 
"hs_date_exited_opportunity": null, "hs_date_exited_other": null, "hs_date_exited_salesqualifiedlead": null, "hs_date_exited_subscriber": null, "hs_ideal_customer_profile": null, "hs_is_target_account": null, "hs_last_booked_meeting_date": null, "hs_last_logged_call_date": null, "hs_last_open_task_date": null, "hs_last_sales_activity_date": null, "hs_last_sales_activity_timestamp": null, "hs_lastmodifieddate": "2021-10-12T12:57:49.773000+00:00", "hs_latest_createdate_of_active_subscriptions": null, "hs_latest_meeting_activity": null, "hs_lead_status": null, "hs_merged_object_ids": null, "hs_num_blockers": 0.0, "hs_num_child_companies": 0.0, "hs_num_contacts_with_buying_roles": 0.0, "hs_num_decision_makers": 0.0, "hs_num_open_deals": 0.0, "hs_object_id": 7097779831, "hs_parent_company_id": null, "hs_pipeline": null, "hs_predictivecontactscore_v2": null, "hs_predictivecontactscore_v2_next_max_max_d4e58c1e": null, "hs_sales_email_last_replied": null, "hs_target_account": null, "hs_target_account_probability": 0.4076234698295593, "hs_target_account_recommendation_snooze_time": null, "hs_target_account_recommendation_state": null, "hs_time_in_customer": null, "hs_time_in_evangelist": null, "hs_time_in_lead": null, "hs_time_in_marketingqualifiedlead": null, "hs_time_in_opportunity": null, "hs_time_in_other": null, "hs_time_in_salesqualifiedlead": null, "hs_time_in_subscriber": null, "hs_total_deal_value": null, "hs_unique_creation_key": null, "hs_updated_by_user_id": null, "hs_user_ids_of_all_notification_followers": null, "hs_user_ids_of_all_notification_unfollowers": null, "hs_user_ids_of_all_owners": null, "hubspot_owner_assigneddate": null, "hubspot_owner_id": null, "hubspot_team_id": null, "hubspotscore": null, "industry": "Airbyte Test", "is_public": true, "lifecyclestage": null, "linkedin_company_page": "https://www.linkedin.com/company/bd-domain", "linkedinbio": null, "name": "test company 54", "notes_last_contacted": null, "notes_last_updated": null, 
"notes_next_activity_date": null, "num_associated_contacts": 0.0, "num_associated_deals": null, "num_contacted_notes": null, "num_conversion_events": null, "num_conversion_events_cardinality_sum_d095f14b": null, "num_notes": null, "numberofemployees": null, "phone": "(877) 929-0687-00291", "recent_conversion_date": null, "recent_conversion_date_timestamp_latest_value_72856da1": null, "recent_conversion_event_name": null, "recent_conversion_event_name_timestamp_latest_value_66c820bf": null, "recent_deal_amount": null, "recent_deal_close_date": null, "state": "Massachusetts", "timezone": "Asia/Dhaka", "total_money_raised": null, "total_revenue": null, "twitterbio": null, "twitterfollowers": null, "twitterhandle": null, "type": null, "web_technologies": "apache;nginx;facebook_social_plugins;facebook_connect;google_analytics;facebook_like_button;centos", "website": "test.domain.net", "zip": "1216"}, "createdAt": "2021-10-12T12:57:30.826Z", "updatedAt": "2021-10-12T12:57:49.773Z", "archived": false}, "emitted_at": 1655280716919} +{"stream": "companies", "data": {"id": "7097779832", "properties": {"about_us": null, "address": null, "address2": null, "annualrevenue": null, "city": "Test City 57", "closedate": null, "closedate_timestamp_earliest_value_a2a17e6e": null, "country": "Bangladesh", "createdate": "2021-10-12T12:57:31.736000+00:00", "days_to_close": null, "description": "This is a placeholder page installed by the Ubuntu release of the Lighttpd server package.", "domain": "test.domain.net", "engagements_last_meeting_booked": null, "engagements_last_meeting_booked_campaign": null, "engagements_last_meeting_booked_medium": null, "engagements_last_meeting_booked_source": null, "facebook_company_page": null, "facebookfans": null, "first_contact_createdate": null, "first_contact_createdate_timestamp_earliest_value_78b50eea": null, "first_conversion_date": null, "first_conversion_date_timestamp_earliest_value_61f58f2c": null, "first_conversion_event_name": null, 
"first_conversion_event_name_timestamp_earliest_value_68ddae0a": null, "first_deal_created_date": null, "founded_year": null, "googleplus_page": null, "hs_additional_domains": null, "hs_all_accessible_team_ids": null, "hs_all_assigned_business_unit_ids": null, "hs_all_owner_ids": null, "hs_all_team_ids": null, "hs_analytics_first_timestamp": null, "hs_analytics_first_timestamp_timestamp_earliest_value_11e3a63a": null, "hs_analytics_first_touch_converting_campaign": null, "hs_analytics_first_touch_converting_campaign_timestamp_earliest_value_4757fe10": null, "hs_analytics_first_visit_timestamp": null, "hs_analytics_first_visit_timestamp_timestamp_earliest_value_accc17ae": null, "hs_analytics_last_timestamp": null, "hs_analytics_last_timestamp_timestamp_latest_value_4e16365a": null, "hs_analytics_last_touch_converting_campaign": null, "hs_analytics_last_touch_converting_campaign_timestamp_latest_value_81a64e30": null, "hs_analytics_last_visit_timestamp": null, "hs_analytics_last_visit_timestamp_timestamp_latest_value_999a0fce": null, "hs_analytics_num_page_views": null, "hs_analytics_num_page_views_cardinality_sum_e46e85b0": null, "hs_analytics_num_visits": null, "hs_analytics_num_visits_cardinality_sum_53d952a6": null, "hs_analytics_source": null, "hs_analytics_source_data_1": null, "hs_analytics_source_data_1_timestamp_earliest_value_9b2f1fa1": null, "hs_analytics_source_data_2": null, "hs_analytics_source_data_2_timestamp_earliest_value_9b2f9400": null, "hs_analytics_source_timestamp_earliest_value_25a3a52c": null, "hs_avatar_filemanager_key": null, "hs_created_by_user_id": null, "hs_createdate": null, "hs_date_entered_customer": null, "hs_date_entered_evangelist": null, "hs_date_entered_lead": null, "hs_date_entered_marketingqualifiedlead": null, "hs_date_entered_opportunity": null, "hs_date_entered_other": null, "hs_date_entered_salesqualifiedlead": null, "hs_date_entered_subscriber": null, "hs_date_exited_customer": null, "hs_date_exited_evangelist": null, 
"hs_date_exited_lead": null, "hs_date_exited_marketingqualifiedlead": null, "hs_date_exited_opportunity": null, "hs_date_exited_other": null, "hs_date_exited_salesqualifiedlead": null, "hs_date_exited_subscriber": null, "hs_ideal_customer_profile": null, "hs_is_target_account": null, "hs_last_booked_meeting_date": null, "hs_last_logged_call_date": null, "hs_last_open_task_date": null, "hs_last_sales_activity_date": null, "hs_last_sales_activity_timestamp": null, "hs_lastmodifieddate": "2021-10-12T12:58:30.443000+00:00", "hs_latest_createdate_of_active_subscriptions": null, "hs_latest_meeting_activity": null, "hs_lead_status": null, "hs_merged_object_ids": null, "hs_num_blockers": 0.0, "hs_num_child_companies": 0.0, "hs_num_contacts_with_buying_roles": 0.0, "hs_num_decision_makers": 0.0, "hs_num_open_deals": 0.0, "hs_object_id": 7097779832, "hs_parent_company_id": null, "hs_pipeline": null, "hs_predictivecontactscore_v2": null, "hs_predictivecontactscore_v2_next_max_max_d4e58c1e": null, "hs_sales_email_last_replied": null, "hs_target_account": null, "hs_target_account_probability": 0.4076234698295593, "hs_target_account_recommendation_snooze_time": null, "hs_target_account_recommendation_state": null, "hs_time_in_customer": null, "hs_time_in_evangelist": null, "hs_time_in_lead": null, "hs_time_in_marketingqualifiedlead": null, "hs_time_in_opportunity": null, "hs_time_in_other": null, "hs_time_in_salesqualifiedlead": null, "hs_time_in_subscriber": null, "hs_total_deal_value": null, "hs_unique_creation_key": null, "hs_updated_by_user_id": null, "hs_user_ids_of_all_notification_followers": null, "hs_user_ids_of_all_notification_unfollowers": null, "hs_user_ids_of_all_owners": null, "hubspot_owner_assigneddate": null, "hubspot_owner_id": null, "hubspot_team_id": null, "hubspotscore": null, "industry": "Airbyte Test", "is_public": true, "lifecyclestage": null, "linkedin_company_page": "https://www.linkedin.com/company/bd-domain", "linkedinbio": null, "name": "test 
company 57", "notes_last_contacted": null, "notes_last_updated": null, "notes_next_activity_date": null, "num_associated_contacts": 0.0, "num_associated_deals": null, "num_contacted_notes": null, "num_conversion_events": null, "num_conversion_events_cardinality_sum_d095f14b": null, "num_notes": null, "numberofemployees": null, "phone": "(877) 929-0687-00291", "recent_conversion_date": null, "recent_conversion_date_timestamp_latest_value_72856da1": null, "recent_conversion_event_name": null, "recent_conversion_event_name_timestamp_latest_value_66c820bf": null, "recent_deal_amount": null, "recent_deal_close_date": null, "state": "Massachusetts", "timezone": "Asia/Dhaka", "total_money_raised": null, "total_revenue": null, "twitterbio": null, "twitterfollowers": null, "twitterhandle": null, "type": null, "web_technologies": "apache;nginx;facebook_social_plugins;facebook_connect;google_analytics;facebook_like_button;centos", "website": "test.domain.net", "zip": "1216"}, "createdAt": "2021-10-12T12:57:31.736Z", "updatedAt": "2021-10-12T12:58:30.443Z", "archived": false}, "emitted_at": 1655280716919} +{"stream": "companies", "data": {"id": "7097779841", "properties": {"about_us": null, "address": null, "address2": null, "annualrevenue": null, "city": "Test City 96", "closedate": null, "closedate_timestamp_earliest_value_a2a17e6e": null, "country": "Bangladesh", "createdate": "2021-10-12T12:57:45.538000+00:00", "days_to_close": null, "description": "This is a placeholder page installed by the Ubuntu release of the Lighttpd server package.", "domain": "test.domain.net", "engagements_last_meeting_booked": null, "engagements_last_meeting_booked_campaign": null, "engagements_last_meeting_booked_medium": null, "engagements_last_meeting_booked_source": null, "facebook_company_page": null, "facebookfans": null, "first_contact_createdate": null, "first_contact_createdate_timestamp_earliest_value_78b50eea": null, "first_conversion_date": null, 
"first_conversion_date_timestamp_earliest_value_61f58f2c": null, "first_conversion_event_name": null, "first_conversion_event_name_timestamp_earliest_value_68ddae0a": null, "first_deal_created_date": null, "founded_year": null, "googleplus_page": null, "hs_additional_domains": null, "hs_all_accessible_team_ids": null, "hs_all_assigned_business_unit_ids": null, "hs_all_owner_ids": null, "hs_all_team_ids": null, "hs_analytics_first_timestamp": null, "hs_analytics_first_timestamp_timestamp_earliest_value_11e3a63a": null, "hs_analytics_first_touch_converting_campaign": null, "hs_analytics_first_touch_converting_campaign_timestamp_earliest_value_4757fe10": null, "hs_analytics_first_visit_timestamp": null, "hs_analytics_first_visit_timestamp_timestamp_earliest_value_accc17ae": null, "hs_analytics_last_timestamp": null, "hs_analytics_last_timestamp_timestamp_latest_value_4e16365a": null, "hs_analytics_last_touch_converting_campaign": null, "hs_analytics_last_touch_converting_campaign_timestamp_latest_value_81a64e30": null, "hs_analytics_last_visit_timestamp": null, "hs_analytics_last_visit_timestamp_timestamp_latest_value_999a0fce": null, "hs_analytics_num_page_views": null, "hs_analytics_num_page_views_cardinality_sum_e46e85b0": null, "hs_analytics_num_visits": null, "hs_analytics_num_visits_cardinality_sum_53d952a6": null, "hs_analytics_source": null, "hs_analytics_source_data_1": null, "hs_analytics_source_data_1_timestamp_earliest_value_9b2f1fa1": null, "hs_analytics_source_data_2": null, "hs_analytics_source_data_2_timestamp_earliest_value_9b2f9400": null, "hs_analytics_source_timestamp_earliest_value_25a3a52c": null, "hs_avatar_filemanager_key": null, "hs_created_by_user_id": null, "hs_createdate": null, "hs_date_entered_customer": null, "hs_date_entered_evangelist": null, "hs_date_entered_lead": null, "hs_date_entered_marketingqualifiedlead": null, "hs_date_entered_opportunity": null, "hs_date_entered_other": null, "hs_date_entered_salesqualifiedlead": null, 
"hs_date_entered_subscriber": null, "hs_date_exited_customer": null, "hs_date_exited_evangelist": null, "hs_date_exited_lead": null, "hs_date_exited_marketingqualifiedlead": null, "hs_date_exited_opportunity": null, "hs_date_exited_other": null, "hs_date_exited_salesqualifiedlead": null, "hs_date_exited_subscriber": null, "hs_ideal_customer_profile": null, "hs_is_target_account": null, "hs_last_booked_meeting_date": null, "hs_last_logged_call_date": null, "hs_last_open_task_date": null, "hs_last_sales_activity_date": null, "hs_last_sales_activity_timestamp": null, "hs_lastmodifieddate": "2021-10-12T12:57:50.079000+00:00", "hs_latest_createdate_of_active_subscriptions": null, "hs_latest_meeting_activity": null, "hs_lead_status": null, "hs_merged_object_ids": null, "hs_num_blockers": 0.0, "hs_num_child_companies": 0.0, "hs_num_contacts_with_buying_roles": 0.0, "hs_num_decision_makers": 0.0, "hs_num_open_deals": 0.0, "hs_object_id": 7097779841, "hs_parent_company_id": null, "hs_pipeline": null, "hs_predictivecontactscore_v2": null, "hs_predictivecontactscore_v2_next_max_max_d4e58c1e": null, "hs_sales_email_last_replied": null, "hs_target_account": null, "hs_target_account_probability": 0.4076234698295593, "hs_target_account_recommendation_snooze_time": null, "hs_target_account_recommendation_state": null, "hs_time_in_customer": null, "hs_time_in_evangelist": null, "hs_time_in_lead": null, "hs_time_in_marketingqualifiedlead": null, "hs_time_in_opportunity": null, "hs_time_in_other": null, "hs_time_in_salesqualifiedlead": null, "hs_time_in_subscriber": null, "hs_total_deal_value": null, "hs_unique_creation_key": null, "hs_updated_by_user_id": null, "hs_user_ids_of_all_notification_followers": null, "hs_user_ids_of_all_notification_unfollowers": null, "hs_user_ids_of_all_owners": null, "hubspot_owner_assigneddate": null, "hubspot_owner_id": null, "hubspot_team_id": null, "hubspotscore": null, "industry": "Airbyte Test", "is_public": true, "lifecyclestage": null, 
"linkedin_company_page": "https://www.linkedin.com/company/bd-domain", "linkedinbio": null, "name": "test company 96", "notes_last_contacted": null, "notes_last_updated": null, "notes_next_activity_date": null, "num_associated_contacts": 0.0, "num_associated_deals": null, "num_contacted_notes": null, "num_conversion_events": null, "num_conversion_events_cardinality_sum_d095f14b": null, "num_notes": null, "numberofemployees": null, "phone": "(877) 929-0687-00291", "recent_conversion_date": null, "recent_conversion_date_timestamp_latest_value_72856da1": null, "recent_conversion_event_name": null, "recent_conversion_event_name_timestamp_latest_value_66c820bf": null, "recent_deal_amount": null, "recent_deal_close_date": null, "state": "Massachusetts", "timezone": "Asia/Dhaka", "total_money_raised": null, "total_revenue": null, "twitterbio": null, "twitterfollowers": null, "twitterhandle": null, "type": null, "web_technologies": "apache;nginx;facebook_social_plugins;facebook_connect;google_analytics;facebook_like_button;centos", "website": "test.domain.net", "zip": "1216"}, "createdAt": "2021-10-12T12:57:45.538Z", "updatedAt": "2021-10-12T12:57:50.079Z", "archived": false}, "emitted_at": 1655280716920} +{"stream": "companies", "data": {"id": "7097783388", "properties": {"about_us": null, "address": null, "address2": null, "annualrevenue": null, "city": "Test City 56", "closedate": null, "closedate_timestamp_earliest_value_a2a17e6e": null, "country": "Bangladesh", "createdate": "2021-10-12T12:57:31.422000+00:00", "days_to_close": null, "description": "This is a placeholder page installed by the Ubuntu release of the Lighttpd server package.", "domain": "test.domain.net", "engagements_last_meeting_booked": null, "engagements_last_meeting_booked_campaign": null, "engagements_last_meeting_booked_medium": null, "engagements_last_meeting_booked_source": null, "facebook_company_page": null, "facebookfans": null, "first_contact_createdate": null, 
"first_contact_createdate_timestamp_earliest_value_78b50eea": null, "first_conversion_date": null, "first_conversion_date_timestamp_earliest_value_61f58f2c": null, "first_conversion_event_name": null, "first_conversion_event_name_timestamp_earliest_value_68ddae0a": null, "first_deal_created_date": null, "founded_year": null, "googleplus_page": null, "hs_additional_domains": null, "hs_all_accessible_team_ids": null, "hs_all_assigned_business_unit_ids": null, "hs_all_owner_ids": null, "hs_all_team_ids": null, "hs_analytics_first_timestamp": null, "hs_analytics_first_timestamp_timestamp_earliest_value_11e3a63a": null, "hs_analytics_first_touch_converting_campaign": null, "hs_analytics_first_touch_converting_campaign_timestamp_earliest_value_4757fe10": null, "hs_analytics_first_visit_timestamp": null, "hs_analytics_first_visit_timestamp_timestamp_earliest_value_accc17ae": null, "hs_analytics_last_timestamp": null, "hs_analytics_last_timestamp_timestamp_latest_value_4e16365a": null, "hs_analytics_last_touch_converting_campaign": null, "hs_analytics_last_touch_converting_campaign_timestamp_latest_value_81a64e30": null, "hs_analytics_last_visit_timestamp": null, "hs_analytics_last_visit_timestamp_timestamp_latest_value_999a0fce": null, "hs_analytics_num_page_views": null, "hs_analytics_num_page_views_cardinality_sum_e46e85b0": null, "hs_analytics_num_visits": null, "hs_analytics_num_visits_cardinality_sum_53d952a6": null, "hs_analytics_source": null, "hs_analytics_source_data_1": null, "hs_analytics_source_data_1_timestamp_earliest_value_9b2f1fa1": null, "hs_analytics_source_data_2": null, "hs_analytics_source_data_2_timestamp_earliest_value_9b2f9400": null, "hs_analytics_source_timestamp_earliest_value_25a3a52c": null, "hs_avatar_filemanager_key": null, "hs_created_by_user_id": null, "hs_createdate": null, "hs_date_entered_customer": null, "hs_date_entered_evangelist": null, "hs_date_entered_lead": null, "hs_date_entered_marketingqualifiedlead": null, 
"hs_date_entered_opportunity": null, "hs_date_entered_other": null, "hs_date_entered_salesqualifiedlead": null, "hs_date_entered_subscriber": null, "hs_date_exited_customer": null, "hs_date_exited_evangelist": null, "hs_date_exited_lead": null, "hs_date_exited_marketingqualifiedlead": null, "hs_date_exited_opportunity": null, "hs_date_exited_other": null, "hs_date_exited_salesqualifiedlead": null, "hs_date_exited_subscriber": null, "hs_ideal_customer_profile": null, "hs_is_target_account": null, "hs_last_booked_meeting_date": null, "hs_last_logged_call_date": null, "hs_last_open_task_date": null, "hs_last_sales_activity_date": null, "hs_last_sales_activity_timestamp": null, "hs_lastmodifieddate": "2021-10-12T12:57:39.082000+00:00", "hs_latest_createdate_of_active_subscriptions": null, "hs_latest_meeting_activity": null, "hs_lead_status": null, "hs_merged_object_ids": null, "hs_num_blockers": 0.0, "hs_num_child_companies": 0.0, "hs_num_contacts_with_buying_roles": 0.0, "hs_num_decision_makers": 0.0, "hs_num_open_deals": 0.0, "hs_object_id": 7097783388, "hs_parent_company_id": null, "hs_pipeline": null, "hs_predictivecontactscore_v2": null, "hs_predictivecontactscore_v2_next_max_max_d4e58c1e": null, "hs_sales_email_last_replied": null, "hs_target_account": null, "hs_target_account_probability": 0.4076234698295593, "hs_target_account_recommendation_snooze_time": null, "hs_target_account_recommendation_state": null, "hs_time_in_customer": null, "hs_time_in_evangelist": null, "hs_time_in_lead": null, "hs_time_in_marketingqualifiedlead": null, "hs_time_in_opportunity": null, "hs_time_in_other": null, "hs_time_in_salesqualifiedlead": null, "hs_time_in_subscriber": null, "hs_total_deal_value": null, "hs_unique_creation_key": null, "hs_updated_by_user_id": null, "hs_user_ids_of_all_notification_followers": null, "hs_user_ids_of_all_notification_unfollowers": null, "hs_user_ids_of_all_owners": null, "hubspot_owner_assigneddate": null, "hubspot_owner_id": null, 
"hubspot_team_id": null, "hubspotscore": null, "industry": "Airbyte Test", "is_public": true, "lifecyclestage": null, "linkedin_company_page": "https://www.linkedin.com/company/bd-domain", "linkedinbio": null, "name": "test company 56", "notes_last_contacted": null, "notes_last_updated": null, "notes_next_activity_date": null, "num_associated_contacts": 0.0, "num_associated_deals": null, "num_contacted_notes": null, "num_conversion_events": null, "num_conversion_events_cardinality_sum_d095f14b": null, "num_notes": null, "numberofemployees": null, "phone": "(877) 929-0687-00291", "recent_conversion_date": null, "recent_conversion_date_timestamp_latest_value_72856da1": null, "recent_conversion_event_name": null, "recent_conversion_event_name_timestamp_latest_value_66c820bf": null, "recent_deal_amount": null, "recent_deal_close_date": null, "state": "Massachusetts", "timezone": "Asia/Dhaka", "total_money_raised": null, "total_revenue": null, "twitterbio": null, "twitterfollowers": null, "twitterhandle": null, "type": null, "web_technologies": "apache;nginx;facebook_social_plugins;facebook_connect;google_analytics;facebook_like_button;centos", "website": "test.domain.net", "zip": "1216"}, "createdAt": "2021-10-12T12:57:31.422Z", "updatedAt": "2021-10-12T12:57:39.082Z", "archived": false}, "emitted_at": 1655280716920} +{"stream": "companies", "data": {"id": "7097783389", "properties": {"about_us": null, "address": null, "address2": null, "annualrevenue": null, "city": "Test City 59", "closedate": null, "closedate_timestamp_earliest_value_a2a17e6e": null, "country": "Bangladesh", "createdate": "2021-10-12T12:57:32.327000+00:00", "days_to_close": null, "description": "This is a placeholder page installed by the Ubuntu release of the Lighttpd server package.", "domain": "test.domain.net", "engagements_last_meeting_booked": null, "engagements_last_meeting_booked_campaign": null, "engagements_last_meeting_booked_medium": null, "engagements_last_meeting_booked_source": null, 
"facebook_company_page": null, "facebookfans": null, "first_contact_createdate": null, "first_contact_createdate_timestamp_earliest_value_78b50eea": null, "first_conversion_date": null, "first_conversion_date_timestamp_earliest_value_61f58f2c": null, "first_conversion_event_name": null, "first_conversion_event_name_timestamp_earliest_value_68ddae0a": null, "first_deal_created_date": null, "founded_year": null, "googleplus_page": null, "hs_additional_domains": null, "hs_all_accessible_team_ids": null, "hs_all_assigned_business_unit_ids": null, "hs_all_owner_ids": null, "hs_all_team_ids": null, "hs_analytics_first_timestamp": null, "hs_analytics_first_timestamp_timestamp_earliest_value_11e3a63a": null, "hs_analytics_first_touch_converting_campaign": null, "hs_analytics_first_touch_converting_campaign_timestamp_earliest_value_4757fe10": null, "hs_analytics_first_visit_timestamp": null, "hs_analytics_first_visit_timestamp_timestamp_earliest_value_accc17ae": null, "hs_analytics_last_timestamp": null, "hs_analytics_last_timestamp_timestamp_latest_value_4e16365a": null, "hs_analytics_last_touch_converting_campaign": null, "hs_analytics_last_touch_converting_campaign_timestamp_latest_value_81a64e30": null, "hs_analytics_last_visit_timestamp": null, "hs_analytics_last_visit_timestamp_timestamp_latest_value_999a0fce": null, "hs_analytics_num_page_views": null, "hs_analytics_num_page_views_cardinality_sum_e46e85b0": null, "hs_analytics_num_visits": null, "hs_analytics_num_visits_cardinality_sum_53d952a6": null, "hs_analytics_source": null, "hs_analytics_source_data_1": null, "hs_analytics_source_data_1_timestamp_earliest_value_9b2f1fa1": null, "hs_analytics_source_data_2": null, "hs_analytics_source_data_2_timestamp_earliest_value_9b2f9400": null, "hs_analytics_source_timestamp_earliest_value_25a3a52c": null, "hs_avatar_filemanager_key": null, "hs_created_by_user_id": null, "hs_createdate": null, "hs_date_entered_customer": null, "hs_date_entered_evangelist": null, 
"hs_date_entered_lead": null, "hs_date_entered_marketingqualifiedlead": null, "hs_date_entered_opportunity": null, "hs_date_entered_other": null, "hs_date_entered_salesqualifiedlead": null, "hs_date_entered_subscriber": null, "hs_date_exited_customer": null, "hs_date_exited_evangelist": null, "hs_date_exited_lead": null, "hs_date_exited_marketingqualifiedlead": null, "hs_date_exited_opportunity": null, "hs_date_exited_other": null, "hs_date_exited_salesqualifiedlead": null, "hs_date_exited_subscriber": null, "hs_ideal_customer_profile": null, "hs_is_target_account": null, "hs_last_booked_meeting_date": null, "hs_last_logged_call_date": null, "hs_last_open_task_date": null, "hs_last_sales_activity_date": null, "hs_last_sales_activity_timestamp": null, "hs_lastmodifieddate": "2021-10-12T12:58:26.403000+00:00", "hs_latest_createdate_of_active_subscriptions": null, "hs_latest_meeting_activity": null, "hs_lead_status": null, "hs_merged_object_ids": null, "hs_num_blockers": 0.0, "hs_num_child_companies": 0.0, "hs_num_contacts_with_buying_roles": 0.0, "hs_num_decision_makers": 0.0, "hs_num_open_deals": 0.0, "hs_object_id": 7097783389, "hs_parent_company_id": null, "hs_pipeline": null, "hs_predictivecontactscore_v2": null, "hs_predictivecontactscore_v2_next_max_max_d4e58c1e": null, "hs_sales_email_last_replied": null, "hs_target_account": null, "hs_target_account_probability": 0.4076234698295593, "hs_target_account_recommendation_snooze_time": null, "hs_target_account_recommendation_state": null, "hs_time_in_customer": null, "hs_time_in_evangelist": null, "hs_time_in_lead": null, "hs_time_in_marketingqualifiedlead": null, "hs_time_in_opportunity": null, "hs_time_in_other": null, "hs_time_in_salesqualifiedlead": null, "hs_time_in_subscriber": null, "hs_total_deal_value": null, "hs_unique_creation_key": null, "hs_updated_by_user_id": null, "hs_user_ids_of_all_notification_followers": null, "hs_user_ids_of_all_notification_unfollowers": null, "hs_user_ids_of_all_owners": 
null, "hubspot_owner_assigneddate": null, "hubspot_owner_id": null, "hubspot_team_id": null, "hubspotscore": null, "industry": "Airbyte Test", "is_public": true, "lifecyclestage": null, "linkedin_company_page": "https://www.linkedin.com/company/bd-domain", "linkedinbio": null, "name": "test company 59", "notes_last_contacted": null, "notes_last_updated": null, "notes_next_activity_date": null, "num_associated_contacts": 0.0, "num_associated_deals": null, "num_contacted_notes": null, "num_conversion_events": null, "num_conversion_events_cardinality_sum_d095f14b": null, "num_notes": null, "numberofemployees": null, "phone": "(877) 929-0687-00291", "recent_conversion_date": null, "recent_conversion_date_timestamp_latest_value_72856da1": null, "recent_conversion_event_name": null, "recent_conversion_event_name_timestamp_latest_value_66c820bf": null, "recent_deal_amount": null, "recent_deal_close_date": null, "state": "Massachusetts", "timezone": "Asia/Dhaka", "total_money_raised": null, "total_revenue": null, "twitterbio": null, "twitterfollowers": null, "twitterhandle": null, "type": null, "web_technologies": "apache;nginx;facebook_social_plugins;facebook_connect;google_analytics;facebook_like_button;centos", "website": "test.domain.net", "zip": "1216"}, "createdAt": "2021-10-12T12:57:32.327Z", "updatedAt": "2021-10-12T12:58:26.403Z", "archived": false}, "emitted_at": 1655280716921} +{"stream": "companies", "data": {"id": "7097783392", "properties": {"about_us": null, "address": null, "address2": null, "annualrevenue": null, "city": "Test City 72", "closedate": null, "closedate_timestamp_earliest_value_a2a17e6e": null, "country": "Bangladesh", "createdate": "2021-10-12T12:57:36.397000+00:00", "days_to_close": null, "description": "This is a placeholder page installed by the Ubuntu release of the Lighttpd server package.", "domain": "test.domain.net", "engagements_last_meeting_booked": null, "engagements_last_meeting_booked_campaign": null, 
"engagements_last_meeting_booked_medium": null, "engagements_last_meeting_booked_source": null, "facebook_company_page": null, "facebookfans": null, "first_contact_createdate": null, "first_contact_createdate_timestamp_earliest_value_78b50eea": null, "first_conversion_date": null, "first_conversion_date_timestamp_earliest_value_61f58f2c": null, "first_conversion_event_name": null, "first_conversion_event_name_timestamp_earliest_value_68ddae0a": null, "first_deal_created_date": null, "founded_year": null, "googleplus_page": null, "hs_additional_domains": null, "hs_all_accessible_team_ids": null, "hs_all_assigned_business_unit_ids": null, "hs_all_owner_ids": null, "hs_all_team_ids": null, "hs_analytics_first_timestamp": null, "hs_analytics_first_timestamp_timestamp_earliest_value_11e3a63a": null, "hs_analytics_first_touch_converting_campaign": null, "hs_analytics_first_touch_converting_campaign_timestamp_earliest_value_4757fe10": null, "hs_analytics_first_visit_timestamp": null, "hs_analytics_first_visit_timestamp_timestamp_earliest_value_accc17ae": null, "hs_analytics_last_timestamp": null, "hs_analytics_last_timestamp_timestamp_latest_value_4e16365a": null, "hs_analytics_last_touch_converting_campaign": null, "hs_analytics_last_touch_converting_campaign_timestamp_latest_value_81a64e30": null, "hs_analytics_last_visit_timestamp": null, "hs_analytics_last_visit_timestamp_timestamp_latest_value_999a0fce": null, "hs_analytics_num_page_views": null, "hs_analytics_num_page_views_cardinality_sum_e46e85b0": null, "hs_analytics_num_visits": null, "hs_analytics_num_visits_cardinality_sum_53d952a6": null, "hs_analytics_source": null, "hs_analytics_source_data_1": null, "hs_analytics_source_data_1_timestamp_earliest_value_9b2f1fa1": null, "hs_analytics_source_data_2": null, "hs_analytics_source_data_2_timestamp_earliest_value_9b2f9400": null, "hs_analytics_source_timestamp_earliest_value_25a3a52c": null, "hs_avatar_filemanager_key": null, "hs_created_by_user_id": null, 
"hs_createdate": null, "hs_date_entered_customer": null, "hs_date_entered_evangelist": null, "hs_date_entered_lead": null, "hs_date_entered_marketingqualifiedlead": null, "hs_date_entered_opportunity": null, "hs_date_entered_other": null, "hs_date_entered_salesqualifiedlead": null, "hs_date_entered_subscriber": null, "hs_date_exited_customer": null, "hs_date_exited_evangelist": null, "hs_date_exited_lead": null, "hs_date_exited_marketingqualifiedlead": null, "hs_date_exited_opportunity": null, "hs_date_exited_other": null, "hs_date_exited_salesqualifiedlead": null, "hs_date_exited_subscriber": null, "hs_ideal_customer_profile": null, "hs_is_target_account": null, "hs_last_booked_meeting_date": null, "hs_last_logged_call_date": null, "hs_last_open_task_date": null, "hs_last_sales_activity_date": null, "hs_last_sales_activity_timestamp": null, "hs_lastmodifieddate": "2021-10-12T12:57:52.184000+00:00", "hs_latest_createdate_of_active_subscriptions": null, "hs_latest_meeting_activity": null, "hs_lead_status": null, "hs_merged_object_ids": null, "hs_num_blockers": 0.0, "hs_num_child_companies": 0.0, "hs_num_contacts_with_buying_roles": 0.0, "hs_num_decision_makers": 0.0, "hs_num_open_deals": 0.0, "hs_object_id": 7097783392, "hs_parent_company_id": null, "hs_pipeline": null, "hs_predictivecontactscore_v2": null, "hs_predictivecontactscore_v2_next_max_max_d4e58c1e": null, "hs_sales_email_last_replied": null, "hs_target_account": null, "hs_target_account_probability": 0.4076234698295593, "hs_target_account_recommendation_snooze_time": null, "hs_target_account_recommendation_state": null, "hs_time_in_customer": null, "hs_time_in_evangelist": null, "hs_time_in_lead": null, "hs_time_in_marketingqualifiedlead": null, "hs_time_in_opportunity": null, "hs_time_in_other": null, "hs_time_in_salesqualifiedlead": null, "hs_time_in_subscriber": null, "hs_total_deal_value": null, "hs_unique_creation_key": null, "hs_updated_by_user_id": null, "hs_user_ids_of_all_notification_followers": 
null, "hs_user_ids_of_all_notification_unfollowers": null, "hs_user_ids_of_all_owners": null, "hubspot_owner_assigneddate": null, "hubspot_owner_id": null, "hubspot_team_id": null, "hubspotscore": null, "industry": "Airbyte Test", "is_public": true, "lifecyclestage": null, "linkedin_company_page": "https://www.linkedin.com/company/bd-domain", "linkedinbio": null, "name": "test company 72", "notes_last_contacted": null, "notes_last_updated": null, "notes_next_activity_date": null, "num_associated_contacts": 0.0, "num_associated_deals": null, "num_contacted_notes": null, "num_conversion_events": null, "num_conversion_events_cardinality_sum_d095f14b": null, "num_notes": null, "numberofemployees": null, "phone": "(877) 929-0687-00291", "recent_conversion_date": null, "recent_conversion_date_timestamp_latest_value_72856da1": null, "recent_conversion_event_name": null, "recent_conversion_event_name_timestamp_latest_value_66c820bf": null, "recent_deal_amount": null, "recent_deal_close_date": null, "state": "Massachusetts", "timezone": "Asia/Dhaka", "total_money_raised": null, "total_revenue": null, "twitterbio": null, "twitterfollowers": null, "twitterhandle": null, "type": null, "web_technologies": "apache;nginx;facebook_social_plugins;facebook_connect;google_analytics;facebook_like_button;centos", "website": "test.domain.net", "zip": "1216"}, "createdAt": "2021-10-12T12:57:36.397Z", "updatedAt": "2021-10-12T12:57:52.184Z", "archived": false}, "emitted_at": 1655280716921} +{"stream": "companies", "data": {"id": "7097783394", "properties": {"about_us": null, "address": null, "address2": null, "annualrevenue": null, "city": "Test City 92", "closedate": null, "closedate_timestamp_earliest_value_a2a17e6e": null, "country": "Bangladesh", "createdate": "2021-10-12T12:57:43.994000+00:00", "days_to_close": null, "description": "This is a placeholder page installed by the Ubuntu release of the Lighttpd server package.", "domain": "test.domain.net", 
"engagements_last_meeting_booked": null, "engagements_last_meeting_booked_campaign": null, "engagements_last_meeting_booked_medium": null, "engagements_last_meeting_booked_source": null, "facebook_company_page": null, "facebookfans": null, "first_contact_createdate": null, "first_contact_createdate_timestamp_earliest_value_78b50eea": null, "first_conversion_date": null, "first_conversion_date_timestamp_earliest_value_61f58f2c": null, "first_conversion_event_name": null, "first_conversion_event_name_timestamp_earliest_value_68ddae0a": null, "first_deal_created_date": null, "founded_year": null, "googleplus_page": null, "hs_additional_domains": null, "hs_all_accessible_team_ids": null, "hs_all_assigned_business_unit_ids": null, "hs_all_owner_ids": null, "hs_all_team_ids": null, "hs_analytics_first_timestamp": null, "hs_analytics_first_timestamp_timestamp_earliest_value_11e3a63a": null, "hs_analytics_first_touch_converting_campaign": null, "hs_analytics_first_touch_converting_campaign_timestamp_earliest_value_4757fe10": null, "hs_analytics_first_visit_timestamp": null, "hs_analytics_first_visit_timestamp_timestamp_earliest_value_accc17ae": null, "hs_analytics_last_timestamp": null, "hs_analytics_last_timestamp_timestamp_latest_value_4e16365a": null, "hs_analytics_last_touch_converting_campaign": null, "hs_analytics_last_touch_converting_campaign_timestamp_latest_value_81a64e30": null, "hs_analytics_last_visit_timestamp": null, "hs_analytics_last_visit_timestamp_timestamp_latest_value_999a0fce": null, "hs_analytics_num_page_views": null, "hs_analytics_num_page_views_cardinality_sum_e46e85b0": null, "hs_analytics_num_visits": null, "hs_analytics_num_visits_cardinality_sum_53d952a6": null, "hs_analytics_source": null, "hs_analytics_source_data_1": null, "hs_analytics_source_data_1_timestamp_earliest_value_9b2f1fa1": null, "hs_analytics_source_data_2": null, "hs_analytics_source_data_2_timestamp_earliest_value_9b2f9400": null, 
"hs_analytics_source_timestamp_earliest_value_25a3a52c": null, "hs_avatar_filemanager_key": null, "hs_created_by_user_id": null, "hs_createdate": null, "hs_date_entered_customer": null, "hs_date_entered_evangelist": null, "hs_date_entered_lead": null, "hs_date_entered_marketingqualifiedlead": null, "hs_date_entered_opportunity": null, "hs_date_entered_other": null, "hs_date_entered_salesqualifiedlead": null, "hs_date_entered_subscriber": null, "hs_date_exited_customer": null, "hs_date_exited_evangelist": null, "hs_date_exited_lead": null, "hs_date_exited_marketingqualifiedlead": null, "hs_date_exited_opportunity": null, "hs_date_exited_other": null, "hs_date_exited_salesqualifiedlead": null, "hs_date_exited_subscriber": null, "hs_ideal_customer_profile": null, "hs_is_target_account": null, "hs_last_booked_meeting_date": null, "hs_last_logged_call_date": null, "hs_last_open_task_date": null, "hs_last_sales_activity_date": null, "hs_last_sales_activity_timestamp": null, "hs_lastmodifieddate": "2021-10-12T12:57:54.757000+00:00", "hs_latest_createdate_of_active_subscriptions": null, "hs_latest_meeting_activity": null, "hs_lead_status": null, "hs_merged_object_ids": null, "hs_num_blockers": 0.0, "hs_num_child_companies": 0.0, "hs_num_contacts_with_buying_roles": 0.0, "hs_num_decision_makers": 0.0, "hs_num_open_deals": 0.0, "hs_object_id": 7097783394, "hs_parent_company_id": null, "hs_pipeline": null, "hs_predictivecontactscore_v2": null, "hs_predictivecontactscore_v2_next_max_max_d4e58c1e": null, "hs_sales_email_last_replied": null, "hs_target_account": null, "hs_target_account_probability": 0.4076234698295593, "hs_target_account_recommendation_snooze_time": null, "hs_target_account_recommendation_state": null, "hs_time_in_customer": null, "hs_time_in_evangelist": null, "hs_time_in_lead": null, "hs_time_in_marketingqualifiedlead": null, "hs_time_in_opportunity": null, "hs_time_in_other": null, "hs_time_in_salesqualifiedlead": null, "hs_time_in_subscriber": null, 
"hs_total_deal_value": null, "hs_unique_creation_key": null, "hs_updated_by_user_id": null, "hs_user_ids_of_all_notification_followers": null, "hs_user_ids_of_all_notification_unfollowers": null, "hs_user_ids_of_all_owners": null, "hubspot_owner_assigneddate": null, "hubspot_owner_id": null, "hubspot_team_id": null, "hubspotscore": null, "industry": "Airbyte Test", "is_public": true, "lifecyclestage": null, "linkedin_company_page": "https://www.linkedin.com/company/bd-domain", "linkedinbio": null, "name": "test company 92", "notes_last_contacted": null, "notes_last_updated": null, "notes_next_activity_date": null, "num_associated_contacts": 0.0, "num_associated_deals": null, "num_contacted_notes": null, "num_conversion_events": null, "num_conversion_events_cardinality_sum_d095f14b": null, "num_notes": null, "numberofemployees": null, "phone": "(877) 929-0687-00291", "recent_conversion_date": null, "recent_conversion_date_timestamp_latest_value_72856da1": null, "recent_conversion_event_name": null, "recent_conversion_event_name_timestamp_latest_value_66c820bf": null, "recent_deal_amount": null, "recent_deal_close_date": null, "state": "Massachusetts", "timezone": "Asia/Dhaka", "total_money_raised": null, "total_revenue": null, "twitterbio": null, "twitterfollowers": null, "twitterhandle": null, "type": null, "web_technologies": "apache;nginx;facebook_social_plugins;facebook_connect;google_analytics;facebook_like_button;centos", "website": "test.domain.net", "zip": "1216"}, "createdAt": "2021-10-12T12:57:43.994Z", "updatedAt": "2021-10-12T12:57:54.757Z", "archived": false}, "emitted_at": 1655280716922} +{"stream": "companies", "data": {"id": "7097787471", "properties": {"about_us": null, "address": null, "address2": null, "annualrevenue": null, "city": "Test City 2", "closedate": null, "closedate_timestamp_earliest_value_a2a17e6e": null, "country": "Bangladesh", "createdate": "2021-10-12T12:57:12.890000+00:00", "days_to_close": null, "description": "This is a 
placeholder page installed by the Ubuntu release of the Lighttpd server package.", "domain": "test.domain.net", "engagements_last_meeting_booked": null, "engagements_last_meeting_booked_campaign": null, "engagements_last_meeting_booked_medium": null, "engagements_last_meeting_booked_source": null, "facebook_company_page": null, "facebookfans": null, "first_contact_createdate": null, "first_contact_createdate_timestamp_earliest_value_78b50eea": null, "first_conversion_date": null, "first_conversion_date_timestamp_earliest_value_61f58f2c": null, "first_conversion_event_name": null, "first_conversion_event_name_timestamp_earliest_value_68ddae0a": null, "first_deal_created_date": null, "founded_year": null, "googleplus_page": null, "hs_additional_domains": null, "hs_all_accessible_team_ids": null, "hs_all_assigned_business_unit_ids": null, "hs_all_owner_ids": null, "hs_all_team_ids": null, "hs_analytics_first_timestamp": null, "hs_analytics_first_timestamp_timestamp_earliest_value_11e3a63a": null, "hs_analytics_first_touch_converting_campaign": null, "hs_analytics_first_touch_converting_campaign_timestamp_earliest_value_4757fe10": null, "hs_analytics_first_visit_timestamp": null, "hs_analytics_first_visit_timestamp_timestamp_earliest_value_accc17ae": null, "hs_analytics_last_timestamp": null, "hs_analytics_last_timestamp_timestamp_latest_value_4e16365a": null, "hs_analytics_last_touch_converting_campaign": null, "hs_analytics_last_touch_converting_campaign_timestamp_latest_value_81a64e30": null, "hs_analytics_last_visit_timestamp": null, "hs_analytics_last_visit_timestamp_timestamp_latest_value_999a0fce": null, "hs_analytics_num_page_views": null, "hs_analytics_num_page_views_cardinality_sum_e46e85b0": null, "hs_analytics_num_visits": null, "hs_analytics_num_visits_cardinality_sum_53d952a6": null, "hs_analytics_source": null, "hs_analytics_source_data_1": null, "hs_analytics_source_data_1_timestamp_earliest_value_9b2f1fa1": null, "hs_analytics_source_data_2": null, 
"hs_analytics_source_data_2_timestamp_earliest_value_9b2f9400": null, "hs_analytics_source_timestamp_earliest_value_25a3a52c": null, "hs_avatar_filemanager_key": null, "hs_created_by_user_id": null, "hs_createdate": null, "hs_date_entered_customer": null, "hs_date_entered_evangelist": null, "hs_date_entered_lead": null, "hs_date_entered_marketingqualifiedlead": null, "hs_date_entered_opportunity": null, "hs_date_entered_other": null, "hs_date_entered_salesqualifiedlead": null, "hs_date_entered_subscriber": null, "hs_date_exited_customer": null, "hs_date_exited_evangelist": null, "hs_date_exited_lead": null, "hs_date_exited_marketingqualifiedlead": null, "hs_date_exited_opportunity": null, "hs_date_exited_other": null, "hs_date_exited_salesqualifiedlead": null, "hs_date_exited_subscriber": null, "hs_ideal_customer_profile": null, "hs_is_target_account": null, "hs_last_booked_meeting_date": null, "hs_last_logged_call_date": null, "hs_last_open_task_date": null, "hs_last_sales_activity_date": null, "hs_last_sales_activity_timestamp": null, "hs_lastmodifieddate": "2021-10-12T12:57:34.655000+00:00", "hs_latest_createdate_of_active_subscriptions": null, "hs_latest_meeting_activity": null, "hs_lead_status": null, "hs_merged_object_ids": null, "hs_num_blockers": 0.0, "hs_num_child_companies": 0.0, "hs_num_contacts_with_buying_roles": 0.0, "hs_num_decision_makers": 0.0, "hs_num_open_deals": 0.0, "hs_object_id": 7097787471, "hs_parent_company_id": null, "hs_pipeline": null, "hs_predictivecontactscore_v2": null, "hs_predictivecontactscore_v2_next_max_max_d4e58c1e": null, "hs_sales_email_last_replied": null, "hs_target_account": null, "hs_target_account_probability": 0.4076234698295593, "hs_target_account_recommendation_snooze_time": null, "hs_target_account_recommendation_state": null, "hs_time_in_customer": null, "hs_time_in_evangelist": null, "hs_time_in_lead": null, "hs_time_in_marketingqualifiedlead": null, "hs_time_in_opportunity": null, "hs_time_in_other": null, 
"hs_time_in_salesqualifiedlead": null, "hs_time_in_subscriber": null, "hs_total_deal_value": null, "hs_unique_creation_key": null, "hs_updated_by_user_id": null, "hs_user_ids_of_all_notification_followers": null, "hs_user_ids_of_all_notification_unfollowers": null, "hs_user_ids_of_all_owners": null, "hubspot_owner_assigneddate": null, "hubspot_owner_id": null, "hubspot_team_id": null, "hubspotscore": null, "industry": "Airbyte Test", "is_public": true, "lifecyclestage": null, "linkedin_company_page": "https://www.linkedin.com/company/bd-domain", "linkedinbio": null, "name": "test company 2", "notes_last_contacted": null, "notes_last_updated": null, "notes_next_activity_date": null, "num_associated_contacts": 0.0, "num_associated_deals": null, "num_contacted_notes": null, "num_conversion_events": null, "num_conversion_events_cardinality_sum_d095f14b": null, "num_notes": null, "numberofemployees": null, "phone": "(877) 929-0687-00291", "recent_conversion_date": null, "recent_conversion_date_timestamp_latest_value_72856da1": null, "recent_conversion_event_name": null, "recent_conversion_event_name_timestamp_latest_value_66c820bf": null, "recent_deal_amount": null, "recent_deal_close_date": null, "state": "Massachusetts", "timezone": "Asia/Dhaka", "total_money_raised": null, "total_revenue": null, "twitterbio": null, "twitterfollowers": null, "twitterhandle": null, "type": null, "web_technologies": "apache;nginx;facebook_social_plugins;facebook_connect;google_analytics;facebook_like_button;centos", "website": "test.domain.net", "zip": "1216"}, "createdAt": "2021-10-12T12:57:12.890Z", "updatedAt": "2021-10-12T12:57:34.655Z", "archived": false}, "emitted_at": 1655280716922} +{"stream": "companies", "data": {"id": "7097787481", "properties": {"about_us": null, "address": null, "address2": null, "annualrevenue": null, "city": "Test City 23", "closedate": null, "closedate_timestamp_earliest_value_a2a17e6e": null, "country": "Bangladesh", "createdate": 
"2021-10-12T12:57:20.273000+00:00", "days_to_close": null, "description": "This is a placeholder page installed by the Ubuntu release of the Lighttpd server package.", "domain": "test.domain.net", "engagements_last_meeting_booked": null, "engagements_last_meeting_booked_campaign": null, "engagements_last_meeting_booked_medium": null, "engagements_last_meeting_booked_source": null, "facebook_company_page": null, "facebookfans": null, "first_contact_createdate": null, "first_contact_createdate_timestamp_earliest_value_78b50eea": null, "first_conversion_date": null, "first_conversion_date_timestamp_earliest_value_61f58f2c": null, "first_conversion_event_name": null, "first_conversion_event_name_timestamp_earliest_value_68ddae0a": null, "first_deal_created_date": null, "founded_year": null, "googleplus_page": null, "hs_additional_domains": null, "hs_all_accessible_team_ids": null, "hs_all_assigned_business_unit_ids": null, "hs_all_owner_ids": null, "hs_all_team_ids": null, "hs_analytics_first_timestamp": null, "hs_analytics_first_timestamp_timestamp_earliest_value_11e3a63a": null, "hs_analytics_first_touch_converting_campaign": null, "hs_analytics_first_touch_converting_campaign_timestamp_earliest_value_4757fe10": null, "hs_analytics_first_visit_timestamp": null, "hs_analytics_first_visit_timestamp_timestamp_earliest_value_accc17ae": null, "hs_analytics_last_timestamp": null, "hs_analytics_last_timestamp_timestamp_latest_value_4e16365a": null, "hs_analytics_last_touch_converting_campaign": null, "hs_analytics_last_touch_converting_campaign_timestamp_latest_value_81a64e30": null, "hs_analytics_last_visit_timestamp": null, "hs_analytics_last_visit_timestamp_timestamp_latest_value_999a0fce": null, "hs_analytics_num_page_views": null, "hs_analytics_num_page_views_cardinality_sum_e46e85b0": null, "hs_analytics_num_visits": null, "hs_analytics_num_visits_cardinality_sum_53d952a6": null, "hs_analytics_source": null, "hs_analytics_source_data_1": null, 
"hs_analytics_source_data_1_timestamp_earliest_value_9b2f1fa1": null, "hs_analytics_source_data_2": null, "hs_analytics_source_data_2_timestamp_earliest_value_9b2f9400": null, "hs_analytics_source_timestamp_earliest_value_25a3a52c": null, "hs_avatar_filemanager_key": null, "hs_created_by_user_id": null, "hs_createdate": null, "hs_date_entered_customer": null, "hs_date_entered_evangelist": null, "hs_date_entered_lead": null, "hs_date_entered_marketingqualifiedlead": null, "hs_date_entered_opportunity": null, "hs_date_entered_other": null, "hs_date_entered_salesqualifiedlead": null, "hs_date_entered_subscriber": null, "hs_date_exited_customer": null, "hs_date_exited_evangelist": null, "hs_date_exited_lead": null, "hs_date_exited_marketingqualifiedlead": null, "hs_date_exited_opportunity": null, "hs_date_exited_other": null, "hs_date_exited_salesqualifiedlead": null, "hs_date_exited_subscriber": null, "hs_ideal_customer_profile": null, "hs_is_target_account": null, "hs_last_booked_meeting_date": null, "hs_last_logged_call_date": null, "hs_last_open_task_date": null, "hs_last_sales_activity_date": null, "hs_last_sales_activity_timestamp": null, "hs_lastmodifieddate": "2021-10-12T12:57:28.516000+00:00", "hs_latest_createdate_of_active_subscriptions": null, "hs_latest_meeting_activity": null, "hs_lead_status": null, "hs_merged_object_ids": null, "hs_num_blockers": 0.0, "hs_num_child_companies": 0.0, "hs_num_contacts_with_buying_roles": 0.0, "hs_num_decision_makers": 0.0, "hs_num_open_deals": 0.0, "hs_object_id": 7097787481, "hs_parent_company_id": null, "hs_pipeline": null, "hs_predictivecontactscore_v2": null, "hs_predictivecontactscore_v2_next_max_max_d4e58c1e": null, "hs_sales_email_last_replied": null, "hs_target_account": null, "hs_target_account_probability": 0.4076234698295593, "hs_target_account_recommendation_snooze_time": null, "hs_target_account_recommendation_state": null, "hs_time_in_customer": null, "hs_time_in_evangelist": null, "hs_time_in_lead": null, 
"hs_time_in_marketingqualifiedlead": null, "hs_time_in_opportunity": null, "hs_time_in_other": null, "hs_time_in_salesqualifiedlead": null, "hs_time_in_subscriber": null, "hs_total_deal_value": null, "hs_unique_creation_key": null, "hs_updated_by_user_id": null, "hs_user_ids_of_all_notification_followers": null, "hs_user_ids_of_all_notification_unfollowers": null, "hs_user_ids_of_all_owners": null, "hubspot_owner_assigneddate": null, "hubspot_owner_id": null, "hubspot_team_id": null, "hubspotscore": null, "industry": "Airbyte Test", "is_public": true, "lifecyclestage": null, "linkedin_company_page": "https://www.linkedin.com/company/bd-domain", "linkedinbio": null, "name": "test company 23", "notes_last_contacted": null, "notes_last_updated": null, "notes_next_activity_date": null, "num_associated_contacts": 0.0, "num_associated_deals": null, "num_contacted_notes": null, "num_conversion_events": null, "num_conversion_events_cardinality_sum_d095f14b": null, "num_notes": null, "numberofemployees": null, "phone": "(877) 929-0687-00291", "recent_conversion_date": null, "recent_conversion_date_timestamp_latest_value_72856da1": null, "recent_conversion_event_name": null, "recent_conversion_event_name_timestamp_latest_value_66c820bf": null, "recent_deal_amount": null, "recent_deal_close_date": null, "state": "Massachusetts", "timezone": "Asia/Dhaka", "total_money_raised": null, "total_revenue": null, "twitterbio": null, "twitterfollowers": null, "twitterhandle": null, "type": null, "web_technologies": "apache;nginx;facebook_social_plugins;facebook_connect;google_analytics;facebook_like_button;centos", "website": "test.domain.net", "zip": "1216"}, "createdAt": "2021-10-12T12:57:20.273Z", "updatedAt": "2021-10-12T12:57:28.516Z", "archived": false}, "emitted_at": 1655280716922} +{"stream": "companies", "data": {"id": "7097787486", "properties": {"about_us": null, "address": null, "address2": null, "annualrevenue": null, "city": "Test City 33", "closedate": null, 
"closedate_timestamp_earliest_value_a2a17e6e": null, "country": "Bangladesh", "createdate": "2021-10-12T12:57:23.511000+00:00", "days_to_close": null, "description": "This is a placeholder page installed by the Ubuntu release of the Lighttpd server package.", "domain": "test.domain.net", "engagements_last_meeting_booked": null, "engagements_last_meeting_booked_campaign": null, "engagements_last_meeting_booked_medium": null, "engagements_last_meeting_booked_source": null, "facebook_company_page": null, "facebookfans": null, "first_contact_createdate": null, "first_contact_createdate_timestamp_earliest_value_78b50eea": null, "first_conversion_date": null, "first_conversion_date_timestamp_earliest_value_61f58f2c": null, "first_conversion_event_name": null, "first_conversion_event_name_timestamp_earliest_value_68ddae0a": null, "first_deal_created_date": null, "founded_year": null, "googleplus_page": null, "hs_additional_domains": null, "hs_all_accessible_team_ids": null, "hs_all_assigned_business_unit_ids": null, "hs_all_owner_ids": null, "hs_all_team_ids": null, "hs_analytics_first_timestamp": null, "hs_analytics_first_timestamp_timestamp_earliest_value_11e3a63a": null, "hs_analytics_first_touch_converting_campaign": null, "hs_analytics_first_touch_converting_campaign_timestamp_earliest_value_4757fe10": null, "hs_analytics_first_visit_timestamp": null, "hs_analytics_first_visit_timestamp_timestamp_earliest_value_accc17ae": null, "hs_analytics_last_timestamp": null, "hs_analytics_last_timestamp_timestamp_latest_value_4e16365a": null, "hs_analytics_last_touch_converting_campaign": null, "hs_analytics_last_touch_converting_campaign_timestamp_latest_value_81a64e30": null, "hs_analytics_last_visit_timestamp": null, "hs_analytics_last_visit_timestamp_timestamp_latest_value_999a0fce": null, "hs_analytics_num_page_views": null, "hs_analytics_num_page_views_cardinality_sum_e46e85b0": null, "hs_analytics_num_visits": null, "hs_analytics_num_visits_cardinality_sum_53d952a6": 
null, "hs_analytics_source": null, "hs_analytics_source_data_1": null, "hs_analytics_source_data_1_timestamp_earliest_value_9b2f1fa1": null, "hs_analytics_source_data_2": null, "hs_analytics_source_data_2_timestamp_earliest_value_9b2f9400": null, "hs_analytics_source_timestamp_earliest_value_25a3a52c": null, "hs_avatar_filemanager_key": null, "hs_created_by_user_id": null, "hs_createdate": null, "hs_date_entered_customer": null, "hs_date_entered_evangelist": null, "hs_date_entered_lead": null, "hs_date_entered_marketingqualifiedlead": null, "hs_date_entered_opportunity": null, "hs_date_entered_other": null, "hs_date_entered_salesqualifiedlead": null, "hs_date_entered_subscriber": null, "hs_date_exited_customer": null, "hs_date_exited_evangelist": null, "hs_date_exited_lead": null, "hs_date_exited_marketingqualifiedlead": null, "hs_date_exited_opportunity": null, "hs_date_exited_other": null, "hs_date_exited_salesqualifiedlead": null, "hs_date_exited_subscriber": null, "hs_ideal_customer_profile": null, "hs_is_target_account": null, "hs_last_booked_meeting_date": null, "hs_last_logged_call_date": null, "hs_last_open_task_date": null, "hs_last_sales_activity_date": null, "hs_last_sales_activity_timestamp": null, "hs_lastmodifieddate": "2021-10-12T12:57:41.902000+00:00", "hs_latest_createdate_of_active_subscriptions": null, "hs_latest_meeting_activity": null, "hs_lead_status": null, "hs_merged_object_ids": null, "hs_num_blockers": 0.0, "hs_num_child_companies": 0.0, "hs_num_contacts_with_buying_roles": 0.0, "hs_num_decision_makers": 0.0, "hs_num_open_deals": 0.0, "hs_object_id": 7097787486, "hs_parent_company_id": null, "hs_pipeline": null, "hs_predictivecontactscore_v2": null, "hs_predictivecontactscore_v2_next_max_max_d4e58c1e": null, "hs_sales_email_last_replied": null, "hs_target_account": null, "hs_target_account_probability": 0.4076234698295593, "hs_target_account_recommendation_snooze_time": null, "hs_target_account_recommendation_state": null, 
"hs_time_in_customer": null, "hs_time_in_evangelist": null, "hs_time_in_lead": null, "hs_time_in_marketingqualifiedlead": null, "hs_time_in_opportunity": null, "hs_time_in_other": null, "hs_time_in_salesqualifiedlead": null, "hs_time_in_subscriber": null, "hs_total_deal_value": null, "hs_unique_creation_key": null, "hs_updated_by_user_id": null, "hs_user_ids_of_all_notification_followers": null, "hs_user_ids_of_all_notification_unfollowers": null, "hs_user_ids_of_all_owners": null, "hubspot_owner_assigneddate": null, "hubspot_owner_id": null, "hubspot_team_id": null, "hubspotscore": null, "industry": "Airbyte Test", "is_public": true, "lifecyclestage": null, "linkedin_company_page": "https://www.linkedin.com/company/bd-domain", "linkedinbio": null, "name": "test company 33", "notes_last_contacted": null, "notes_last_updated": null, "notes_next_activity_date": null, "num_associated_contacts": 0.0, "num_associated_deals": null, "num_contacted_notes": null, "num_conversion_events": null, "num_conversion_events_cardinality_sum_d095f14b": null, "num_notes": null, "numberofemployees": null, "phone": "(877) 929-0687-00291", "recent_conversion_date": null, "recent_conversion_date_timestamp_latest_value_72856da1": null, "recent_conversion_event_name": null, "recent_conversion_event_name_timestamp_latest_value_66c820bf": null, "recent_deal_amount": null, "recent_deal_close_date": null, "state": "Massachusetts", "timezone": "Asia/Dhaka", "total_money_raised": null, "total_revenue": null, "twitterbio": null, "twitterfollowers": null, "twitterhandle": null, "type": null, "web_technologies": "apache;nginx;facebook_social_plugins;facebook_connect;google_analytics;facebook_like_button;centos", "website": "test.domain.net", "zip": "1216"}, "createdAt": "2021-10-12T12:57:23.511Z", "updatedAt": "2021-10-12T12:57:41.902Z", "archived": false}, "emitted_at": 1655280716923} +{"stream": "companies", "data": {"id": "7097787497", "properties": {"about_us": null, "address": null, "address2": 
null, "annualrevenue": null, "city": "Test City 66", "closedate": null, "closedate_timestamp_earliest_value_a2a17e6e": null, "country": "Bangladesh", "createdate": "2021-10-12T12:57:34.455000+00:00", "days_to_close": null, "description": "This is a placeholder page installed by the Ubuntu release of the Lighttpd server package.", "domain": "test.domain.net", "engagements_last_meeting_booked": null, "engagements_last_meeting_booked_campaign": null, "engagements_last_meeting_booked_medium": null, "engagements_last_meeting_booked_source": null, "facebook_company_page": null, "facebookfans": null, "first_contact_createdate": null, "first_contact_createdate_timestamp_earliest_value_78b50eea": null, "first_conversion_date": null, "first_conversion_date_timestamp_earliest_value_61f58f2c": null, "first_conversion_event_name": null, "first_conversion_event_name_timestamp_earliest_value_68ddae0a": null, "first_deal_created_date": null, "founded_year": null, "googleplus_page": null, "hs_additional_domains": null, "hs_all_accessible_team_ids": null, "hs_all_assigned_business_unit_ids": null, "hs_all_owner_ids": null, "hs_all_team_ids": null, "hs_analytics_first_timestamp": null, "hs_analytics_first_timestamp_timestamp_earliest_value_11e3a63a": null, "hs_analytics_first_touch_converting_campaign": null, "hs_analytics_first_touch_converting_campaign_timestamp_earliest_value_4757fe10": null, "hs_analytics_first_visit_timestamp": null, "hs_analytics_first_visit_timestamp_timestamp_earliest_value_accc17ae": null, "hs_analytics_last_timestamp": null, "hs_analytics_last_timestamp_timestamp_latest_value_4e16365a": null, "hs_analytics_last_touch_converting_campaign": null, "hs_analytics_last_touch_converting_campaign_timestamp_latest_value_81a64e30": null, "hs_analytics_last_visit_timestamp": null, "hs_analytics_last_visit_timestamp_timestamp_latest_value_999a0fce": null, "hs_analytics_num_page_views": null, "hs_analytics_num_page_views_cardinality_sum_e46e85b0": null, 
"hs_analytics_num_visits": null, "hs_analytics_num_visits_cardinality_sum_53d952a6": null, "hs_analytics_source": null, "hs_analytics_source_data_1": null, "hs_analytics_source_data_1_timestamp_earliest_value_9b2f1fa1": null, "hs_analytics_source_data_2": null, "hs_analytics_source_data_2_timestamp_earliest_value_9b2f9400": null, "hs_analytics_source_timestamp_earliest_value_25a3a52c": null, "hs_avatar_filemanager_key": null, "hs_created_by_user_id": null, "hs_createdate": null, "hs_date_entered_customer": null, "hs_date_entered_evangelist": null, "hs_date_entered_lead": null, "hs_date_entered_marketingqualifiedlead": null, "hs_date_entered_opportunity": null, "hs_date_entered_other": null, "hs_date_entered_salesqualifiedlead": null, "hs_date_entered_subscriber": null, "hs_date_exited_customer": null, "hs_date_exited_evangelist": null, "hs_date_exited_lead": null, "hs_date_exited_marketingqualifiedlead": null, "hs_date_exited_opportunity": null, "hs_date_exited_other": null, "hs_date_exited_salesqualifiedlead": null, "hs_date_exited_subscriber": null, "hs_ideal_customer_profile": null, "hs_is_target_account": null, "hs_last_booked_meeting_date": null, "hs_last_logged_call_date": null, "hs_last_open_task_date": null, "hs_last_sales_activity_date": null, "hs_last_sales_activity_timestamp": null, "hs_lastmodifieddate": "2021-10-12T12:57:51.397000+00:00", "hs_latest_createdate_of_active_subscriptions": null, "hs_latest_meeting_activity": null, "hs_lead_status": null, "hs_merged_object_ids": null, "hs_num_blockers": 0.0, "hs_num_child_companies": 0.0, "hs_num_contacts_with_buying_roles": 0.0, "hs_num_decision_makers": 0.0, "hs_num_open_deals": 0.0, "hs_object_id": 7097787497, "hs_parent_company_id": null, "hs_pipeline": null, "hs_predictivecontactscore_v2": null, "hs_predictivecontactscore_v2_next_max_max_d4e58c1e": null, "hs_sales_email_last_replied": null, "hs_target_account": null, "hs_target_account_probability": 0.4076234698295593, 
"hs_target_account_recommendation_snooze_time": null, "hs_target_account_recommendation_state": null, "hs_time_in_customer": null, "hs_time_in_evangelist": null, "hs_time_in_lead": null, "hs_time_in_marketingqualifiedlead": null, "hs_time_in_opportunity": null, "hs_time_in_other": null, "hs_time_in_salesqualifiedlead": null, "hs_time_in_subscriber": null, "hs_total_deal_value": null, "hs_unique_creation_key": null, "hs_updated_by_user_id": null, "hs_user_ids_of_all_notification_followers": null, "hs_user_ids_of_all_notification_unfollowers": null, "hs_user_ids_of_all_owners": null, "hubspot_owner_assigneddate": null, "hubspot_owner_id": null, "hubspot_team_id": null, "hubspotscore": null, "industry": "Airbyte Test", "is_public": true, "lifecyclestage": null, "linkedin_company_page": "https://www.linkedin.com/company/bd-domain", "linkedinbio": null, "name": "test company 66", "notes_last_contacted": null, "notes_last_updated": null, "notes_next_activity_date": null, "num_associated_contacts": 0.0, "num_associated_deals": null, "num_contacted_notes": null, "num_conversion_events": null, "num_conversion_events_cardinality_sum_d095f14b": null, "num_notes": null, "numberofemployees": null, "phone": "(877) 929-0687-00291", "recent_conversion_date": null, "recent_conversion_date_timestamp_latest_value_72856da1": null, "recent_conversion_event_name": null, "recent_conversion_event_name_timestamp_latest_value_66c820bf": null, "recent_deal_amount": null, "recent_deal_close_date": null, "state": "Massachusetts", "timezone": "Asia/Dhaka", "total_money_raised": null, "total_revenue": null, "twitterbio": null, "twitterfollowers": null, "twitterhandle": null, "type": null, "web_technologies": "apache;nginx;facebook_social_plugins;facebook_connect;google_analytics;facebook_like_button;centos", "website": "test.domain.net", "zip": "1216"}, "createdAt": "2021-10-12T12:57:34.455Z", "updatedAt": "2021-10-12T12:57:51.397Z", "archived": false}, "emitted_at": 1655280716923} +{"stream": 
"companies", "data": {"id": "7097787500", "properties": {"about_us": null, "address": null, "address2": null, "annualrevenue": null, "city": "Test City 89", "closedate": null, "closedate_timestamp_earliest_value_a2a17e6e": null, "country": "Bangladesh", "createdate": "2021-10-12T12:57:43.019000+00:00", "days_to_close": null, "description": "This is a placeholder page installed by the Ubuntu release of the Lighttpd server package.", "domain": "test.domain.net", "engagements_last_meeting_booked": null, "engagements_last_meeting_booked_campaign": null, "engagements_last_meeting_booked_medium": null, "engagements_last_meeting_booked_source": null, "facebook_company_page": null, "facebookfans": null, "first_contact_createdate": null, "first_contact_createdate_timestamp_earliest_value_78b50eea": null, "first_conversion_date": null, "first_conversion_date_timestamp_earliest_value_61f58f2c": null, "first_conversion_event_name": null, "first_conversion_event_name_timestamp_earliest_value_68ddae0a": null, "first_deal_created_date": null, "founded_year": null, "googleplus_page": null, "hs_additional_domains": null, "hs_all_accessible_team_ids": null, "hs_all_assigned_business_unit_ids": null, "hs_all_owner_ids": null, "hs_all_team_ids": null, "hs_analytics_first_timestamp": null, "hs_analytics_first_timestamp_timestamp_earliest_value_11e3a63a": null, "hs_analytics_first_touch_converting_campaign": null, "hs_analytics_first_touch_converting_campaign_timestamp_earliest_value_4757fe10": null, "hs_analytics_first_visit_timestamp": null, "hs_analytics_first_visit_timestamp_timestamp_earliest_value_accc17ae": null, "hs_analytics_last_timestamp": null, "hs_analytics_last_timestamp_timestamp_latest_value_4e16365a": null, "hs_analytics_last_touch_converting_campaign": null, "hs_analytics_last_touch_converting_campaign_timestamp_latest_value_81a64e30": null, "hs_analytics_last_visit_timestamp": null, "hs_analytics_last_visit_timestamp_timestamp_latest_value_999a0fce": null, 
"hs_analytics_num_page_views": null, "hs_analytics_num_page_views_cardinality_sum_e46e85b0": null, "hs_analytics_num_visits": null, "hs_analytics_num_visits_cardinality_sum_53d952a6": null, "hs_analytics_source": null, "hs_analytics_source_data_1": null, "hs_analytics_source_data_1_timestamp_earliest_value_9b2f1fa1": null, "hs_analytics_source_data_2": null, "hs_analytics_source_data_2_timestamp_earliest_value_9b2f9400": null, "hs_analytics_source_timestamp_earliest_value_25a3a52c": null, "hs_avatar_filemanager_key": null, "hs_created_by_user_id": null, "hs_createdate": null, "hs_date_entered_customer": null, "hs_date_entered_evangelist": null, "hs_date_entered_lead": null, "hs_date_entered_marketingqualifiedlead": null, "hs_date_entered_opportunity": null, "hs_date_entered_other": null, "hs_date_entered_salesqualifiedlead": null, "hs_date_entered_subscriber": null, "hs_date_exited_customer": null, "hs_date_exited_evangelist": null, "hs_date_exited_lead": null, "hs_date_exited_marketingqualifiedlead": null, "hs_date_exited_opportunity": null, "hs_date_exited_other": null, "hs_date_exited_salesqualifiedlead": null, "hs_date_exited_subscriber": null, "hs_ideal_customer_profile": null, "hs_is_target_account": null, "hs_last_booked_meeting_date": null, "hs_last_logged_call_date": null, "hs_last_open_task_date": null, "hs_last_sales_activity_date": null, "hs_last_sales_activity_timestamp": null, "hs_lastmodifieddate": "2021-10-12T12:57:53.376000+00:00", "hs_latest_createdate_of_active_subscriptions": null, "hs_latest_meeting_activity": null, "hs_lead_status": null, "hs_merged_object_ids": null, "hs_num_blockers": 0.0, "hs_num_child_companies": 0.0, "hs_num_contacts_with_buying_roles": 0.0, "hs_num_decision_makers": 0.0, "hs_num_open_deals": 0.0, "hs_object_id": 7097787500, "hs_parent_company_id": null, "hs_pipeline": null, "hs_predictivecontactscore_v2": null, "hs_predictivecontactscore_v2_next_max_max_d4e58c1e": null, "hs_sales_email_last_replied": null, 
"hs_target_account": null, "hs_target_account_probability": 0.4076234698295593, "hs_target_account_recommendation_snooze_time": null, "hs_target_account_recommendation_state": null, "hs_time_in_customer": null, "hs_time_in_evangelist": null, "hs_time_in_lead": null, "hs_time_in_marketingqualifiedlead": null, "hs_time_in_opportunity": null, "hs_time_in_other": null, "hs_time_in_salesqualifiedlead": null, "hs_time_in_subscriber": null, "hs_total_deal_value": null, "hs_unique_creation_key": null, "hs_updated_by_user_id": null, "hs_user_ids_of_all_notification_followers": null, "hs_user_ids_of_all_notification_unfollowers": null, "hs_user_ids_of_all_owners": null, "hubspot_owner_assigneddate": null, "hubspot_owner_id": null, "hubspot_team_id": null, "hubspotscore": null, "industry": "Airbyte Test", "is_public": true, "lifecyclestage": null, "linkedin_company_page": "https://www.linkedin.com/company/bd-domain", "linkedinbio": null, "name": "test company 89", "notes_last_contacted": null, "notes_last_updated": null, "notes_next_activity_date": null, "num_associated_contacts": 0.0, "num_associated_deals": null, "num_contacted_notes": null, "num_conversion_events": null, "num_conversion_events_cardinality_sum_d095f14b": null, "num_notes": null, "numberofemployees": null, "phone": "(877) 929-0687-00291", "recent_conversion_date": null, "recent_conversion_date_timestamp_latest_value_72856da1": null, "recent_conversion_event_name": null, "recent_conversion_event_name_timestamp_latest_value_66c820bf": null, "recent_deal_amount": null, "recent_deal_close_date": null, "state": "Massachusetts", "timezone": "Asia/Dhaka", "total_money_raised": null, "total_revenue": null, "twitterbio": null, "twitterfollowers": null, "twitterhandle": null, "type": null, "web_technologies": "apache;nginx;facebook_social_plugins;facebook_connect;google_analytics;facebook_like_button;centos", "website": "test.domain.net", "zip": "1216"}, "createdAt": "2021-10-12T12:57:43.019Z", "updatedAt": 
"2021-10-12T12:57:53.376Z", "archived": false}, "emitted_at": 1655280716924} +{"stream": "companies", "data": {"id": "7097791039", "properties": {"about_us": null, "address": null, "address2": null, "annualrevenue": null, "city": "Test City 6", "closedate": null, "closedate_timestamp_earliest_value_a2a17e6e": null, "country": "Bangladesh", "createdate": "2021-10-12T12:57:14.625000+00:00", "days_to_close": null, "description": "This is a placeholder page installed by the Ubuntu release of the Lighttpd server package.", "domain": "test.domain.net", "engagements_last_meeting_booked": null, "engagements_last_meeting_booked_campaign": null, "engagements_last_meeting_booked_medium": null, "engagements_last_meeting_booked_source": null, "facebook_company_page": null, "facebookfans": null, "first_contact_createdate": null, "first_contact_createdate_timestamp_earliest_value_78b50eea": null, "first_conversion_date": null, "first_conversion_date_timestamp_earliest_value_61f58f2c": null, "first_conversion_event_name": null, "first_conversion_event_name_timestamp_earliest_value_68ddae0a": null, "first_deal_created_date": null, "founded_year": null, "googleplus_page": null, "hs_additional_domains": null, "hs_all_accessible_team_ids": null, "hs_all_assigned_business_unit_ids": null, "hs_all_owner_ids": null, "hs_all_team_ids": null, "hs_analytics_first_timestamp": null, "hs_analytics_first_timestamp_timestamp_earliest_value_11e3a63a": null, "hs_analytics_first_touch_converting_campaign": null, "hs_analytics_first_touch_converting_campaign_timestamp_earliest_value_4757fe10": null, "hs_analytics_first_visit_timestamp": null, "hs_analytics_first_visit_timestamp_timestamp_earliest_value_accc17ae": null, "hs_analytics_last_timestamp": null, "hs_analytics_last_timestamp_timestamp_latest_value_4e16365a": null, "hs_analytics_last_touch_converting_campaign": null, "hs_analytics_last_touch_converting_campaign_timestamp_latest_value_81a64e30": null, "hs_analytics_last_visit_timestamp": 
null, "hs_analytics_last_visit_timestamp_timestamp_latest_value_999a0fce": null, "hs_analytics_num_page_views": null, "hs_analytics_num_page_views_cardinality_sum_e46e85b0": null, "hs_analytics_num_visits": null, "hs_analytics_num_visits_cardinality_sum_53d952a6": null, "hs_analytics_source": null, "hs_analytics_source_data_1": null, "hs_analytics_source_data_1_timestamp_earliest_value_9b2f1fa1": null, "hs_analytics_source_data_2": null, "hs_analytics_source_data_2_timestamp_earliest_value_9b2f9400": null, "hs_analytics_source_timestamp_earliest_value_25a3a52c": null, "hs_avatar_filemanager_key": null, "hs_created_by_user_id": null, "hs_createdate": null, "hs_date_entered_customer": null, "hs_date_entered_evangelist": null, "hs_date_entered_lead": null, "hs_date_entered_marketingqualifiedlead": null, "hs_date_entered_opportunity": null, "hs_date_entered_other": null, "hs_date_entered_salesqualifiedlead": null, "hs_date_entered_subscriber": null, "hs_date_exited_customer": null, "hs_date_exited_evangelist": null, "hs_date_exited_lead": null, "hs_date_exited_marketingqualifiedlead": null, "hs_date_exited_opportunity": null, "hs_date_exited_other": null, "hs_date_exited_salesqualifiedlead": null, "hs_date_exited_subscriber": null, "hs_ideal_customer_profile": null, "hs_is_target_account": null, "hs_last_booked_meeting_date": null, "hs_last_logged_call_date": null, "hs_last_open_task_date": null, "hs_last_sales_activity_date": null, "hs_last_sales_activity_timestamp": null, "hs_lastmodifieddate": "2021-10-12T12:58:19.276000+00:00", "hs_latest_createdate_of_active_subscriptions": null, "hs_latest_meeting_activity": null, "hs_lead_status": null, "hs_merged_object_ids": null, "hs_num_blockers": 0.0, "hs_num_child_companies": 0.0, "hs_num_contacts_with_buying_roles": 0.0, "hs_num_decision_makers": 0.0, "hs_num_open_deals": 0.0, "hs_object_id": 7097791039, "hs_parent_company_id": null, "hs_pipeline": null, "hs_predictivecontactscore_v2": null, 
"hs_predictivecontactscore_v2_next_max_max_d4e58c1e": null, "hs_sales_email_last_replied": null, "hs_target_account": null, "hs_target_account_probability": 0.4076234698295593, "hs_target_account_recommendation_snooze_time": null, "hs_target_account_recommendation_state": null, "hs_time_in_customer": null, "hs_time_in_evangelist": null, "hs_time_in_lead": null, "hs_time_in_marketingqualifiedlead": null, "hs_time_in_opportunity": null, "hs_time_in_other": null, "hs_time_in_salesqualifiedlead": null, "hs_time_in_subscriber": null, "hs_total_deal_value": null, "hs_unique_creation_key": null, "hs_updated_by_user_id": null, "hs_user_ids_of_all_notification_followers": null, "hs_user_ids_of_all_notification_unfollowers": null, "hs_user_ids_of_all_owners": null, "hubspot_owner_assigneddate": null, "hubspot_owner_id": null, "hubspot_team_id": null, "hubspotscore": null, "industry": "Airbyte Test", "is_public": true, "lifecyclestage": null, "linkedin_company_page": "https://www.linkedin.com/company/bd-domain", "linkedinbio": null, "name": "test company 6", "notes_last_contacted": null, "notes_last_updated": null, "notes_next_activity_date": null, "num_associated_contacts": 0.0, "num_associated_deals": null, "num_contacted_notes": null, "num_conversion_events": null, "num_conversion_events_cardinality_sum_d095f14b": null, "num_notes": null, "numberofemployees": null, "phone": "(877) 929-0687-00291", "recent_conversion_date": null, "recent_conversion_date_timestamp_latest_value_72856da1": null, "recent_conversion_event_name": null, "recent_conversion_event_name_timestamp_latest_value_66c820bf": null, "recent_deal_amount": null, "recent_deal_close_date": null, "state": "Massachusetts", "timezone": "Asia/Dhaka", "total_money_raised": null, "total_revenue": null, "twitterbio": null, "twitterfollowers": null, "twitterhandle": null, "type": null, "web_technologies": "apache;nginx;facebook_social_plugins;facebook_connect;google_analytics;facebook_like_button;centos", "website": 
"test.domain.net", "zip": "1216"}, "createdAt": "2021-10-12T12:57:14.625Z", "updatedAt": "2021-10-12T12:58:19.276Z", "archived": false}, "emitted_at": 1655280716924} +{"stream": "companies", "data": {"id": "7097791040", "properties": {"about_us": null, "address": null, "address2": null, "annualrevenue": null, "city": "Test City 9", "closedate": null, "closedate_timestamp_earliest_value_a2a17e6e": null, "country": "Bangladesh", "createdate": "2021-10-12T12:57:15.588000+00:00", "days_to_close": null, "description": "This is a placeholder page installed by the Ubuntu release of the Lighttpd server package.", "domain": "test.domain.net", "engagements_last_meeting_booked": null, "engagements_last_meeting_booked_campaign": null, "engagements_last_meeting_booked_medium": null, "engagements_last_meeting_booked_source": null, "facebook_company_page": null, "facebookfans": null, "first_contact_createdate": null, "first_contact_createdate_timestamp_earliest_value_78b50eea": null, "first_conversion_date": null, "first_conversion_date_timestamp_earliest_value_61f58f2c": null, "first_conversion_event_name": null, "first_conversion_event_name_timestamp_earliest_value_68ddae0a": null, "first_deal_created_date": null, "founded_year": null, "googleplus_page": null, "hs_additional_domains": null, "hs_all_accessible_team_ids": null, "hs_all_assigned_business_unit_ids": null, "hs_all_owner_ids": null, "hs_all_team_ids": null, "hs_analytics_first_timestamp": null, "hs_analytics_first_timestamp_timestamp_earliest_value_11e3a63a": null, "hs_analytics_first_touch_converting_campaign": null, "hs_analytics_first_touch_converting_campaign_timestamp_earliest_value_4757fe10": null, "hs_analytics_first_visit_timestamp": null, "hs_analytics_first_visit_timestamp_timestamp_earliest_value_accc17ae": null, "hs_analytics_last_timestamp": null, "hs_analytics_last_timestamp_timestamp_latest_value_4e16365a": null, "hs_analytics_last_touch_converting_campaign": null, 
"hs_analytics_last_touch_converting_campaign_timestamp_latest_value_81a64e30": null, "hs_analytics_last_visit_timestamp": null, "hs_analytics_last_visit_timestamp_timestamp_latest_value_999a0fce": null, "hs_analytics_num_page_views": null, "hs_analytics_num_page_views_cardinality_sum_e46e85b0": null, "hs_analytics_num_visits": null, "hs_analytics_num_visits_cardinality_sum_53d952a6": null, "hs_analytics_source": null, "hs_analytics_source_data_1": null, "hs_analytics_source_data_1_timestamp_earliest_value_9b2f1fa1": null, "hs_analytics_source_data_2": null, "hs_analytics_source_data_2_timestamp_earliest_value_9b2f9400": null, "hs_analytics_source_timestamp_earliest_value_25a3a52c": null, "hs_avatar_filemanager_key": null, "hs_created_by_user_id": null, "hs_createdate": null, "hs_date_entered_customer": null, "hs_date_entered_evangelist": null, "hs_date_entered_lead": null, "hs_date_entered_marketingqualifiedlead": null, "hs_date_entered_opportunity": null, "hs_date_entered_other": null, "hs_date_entered_salesqualifiedlead": null, "hs_date_entered_subscriber": null, "hs_date_exited_customer": null, "hs_date_exited_evangelist": null, "hs_date_exited_lead": null, "hs_date_exited_marketingqualifiedlead": null, "hs_date_exited_opportunity": null, "hs_date_exited_other": null, "hs_date_exited_salesqualifiedlead": null, "hs_date_exited_subscriber": null, "hs_ideal_customer_profile": null, "hs_is_target_account": null, "hs_last_booked_meeting_date": null, "hs_last_logged_call_date": null, "hs_last_open_task_date": null, "hs_last_sales_activity_date": null, "hs_last_sales_activity_timestamp": null, "hs_lastmodifieddate": "2021-10-12T12:58:20.344000+00:00", "hs_latest_createdate_of_active_subscriptions": null, "hs_latest_meeting_activity": null, "hs_lead_status": null, "hs_merged_object_ids": null, "hs_num_blockers": 0.0, "hs_num_child_companies": 0.0, "hs_num_contacts_with_buying_roles": 0.0, "hs_num_decision_makers": 0.0, "hs_num_open_deals": 0.0, "hs_object_id": 
7097791040, "hs_parent_company_id": null, "hs_pipeline": null, "hs_predictivecontactscore_v2": null, "hs_predictivecontactscore_v2_next_max_max_d4e58c1e": null, "hs_sales_email_last_replied": null, "hs_target_account": null, "hs_target_account_probability": 0.4076234698295593, "hs_target_account_recommendation_snooze_time": null, "hs_target_account_recommendation_state": null, "hs_time_in_customer": null, "hs_time_in_evangelist": null, "hs_time_in_lead": null, "hs_time_in_marketingqualifiedlead": null, "hs_time_in_opportunity": null, "hs_time_in_other": null, "hs_time_in_salesqualifiedlead": null, "hs_time_in_subscriber": null, "hs_total_deal_value": null, "hs_unique_creation_key": null, "hs_updated_by_user_id": null, "hs_user_ids_of_all_notification_followers": null, "hs_user_ids_of_all_notification_unfollowers": null, "hs_user_ids_of_all_owners": null, "hubspot_owner_assigneddate": null, "hubspot_owner_id": null, "hubspot_team_id": null, "hubspotscore": null, "industry": "Airbyte Test", "is_public": true, "lifecyclestage": null, "linkedin_company_page": "https://www.linkedin.com/company/bd-domain", "linkedinbio": null, "name": "test company 9", "notes_last_contacted": null, "notes_last_updated": null, "notes_next_activity_date": null, "num_associated_contacts": 0.0, "num_associated_deals": null, "num_contacted_notes": null, "num_conversion_events": null, "num_conversion_events_cardinality_sum_d095f14b": null, "num_notes": null, "numberofemployees": null, "phone": "(877) 929-0687-00291", "recent_conversion_date": null, "recent_conversion_date_timestamp_latest_value_72856da1": null, "recent_conversion_event_name": null, "recent_conversion_event_name_timestamp_latest_value_66c820bf": null, "recent_deal_amount": null, "recent_deal_close_date": null, "state": "Massachusetts", "timezone": "Asia/Dhaka", "total_money_raised": null, "total_revenue": null, "twitterbio": null, "twitterfollowers": null, "twitterhandle": null, "type": null, "web_technologies": 
"apache;nginx;facebook_social_plugins;facebook_connect;google_analytics;facebook_like_button;centos", "website": "test.domain.net", "zip": "1216"}, "createdAt": "2021-10-12T12:57:15.588Z", "updatedAt": "2021-10-12T12:58:20.344Z", "archived": false}, "emitted_at": 1655280716925} +{"stream": "companies", "data": {"id": "7097791047", "properties": {"about_us": null, "address": null, "address2": null, "annualrevenue": null, "city": "Test City 21", "closedate": null, "closedate_timestamp_earliest_value_a2a17e6e": null, "country": "Bangladesh", "createdate": "2021-10-12T12:57:19.647000+00:00", "days_to_close": null, "description": "This is a placeholder page installed by the Ubuntu release of the Lighttpd server package.", "domain": "test.domain.net", "engagements_last_meeting_booked": null, "engagements_last_meeting_booked_campaign": null, "engagements_last_meeting_booked_medium": null, "engagements_last_meeting_booked_source": null, "facebook_company_page": null, "facebookfans": null, "first_contact_createdate": null, "first_contact_createdate_timestamp_earliest_value_78b50eea": null, "first_conversion_date": null, "first_conversion_date_timestamp_earliest_value_61f58f2c": null, "first_conversion_event_name": null, "first_conversion_event_name_timestamp_earliest_value_68ddae0a": null, "first_deal_created_date": null, "founded_year": null, "googleplus_page": null, "hs_additional_domains": null, "hs_all_accessible_team_ids": null, "hs_all_assigned_business_unit_ids": null, "hs_all_owner_ids": null, "hs_all_team_ids": null, "hs_analytics_first_timestamp": null, "hs_analytics_first_timestamp_timestamp_earliest_value_11e3a63a": null, "hs_analytics_first_touch_converting_campaign": null, "hs_analytics_first_touch_converting_campaign_timestamp_earliest_value_4757fe10": null, "hs_analytics_first_visit_timestamp": null, "hs_analytics_first_visit_timestamp_timestamp_earliest_value_accc17ae": null, "hs_analytics_last_timestamp": null, 
"hs_analytics_last_timestamp_timestamp_latest_value_4e16365a": null, "hs_analytics_last_touch_converting_campaign": null, "hs_analytics_last_touch_converting_campaign_timestamp_latest_value_81a64e30": null, "hs_analytics_last_visit_timestamp": null, "hs_analytics_last_visit_timestamp_timestamp_latest_value_999a0fce": null, "hs_analytics_num_page_views": null, "hs_analytics_num_page_views_cardinality_sum_e46e85b0": null, "hs_analytics_num_visits": null, "hs_analytics_num_visits_cardinality_sum_53d952a6": null, "hs_analytics_source": null, "hs_analytics_source_data_1": null, "hs_analytics_source_data_1_timestamp_earliest_value_9b2f1fa1": null, "hs_analytics_source_data_2": null, "hs_analytics_source_data_2_timestamp_earliest_value_9b2f9400": null, "hs_analytics_source_timestamp_earliest_value_25a3a52c": null, "hs_avatar_filemanager_key": null, "hs_created_by_user_id": null, "hs_createdate": null, "hs_date_entered_customer": null, "hs_date_entered_evangelist": null, "hs_date_entered_lead": null, "hs_date_entered_marketingqualifiedlead": null, "hs_date_entered_opportunity": null, "hs_date_entered_other": null, "hs_date_entered_salesqualifiedlead": null, "hs_date_entered_subscriber": null, "hs_date_exited_customer": null, "hs_date_exited_evangelist": null, "hs_date_exited_lead": null, "hs_date_exited_marketingqualifiedlead": null, "hs_date_exited_opportunity": null, "hs_date_exited_other": null, "hs_date_exited_salesqualifiedlead": null, "hs_date_exited_subscriber": null, "hs_ideal_customer_profile": null, "hs_is_target_account": null, "hs_last_booked_meeting_date": null, "hs_last_logged_call_date": null, "hs_last_open_task_date": null, "hs_last_sales_activity_date": null, "hs_last_sales_activity_timestamp": null, "hs_lastmodifieddate": "2021-10-12T12:58:23.998000+00:00", "hs_latest_createdate_of_active_subscriptions": null, "hs_latest_meeting_activity": null, "hs_lead_status": null, "hs_merged_object_ids": null, "hs_num_blockers": 0.0, "hs_num_child_companies": 0.0, 
"hs_num_contacts_with_buying_roles": 0.0, "hs_num_decision_makers": 0.0, "hs_num_open_deals": 0.0, "hs_object_id": 7097791047, "hs_parent_company_id": null, "hs_pipeline": null, "hs_predictivecontactscore_v2": null, "hs_predictivecontactscore_v2_next_max_max_d4e58c1e": null, "hs_sales_email_last_replied": null, "hs_target_account": null, "hs_target_account_probability": 0.4076234698295593, "hs_target_account_recommendation_snooze_time": null, "hs_target_account_recommendation_state": null, "hs_time_in_customer": null, "hs_time_in_evangelist": null, "hs_time_in_lead": null, "hs_time_in_marketingqualifiedlead": null, "hs_time_in_opportunity": null, "hs_time_in_other": null, "hs_time_in_salesqualifiedlead": null, "hs_time_in_subscriber": null, "hs_total_deal_value": null, "hs_unique_creation_key": null, "hs_updated_by_user_id": null, "hs_user_ids_of_all_notification_followers": null, "hs_user_ids_of_all_notification_unfollowers": null, "hs_user_ids_of_all_owners": null, "hubspot_owner_assigneddate": null, "hubspot_owner_id": null, "hubspot_team_id": null, "hubspotscore": null, "industry": "Airbyte Test", "is_public": true, "lifecyclestage": null, "linkedin_company_page": "https://www.linkedin.com/company/bd-domain", "linkedinbio": null, "name": "test company 21", "notes_last_contacted": null, "notes_last_updated": null, "notes_next_activity_date": null, "num_associated_contacts": 0.0, "num_associated_deals": null, "num_contacted_notes": null, "num_conversion_events": null, "num_conversion_events_cardinality_sum_d095f14b": null, "num_notes": null, "numberofemployees": null, "phone": "(877) 929-0687-00291", "recent_conversion_date": null, "recent_conversion_date_timestamp_latest_value_72856da1": null, "recent_conversion_event_name": null, "recent_conversion_event_name_timestamp_latest_value_66c820bf": null, "recent_deal_amount": null, "recent_deal_close_date": null, "state": "Massachusetts", "timezone": "Asia/Dhaka", "total_money_raised": null, "total_revenue": null, 
"twitterbio": null, "twitterfollowers": null, "twitterhandle": null, "type": null, "web_technologies": "apache;nginx;facebook_social_plugins;facebook_connect;google_analytics;facebook_like_button;centos", "website": "test.domain.net", "zip": "1216"}, "createdAt": "2021-10-12T12:57:19.647Z", "updatedAt": "2021-10-12T12:58:23.998Z", "archived": false}, "emitted_at": 1655280716925} +{"stream": "companies", "data": {"id": "7097791063", "properties": {"about_us": null, "address": null, "address2": null, "annualrevenue": null, "city": "Test City 67", "closedate": null, "closedate_timestamp_earliest_value_a2a17e6e": null, "country": "Bangladesh", "createdate": "2021-10-12T12:57:34.730000+00:00", "days_to_close": null, "description": "This is a placeholder page installed by the Ubuntu release of the Lighttpd server package.", "domain": "test.domain.net", "engagements_last_meeting_booked": null, "engagements_last_meeting_booked_campaign": null, "engagements_last_meeting_booked_medium": null, "engagements_last_meeting_booked_source": null, "facebook_company_page": null, "facebookfans": null, "first_contact_createdate": null, "first_contact_createdate_timestamp_earliest_value_78b50eea": null, "first_conversion_date": null, "first_conversion_date_timestamp_earliest_value_61f58f2c": null, "first_conversion_event_name": null, "first_conversion_event_name_timestamp_earliest_value_68ddae0a": null, "first_deal_created_date": null, "founded_year": null, "googleplus_page": null, "hs_additional_domains": null, "hs_all_accessible_team_ids": null, "hs_all_assigned_business_unit_ids": null, "hs_all_owner_ids": null, "hs_all_team_ids": null, "hs_analytics_first_timestamp": null, "hs_analytics_first_timestamp_timestamp_earliest_value_11e3a63a": null, "hs_analytics_first_touch_converting_campaign": null, "hs_analytics_first_touch_converting_campaign_timestamp_earliest_value_4757fe10": null, "hs_analytics_first_visit_timestamp": null, 
"hs_analytics_first_visit_timestamp_timestamp_earliest_value_accc17ae": null, "hs_analytics_last_timestamp": null, "hs_analytics_last_timestamp_timestamp_latest_value_4e16365a": null, "hs_analytics_last_touch_converting_campaign": null, "hs_analytics_last_touch_converting_campaign_timestamp_latest_value_81a64e30": null, "hs_analytics_last_visit_timestamp": null, "hs_analytics_last_visit_timestamp_timestamp_latest_value_999a0fce": null, "hs_analytics_num_page_views": null, "hs_analytics_num_page_views_cardinality_sum_e46e85b0": null, "hs_analytics_num_visits": null, "hs_analytics_num_visits_cardinality_sum_53d952a6": null, "hs_analytics_source": null, "hs_analytics_source_data_1": null, "hs_analytics_source_data_1_timestamp_earliest_value_9b2f1fa1": null, "hs_analytics_source_data_2": null, "hs_analytics_source_data_2_timestamp_earliest_value_9b2f9400": null, "hs_analytics_source_timestamp_earliest_value_25a3a52c": null, "hs_avatar_filemanager_key": null, "hs_created_by_user_id": null, "hs_createdate": null, "hs_date_entered_customer": null, "hs_date_entered_evangelist": null, "hs_date_entered_lead": null, "hs_date_entered_marketingqualifiedlead": null, "hs_date_entered_opportunity": null, "hs_date_entered_other": null, "hs_date_entered_salesqualifiedlead": null, "hs_date_entered_subscriber": null, "hs_date_exited_customer": null, "hs_date_exited_evangelist": null, "hs_date_exited_lead": null, "hs_date_exited_marketingqualifiedlead": null, "hs_date_exited_opportunity": null, "hs_date_exited_other": null, "hs_date_exited_salesqualifiedlead": null, "hs_date_exited_subscriber": null, "hs_ideal_customer_profile": null, "hs_is_target_account": null, "hs_last_booked_meeting_date": null, "hs_last_logged_call_date": null, "hs_last_open_task_date": null, "hs_last_sales_activity_date": null, "hs_last_sales_activity_timestamp": null, "hs_lastmodifieddate": "2021-10-12T12:57:55.030000+00:00", "hs_latest_createdate_of_active_subscriptions": null, "hs_latest_meeting_activity": 
null, "hs_lead_status": null, "hs_merged_object_ids": null, "hs_num_blockers": 0.0, "hs_num_child_companies": 0.0, "hs_num_contacts_with_buying_roles": 0.0, "hs_num_decision_makers": 0.0, "hs_num_open_deals": 0.0, "hs_object_id": 7097791063, "hs_parent_company_id": null, "hs_pipeline": null, "hs_predictivecontactscore_v2": null, "hs_predictivecontactscore_v2_next_max_max_d4e58c1e": null, "hs_sales_email_last_replied": null, "hs_target_account": null, "hs_target_account_probability": 0.4076234698295593, "hs_target_account_recommendation_snooze_time": null, "hs_target_account_recommendation_state": null, "hs_time_in_customer": null, "hs_time_in_evangelist": null, "hs_time_in_lead": null, "hs_time_in_marketingqualifiedlead": null, "hs_time_in_opportunity": null, "hs_time_in_other": null, "hs_time_in_salesqualifiedlead": null, "hs_time_in_subscriber": null, "hs_total_deal_value": null, "hs_unique_creation_key": null, "hs_updated_by_user_id": null, "hs_user_ids_of_all_notification_followers": null, "hs_user_ids_of_all_notification_unfollowers": null, "hs_user_ids_of_all_owners": null, "hubspot_owner_assigneddate": null, "hubspot_owner_id": null, "hubspot_team_id": null, "hubspotscore": null, "industry": "Airbyte Test", "is_public": true, "lifecyclestage": null, "linkedin_company_page": "https://www.linkedin.com/company/bd-domain", "linkedinbio": null, "name": "test company 67", "notes_last_contacted": null, "notes_last_updated": null, "notes_next_activity_date": null, "num_associated_contacts": 0.0, "num_associated_deals": null, "num_contacted_notes": null, "num_conversion_events": null, "num_conversion_events_cardinality_sum_d095f14b": null, "num_notes": null, "numberofemployees": null, "phone": "(877) 929-0687-00291", "recent_conversion_date": null, "recent_conversion_date_timestamp_latest_value_72856da1": null, "recent_conversion_event_name": null, "recent_conversion_event_name_timestamp_latest_value_66c820bf": null, "recent_deal_amount": null, 
"recent_deal_close_date": null, "state": "Massachusetts", "timezone": "Asia/Dhaka", "total_money_raised": null, "total_revenue": null, "twitterbio": null, "twitterfollowers": null, "twitterhandle": null, "type": null, "web_technologies": "apache;nginx;facebook_social_plugins;facebook_connect;google_analytics;facebook_like_button;centos", "website": "test.domain.net", "zip": "1216"}, "createdAt": "2021-10-12T12:57:34.730Z", "updatedAt": "2021-10-12T12:57:55.030Z", "archived": false}, "emitted_at": 1655280716925} +{"stream": "companies", "data": {"id": "7097791064", "properties": {"about_us": null, "address": null, "address2": null, "annualrevenue": null, "city": "Test City 73", "closedate": null, "closedate_timestamp_earliest_value_a2a17e6e": null, "country": "Bangladesh", "createdate": "2021-10-12T12:57:36.700000+00:00", "days_to_close": null, "description": "This is a placeholder page installed by the Ubuntu release of the Lighttpd server package.", "domain": "test.domain.net", "engagements_last_meeting_booked": null, "engagements_last_meeting_booked_campaign": null, "engagements_last_meeting_booked_medium": null, "engagements_last_meeting_booked_source": null, "facebook_company_page": null, "facebookfans": null, "first_contact_createdate": null, "first_contact_createdate_timestamp_earliest_value_78b50eea": null, "first_conversion_date": null, "first_conversion_date_timestamp_earliest_value_61f58f2c": null, "first_conversion_event_name": null, "first_conversion_event_name_timestamp_earliest_value_68ddae0a": null, "first_deal_created_date": null, "founded_year": null, "googleplus_page": null, "hs_additional_domains": null, "hs_all_accessible_team_ids": null, "hs_all_assigned_business_unit_ids": null, "hs_all_owner_ids": null, "hs_all_team_ids": null, "hs_analytics_first_timestamp": null, "hs_analytics_first_timestamp_timestamp_earliest_value_11e3a63a": null, "hs_analytics_first_touch_converting_campaign": null, 
"hs_analytics_first_touch_converting_campaign_timestamp_earliest_value_4757fe10": null, "hs_analytics_first_visit_timestamp": null, "hs_analytics_first_visit_timestamp_timestamp_earliest_value_accc17ae": null, "hs_analytics_last_timestamp": null, "hs_analytics_last_timestamp_timestamp_latest_value_4e16365a": null, "hs_analytics_last_touch_converting_campaign": null, "hs_analytics_last_touch_converting_campaign_timestamp_latest_value_81a64e30": null, "hs_analytics_last_visit_timestamp": null, "hs_analytics_last_visit_timestamp_timestamp_latest_value_999a0fce": null, "hs_analytics_num_page_views": null, "hs_analytics_num_page_views_cardinality_sum_e46e85b0": null, "hs_analytics_num_visits": null, "hs_analytics_num_visits_cardinality_sum_53d952a6": null, "hs_analytics_source": null, "hs_analytics_source_data_1": null, "hs_analytics_source_data_1_timestamp_earliest_value_9b2f1fa1": null, "hs_analytics_source_data_2": null, "hs_analytics_source_data_2_timestamp_earliest_value_9b2f9400": null, "hs_analytics_source_timestamp_earliest_value_25a3a52c": null, "hs_avatar_filemanager_key": null, "hs_created_by_user_id": null, "hs_createdate": null, "hs_date_entered_customer": null, "hs_date_entered_evangelist": null, "hs_date_entered_lead": null, "hs_date_entered_marketingqualifiedlead": null, "hs_date_entered_opportunity": null, "hs_date_entered_other": null, "hs_date_entered_salesqualifiedlead": null, "hs_date_entered_subscriber": null, "hs_date_exited_customer": null, "hs_date_exited_evangelist": null, "hs_date_exited_lead": null, "hs_date_exited_marketingqualifiedlead": null, "hs_date_exited_opportunity": null, "hs_date_exited_other": null, "hs_date_exited_salesqualifiedlead": null, "hs_date_exited_subscriber": null, "hs_ideal_customer_profile": null, "hs_is_target_account": null, "hs_last_booked_meeting_date": null, "hs_last_logged_call_date": null, "hs_last_open_task_date": null, "hs_last_sales_activity_date": null, "hs_last_sales_activity_timestamp": null, 
"hs_lastmodifieddate": "2021-10-12T12:57:43.848000+00:00", "hs_latest_createdate_of_active_subscriptions": null, "hs_latest_meeting_activity": null, "hs_lead_status": null, "hs_merged_object_ids": null, "hs_num_blockers": 0.0, "hs_num_child_companies": 0.0, "hs_num_contacts_with_buying_roles": 0.0, "hs_num_decision_makers": 0.0, "hs_num_open_deals": 0.0, "hs_object_id": 7097791064, "hs_parent_company_id": null, "hs_pipeline": null, "hs_predictivecontactscore_v2": null, "hs_predictivecontactscore_v2_next_max_max_d4e58c1e": null, "hs_sales_email_last_replied": null, "hs_target_account": null, "hs_target_account_probability": 0.4076234698295593, "hs_target_account_recommendation_snooze_time": null, "hs_target_account_recommendation_state": null, "hs_time_in_customer": null, "hs_time_in_evangelist": null, "hs_time_in_lead": null, "hs_time_in_marketingqualifiedlead": null, "hs_time_in_opportunity": null, "hs_time_in_other": null, "hs_time_in_salesqualifiedlead": null, "hs_time_in_subscriber": null, "hs_total_deal_value": null, "hs_unique_creation_key": null, "hs_updated_by_user_id": null, "hs_user_ids_of_all_notification_followers": null, "hs_user_ids_of_all_notification_unfollowers": null, "hs_user_ids_of_all_owners": null, "hubspot_owner_assigneddate": null, "hubspot_owner_id": null, "hubspot_team_id": null, "hubspotscore": null, "industry": "Airbyte Test", "is_public": true, "lifecyclestage": null, "linkedin_company_page": "https://www.linkedin.com/company/bd-domain", "linkedinbio": null, "name": "test company 73", "notes_last_contacted": null, "notes_last_updated": null, "notes_next_activity_date": null, "num_associated_contacts": 0.0, "num_associated_deals": null, "num_contacted_notes": null, "num_conversion_events": null, "num_conversion_events_cardinality_sum_d095f14b": null, "num_notes": null, "numberofemployees": null, "phone": "(877) 929-0687-00291", "recent_conversion_date": null, "recent_conversion_date_timestamp_latest_value_72856da1": null, 
"recent_conversion_event_name": null, "recent_conversion_event_name_timestamp_latest_value_66c820bf": null, "recent_deal_amount": null, "recent_deal_close_date": null, "state": "Massachusetts", "timezone": "Asia/Dhaka", "total_money_raised": null, "total_revenue": null, "twitterbio": null, "twitterfollowers": null, "twitterhandle": null, "type": null, "web_technologies": "apache;nginx;facebook_social_plugins;facebook_connect;google_analytics;facebook_like_button;centos", "website": "test.domain.net", "zip": "1216"}, "createdAt": "2021-10-12T12:57:36.700Z", "updatedAt": "2021-10-12T12:57:43.848Z", "archived": false}, "emitted_at": 1655280716926} +{"stream": "companies", "data": {"id": "7097791065", "properties": {"about_us": null, "address": null, "address2": null, "annualrevenue": null, "city": "Test City 77", "closedate": null, "closedate_timestamp_earliest_value_a2a17e6e": null, "country": "Bangladesh", "createdate": "2021-10-12T12:57:38.631000+00:00", "days_to_close": null, "description": "This is a placeholder page installed by the Ubuntu release of the Lighttpd server package.", "domain": "test.domain.net", "engagements_last_meeting_booked": null, "engagements_last_meeting_booked_campaign": null, "engagements_last_meeting_booked_medium": null, "engagements_last_meeting_booked_source": null, "facebook_company_page": null, "facebookfans": null, "first_contact_createdate": null, "first_contact_createdate_timestamp_earliest_value_78b50eea": null, "first_conversion_date": null, "first_conversion_date_timestamp_earliest_value_61f58f2c": null, "first_conversion_event_name": null, "first_conversion_event_name_timestamp_earliest_value_68ddae0a": null, "first_deal_created_date": null, "founded_year": null, "googleplus_page": null, "hs_additional_domains": null, "hs_all_accessible_team_ids": null, "hs_all_assigned_business_unit_ids": null, "hs_all_owner_ids": null, "hs_all_team_ids": null, "hs_analytics_first_timestamp": null, 
"hs_analytics_first_timestamp_timestamp_earliest_value_11e3a63a": null, "hs_analytics_first_touch_converting_campaign": null, "hs_analytics_first_touch_converting_campaign_timestamp_earliest_value_4757fe10": null, "hs_analytics_first_visit_timestamp": null, "hs_analytics_first_visit_timestamp_timestamp_earliest_value_accc17ae": null, "hs_analytics_last_timestamp": null, "hs_analytics_last_timestamp_timestamp_latest_value_4e16365a": null, "hs_analytics_last_touch_converting_campaign": null, "hs_analytics_last_touch_converting_campaign_timestamp_latest_value_81a64e30": null, "hs_analytics_last_visit_timestamp": null, "hs_analytics_last_visit_timestamp_timestamp_latest_value_999a0fce": null, "hs_analytics_num_page_views": null, "hs_analytics_num_page_views_cardinality_sum_e46e85b0": null, "hs_analytics_num_visits": null, "hs_analytics_num_visits_cardinality_sum_53d952a6": null, "hs_analytics_source": null, "hs_analytics_source_data_1": null, "hs_analytics_source_data_1_timestamp_earliest_value_9b2f1fa1": null, "hs_analytics_source_data_2": null, "hs_analytics_source_data_2_timestamp_earliest_value_9b2f9400": null, "hs_analytics_source_timestamp_earliest_value_25a3a52c": null, "hs_avatar_filemanager_key": null, "hs_created_by_user_id": null, "hs_createdate": null, "hs_date_entered_customer": null, "hs_date_entered_evangelist": null, "hs_date_entered_lead": null, "hs_date_entered_marketingqualifiedlead": null, "hs_date_entered_opportunity": null, "hs_date_entered_other": null, "hs_date_entered_salesqualifiedlead": null, "hs_date_entered_subscriber": null, "hs_date_exited_customer": null, "hs_date_exited_evangelist": null, "hs_date_exited_lead": null, "hs_date_exited_marketingqualifiedlead": null, "hs_date_exited_opportunity": null, "hs_date_exited_other": null, "hs_date_exited_salesqualifiedlead": null, "hs_date_exited_subscriber": null, "hs_ideal_customer_profile": null, "hs_is_target_account": null, "hs_last_booked_meeting_date": null, "hs_last_logged_call_date": 
null, "hs_last_open_task_date": null, "hs_last_sales_activity_date": null, "hs_last_sales_activity_timestamp": null, "hs_lastmodifieddate": "2021-10-12T12:57:55.029000+00:00", "hs_latest_createdate_of_active_subscriptions": null, "hs_latest_meeting_activity": null, "hs_lead_status": null, "hs_merged_object_ids": null, "hs_num_blockers": 0.0, "hs_num_child_companies": 0.0, "hs_num_contacts_with_buying_roles": 0.0, "hs_num_decision_makers": 0.0, "hs_num_open_deals": 0.0, "hs_object_id": 7097791065, "hs_parent_company_id": null, "hs_pipeline": null, "hs_predictivecontactscore_v2": null, "hs_predictivecontactscore_v2_next_max_max_d4e58c1e": null, "hs_sales_email_last_replied": null, "hs_target_account": null, "hs_target_account_probability": 0.4076234698295593, "hs_target_account_recommendation_snooze_time": null, "hs_target_account_recommendation_state": null, "hs_time_in_customer": null, "hs_time_in_evangelist": null, "hs_time_in_lead": null, "hs_time_in_marketingqualifiedlead": null, "hs_time_in_opportunity": null, "hs_time_in_other": null, "hs_time_in_salesqualifiedlead": null, "hs_time_in_subscriber": null, "hs_total_deal_value": null, "hs_unique_creation_key": null, "hs_updated_by_user_id": null, "hs_user_ids_of_all_notification_followers": null, "hs_user_ids_of_all_notification_unfollowers": null, "hs_user_ids_of_all_owners": null, "hubspot_owner_assigneddate": null, "hubspot_owner_id": null, "hubspot_team_id": null, "hubspotscore": null, "industry": "Airbyte Test", "is_public": true, "lifecyclestage": null, "linkedin_company_page": "https://www.linkedin.com/company/bd-domain", "linkedinbio": null, "name": "test company 77", "notes_last_contacted": null, "notes_last_updated": null, "notes_next_activity_date": null, "num_associated_contacts": 0.0, "num_associated_deals": null, "num_contacted_notes": null, "num_conversion_events": null, "num_conversion_events_cardinality_sum_d095f14b": null, "num_notes": null, "numberofemployees": null, "phone": "(877) 
929-0687-00291", "recent_conversion_date": null, "recent_conversion_date_timestamp_latest_value_72856da1": null, "recent_conversion_event_name": null, "recent_conversion_event_name_timestamp_latest_value_66c820bf": null, "recent_deal_amount": null, "recent_deal_close_date": null, "state": "Massachusetts", "timezone": "Asia/Dhaka", "total_money_raised": null, "total_revenue": null, "twitterbio": null, "twitterfollowers": null, "twitterhandle": null, "type": null, "web_technologies": "apache;nginx;facebook_social_plugins;facebook_connect;google_analytics;facebook_like_button;centos", "website": "test.domain.net", "zip": "1216"}, "createdAt": "2021-10-12T12:57:38.631Z", "updatedAt": "2021-10-12T12:57:55.029Z", "archived": false}, "emitted_at": 1655280716927} +{"stream": "companies", "data": {"id": "7097791067", "properties": {"about_us": null, "address": null, "address2": null, "annualrevenue": null, "city": "Test City 83", "closedate": null, "closedate_timestamp_earliest_value_a2a17e6e": null, "country": "Bangladesh", "createdate": "2021-10-12T12:57:40.494000+00:00", "days_to_close": null, "description": "This is a placeholder page installed by the Ubuntu release of the Lighttpd server package.", "domain": "test.domain.net", "engagements_last_meeting_booked": null, "engagements_last_meeting_booked_campaign": null, "engagements_last_meeting_booked_medium": null, "engagements_last_meeting_booked_source": null, "facebook_company_page": null, "facebookfans": null, "first_contact_createdate": null, "first_contact_createdate_timestamp_earliest_value_78b50eea": null, "first_conversion_date": null, "first_conversion_date_timestamp_earliest_value_61f58f2c": null, "first_conversion_event_name": null, "first_conversion_event_name_timestamp_earliest_value_68ddae0a": null, "first_deal_created_date": null, "founded_year": null, "googleplus_page": null, "hs_additional_domains": null, "hs_all_accessible_team_ids": null, "hs_all_assigned_business_unit_ids": null, "hs_all_owner_ids": 
null, "hs_all_team_ids": null, "hs_analytics_first_timestamp": null, "hs_analytics_first_timestamp_timestamp_earliest_value_11e3a63a": null, "hs_analytics_first_touch_converting_campaign": null, "hs_analytics_first_touch_converting_campaign_timestamp_earliest_value_4757fe10": null, "hs_analytics_first_visit_timestamp": null, "hs_analytics_first_visit_timestamp_timestamp_earliest_value_accc17ae": null, "hs_analytics_last_timestamp": null, "hs_analytics_last_timestamp_timestamp_latest_value_4e16365a": null, "hs_analytics_last_touch_converting_campaign": null, "hs_analytics_last_touch_converting_campaign_timestamp_latest_value_81a64e30": null, "hs_analytics_last_visit_timestamp": null, "hs_analytics_last_visit_timestamp_timestamp_latest_value_999a0fce": null, "hs_analytics_num_page_views": null, "hs_analytics_num_page_views_cardinality_sum_e46e85b0": null, "hs_analytics_num_visits": null, "hs_analytics_num_visits_cardinality_sum_53d952a6": null, "hs_analytics_source": null, "hs_analytics_source_data_1": null, "hs_analytics_source_data_1_timestamp_earliest_value_9b2f1fa1": null, "hs_analytics_source_data_2": null, "hs_analytics_source_data_2_timestamp_earliest_value_9b2f9400": null, "hs_analytics_source_timestamp_earliest_value_25a3a52c": null, "hs_avatar_filemanager_key": null, "hs_created_by_user_id": null, "hs_createdate": null, "hs_date_entered_customer": null, "hs_date_entered_evangelist": null, "hs_date_entered_lead": null, "hs_date_entered_marketingqualifiedlead": null, "hs_date_entered_opportunity": null, "hs_date_entered_other": null, "hs_date_entered_salesqualifiedlead": null, "hs_date_entered_subscriber": null, "hs_date_exited_customer": null, "hs_date_exited_evangelist": null, "hs_date_exited_lead": null, "hs_date_exited_marketingqualifiedlead": null, "hs_date_exited_opportunity": null, "hs_date_exited_other": null, "hs_date_exited_salesqualifiedlead": null, "hs_date_exited_subscriber": null, "hs_ideal_customer_profile": null, "hs_is_target_account": null, 
"hs_last_booked_meeting_date": null, "hs_last_logged_call_date": null, "hs_last_open_task_date": null, "hs_last_sales_activity_date": null, "hs_last_sales_activity_timestamp": null, "hs_lastmodifieddate": "2021-10-12T12:57:51.388000+00:00", "hs_latest_createdate_of_active_subscriptions": null, "hs_latest_meeting_activity": null, "hs_lead_status": null, "hs_merged_object_ids": null, "hs_num_blockers": 0.0, "hs_num_child_companies": 0.0, "hs_num_contacts_with_buying_roles": 0.0, "hs_num_decision_makers": 0.0, "hs_num_open_deals": 0.0, "hs_object_id": 7097791067, "hs_parent_company_id": null, "hs_pipeline": null, "hs_predictivecontactscore_v2": null, "hs_predictivecontactscore_v2_next_max_max_d4e58c1e": null, "hs_sales_email_last_replied": null, "hs_target_account": null, "hs_target_account_probability": 0.4076234698295593, "hs_target_account_recommendation_snooze_time": null, "hs_target_account_recommendation_state": null, "hs_time_in_customer": null, "hs_time_in_evangelist": null, "hs_time_in_lead": null, "hs_time_in_marketingqualifiedlead": null, "hs_time_in_opportunity": null, "hs_time_in_other": null, "hs_time_in_salesqualifiedlead": null, "hs_time_in_subscriber": null, "hs_total_deal_value": null, "hs_unique_creation_key": null, "hs_updated_by_user_id": null, "hs_user_ids_of_all_notification_followers": null, "hs_user_ids_of_all_notification_unfollowers": null, "hs_user_ids_of_all_owners": null, "hubspot_owner_assigneddate": null, "hubspot_owner_id": null, "hubspot_team_id": null, "hubspotscore": null, "industry": "Airbyte Test", "is_public": true, "lifecyclestage": null, "linkedin_company_page": "https://www.linkedin.com/company/bd-domain", "linkedinbio": null, "name": "test company 83", "notes_last_contacted": null, "notes_last_updated": null, "notes_next_activity_date": null, "num_associated_contacts": 0.0, "num_associated_deals": null, "num_contacted_notes": null, "num_conversion_events": null, "num_conversion_events_cardinality_sum_d095f14b": null, 
"num_notes": null, "numberofemployees": null, "phone": "(877) 929-0687-00291", "recent_conversion_date": null, "recent_conversion_date_timestamp_latest_value_72856da1": null, "recent_conversion_event_name": null, "recent_conversion_event_name_timestamp_latest_value_66c820bf": null, "recent_deal_amount": null, "recent_deal_close_date": null, "state": "Massachusetts", "timezone": "Asia/Dhaka", "total_money_raised": null, "total_revenue": null, "twitterbio": null, "twitterfollowers": null, "twitterhandle": null, "type": null, "web_technologies": "apache;nginx;facebook_social_plugins;facebook_connect;google_analytics;facebook_like_button;centos", "website": "test.domain.net", "zip": "1216"}, "createdAt": "2021-10-12T12:57:40.494Z", "updatedAt": "2021-10-12T12:57:51.388Z", "archived": false}, "emitted_at": 1655280716927} +{"stream": "companies", "data": {"id": "7097791086", "properties": {"about_us": null, "address": null, "address2": null, "annualrevenue": null, "city": "Test City 99", "closedate": null, "closedate_timestamp_earliest_value_a2a17e6e": null, "country": "Bangladesh", "createdate": "2021-10-12T12:57:46.430000+00:00", "days_to_close": null, "description": "This is a placeholder page installed by the Ubuntu release of the Lighttpd server package.", "domain": "test.domain.net", "engagements_last_meeting_booked": null, "engagements_last_meeting_booked_campaign": null, "engagements_last_meeting_booked_medium": null, "engagements_last_meeting_booked_source": null, "facebook_company_page": null, "facebookfans": null, "first_contact_createdate": null, "first_contact_createdate_timestamp_earliest_value_78b50eea": null, "first_conversion_date": null, "first_conversion_date_timestamp_earliest_value_61f58f2c": null, "first_conversion_event_name": null, "first_conversion_event_name_timestamp_earliest_value_68ddae0a": null, "first_deal_created_date": null, "founded_year": null, "googleplus_page": null, "hs_additional_domains": null, "hs_all_accessible_team_ids": null, 
"hs_all_assigned_business_unit_ids": null, "hs_all_owner_ids": null, "hs_all_team_ids": null, "hs_analytics_first_timestamp": null, "hs_analytics_first_timestamp_timestamp_earliest_value_11e3a63a": null, "hs_analytics_first_touch_converting_campaign": null, "hs_analytics_first_touch_converting_campaign_timestamp_earliest_value_4757fe10": null, "hs_analytics_first_visit_timestamp": null, "hs_analytics_first_visit_timestamp_timestamp_earliest_value_accc17ae": null, "hs_analytics_last_timestamp": null, "hs_analytics_last_timestamp_timestamp_latest_value_4e16365a": null, "hs_analytics_last_touch_converting_campaign": null, "hs_analytics_last_touch_converting_campaign_timestamp_latest_value_81a64e30": null, "hs_analytics_last_visit_timestamp": null, "hs_analytics_last_visit_timestamp_timestamp_latest_value_999a0fce": null, "hs_analytics_num_page_views": null, "hs_analytics_num_page_views_cardinality_sum_e46e85b0": null, "hs_analytics_num_visits": null, "hs_analytics_num_visits_cardinality_sum_53d952a6": null, "hs_analytics_source": null, "hs_analytics_source_data_1": null, "hs_analytics_source_data_1_timestamp_earliest_value_9b2f1fa1": null, "hs_analytics_source_data_2": null, "hs_analytics_source_data_2_timestamp_earliest_value_9b2f9400": null, "hs_analytics_source_timestamp_earliest_value_25a3a52c": null, "hs_avatar_filemanager_key": null, "hs_created_by_user_id": null, "hs_createdate": null, "hs_date_entered_customer": null, "hs_date_entered_evangelist": null, "hs_date_entered_lead": null, "hs_date_entered_marketingqualifiedlead": null, "hs_date_entered_opportunity": null, "hs_date_entered_other": null, "hs_date_entered_salesqualifiedlead": null, "hs_date_entered_subscriber": null, "hs_date_exited_customer": null, "hs_date_exited_evangelist": null, "hs_date_exited_lead": null, "hs_date_exited_marketingqualifiedlead": null, "hs_date_exited_opportunity": null, "hs_date_exited_other": null, "hs_date_exited_salesqualifiedlead": null, "hs_date_exited_subscriber": null, 
"hs_ideal_customer_profile": null, "hs_is_target_account": null, "hs_last_booked_meeting_date": null, "hs_last_logged_call_date": null, "hs_last_open_task_date": null, "hs_last_sales_activity_date": null, "hs_last_sales_activity_timestamp": null, "hs_lastmodifieddate": "2021-10-12T12:57:55.898000+00:00", "hs_latest_createdate_of_active_subscriptions": null, "hs_latest_meeting_activity": null, "hs_lead_status": null, "hs_merged_object_ids": null, "hs_num_blockers": 0.0, "hs_num_child_companies": 0.0, "hs_num_contacts_with_buying_roles": 0.0, "hs_num_decision_makers": 0.0, "hs_num_open_deals": 0.0, "hs_object_id": 7097791086, "hs_parent_company_id": null, "hs_pipeline": null, "hs_predictivecontactscore_v2": null, "hs_predictivecontactscore_v2_next_max_max_d4e58c1e": null, "hs_sales_email_last_replied": null, "hs_target_account": null, "hs_target_account_probability": 0.4076234698295593, "hs_target_account_recommendation_snooze_time": null, "hs_target_account_recommendation_state": null, "hs_time_in_customer": null, "hs_time_in_evangelist": null, "hs_time_in_lead": null, "hs_time_in_marketingqualifiedlead": null, "hs_time_in_opportunity": null, "hs_time_in_other": null, "hs_time_in_salesqualifiedlead": null, "hs_time_in_subscriber": null, "hs_total_deal_value": null, "hs_unique_creation_key": null, "hs_updated_by_user_id": null, "hs_user_ids_of_all_notification_followers": null, "hs_user_ids_of_all_notification_unfollowers": null, "hs_user_ids_of_all_owners": null, "hubspot_owner_assigneddate": null, "hubspot_owner_id": null, "hubspot_team_id": null, "hubspotscore": null, "industry": "Airbyte Test", "is_public": true, "lifecyclestage": null, "linkedin_company_page": "https://www.linkedin.com/company/bd-domain", "linkedinbio": null, "name": "test company 99", "notes_last_contacted": null, "notes_last_updated": null, "notes_next_activity_date": null, "num_associated_contacts": 0.0, "num_associated_deals": null, "num_contacted_notes": null, "num_conversion_events": null, 
"num_conversion_events_cardinality_sum_d095f14b": null, "num_notes": null, "numberofemployees": null, "phone": "(877) 929-0687-00291", "recent_conversion_date": null, "recent_conversion_date_timestamp_latest_value_72856da1": null, "recent_conversion_event_name": null, "recent_conversion_event_name_timestamp_latest_value_66c820bf": null, "recent_deal_amount": null, "recent_deal_close_date": null, "state": "Massachusetts", "timezone": "Asia/Dhaka", "total_money_raised": null, "total_revenue": null, "twitterbio": null, "twitterfollowers": null, "twitterhandle": null, "type": null, "web_technologies": "apache;nginx;facebook_social_plugins;facebook_connect;google_analytics;facebook_like_button;centos", "website": "test.domain.net", "zip": "1216"}, "createdAt": "2021-10-12T12:57:46.430Z", "updatedAt": "2021-10-12T12:57:55.898Z", "archived": false}, "emitted_at": 1655280716927} +{"stream": "companies", "data": {"id": "7097794858", "properties": {"about_us": null, "address": null, "address2": null, "annualrevenue": null, "city": "Test City 16", "closedate": null, "closedate_timestamp_earliest_value_a2a17e6e": null, "country": "Bangladesh", "createdate": "2021-10-12T12:57:18.098000+00:00", "days_to_close": null, "description": "This is a placeholder page installed by the Ubuntu release of the Lighttpd server package.", "domain": "test.domain.net", "engagements_last_meeting_booked": null, "engagements_last_meeting_booked_campaign": null, "engagements_last_meeting_booked_medium": null, "engagements_last_meeting_booked_source": null, "facebook_company_page": null, "facebookfans": null, "first_contact_createdate": null, "first_contact_createdate_timestamp_earliest_value_78b50eea": null, "first_conversion_date": null, "first_conversion_date_timestamp_earliest_value_61f58f2c": null, "first_conversion_event_name": null, "first_conversion_event_name_timestamp_earliest_value_68ddae0a": null, "first_deal_created_date": null, "founded_year": null, "googleplus_page": null, 
"hs_additional_domains": null, "hs_all_accessible_team_ids": null, "hs_all_assigned_business_unit_ids": null, "hs_all_owner_ids": null, "hs_all_team_ids": null, "hs_analytics_first_timestamp": null, "hs_analytics_first_timestamp_timestamp_earliest_value_11e3a63a": null, "hs_analytics_first_touch_converting_campaign": null, "hs_analytics_first_touch_converting_campaign_timestamp_earliest_value_4757fe10": null, "hs_analytics_first_visit_timestamp": null, "hs_analytics_first_visit_timestamp_timestamp_earliest_value_accc17ae": null, "hs_analytics_last_timestamp": null, "hs_analytics_last_timestamp_timestamp_latest_value_4e16365a": null, "hs_analytics_last_touch_converting_campaign": null, "hs_analytics_last_touch_converting_campaign_timestamp_latest_value_81a64e30": null, "hs_analytics_last_visit_timestamp": null, "hs_analytics_last_visit_timestamp_timestamp_latest_value_999a0fce": null, "hs_analytics_num_page_views": null, "hs_analytics_num_page_views_cardinality_sum_e46e85b0": null, "hs_analytics_num_visits": null, "hs_analytics_num_visits_cardinality_sum_53d952a6": null, "hs_analytics_source": null, "hs_analytics_source_data_1": null, "hs_analytics_source_data_1_timestamp_earliest_value_9b2f1fa1": null, "hs_analytics_source_data_2": null, "hs_analytics_source_data_2_timestamp_earliest_value_9b2f9400": null, "hs_analytics_source_timestamp_earliest_value_25a3a52c": null, "hs_avatar_filemanager_key": null, "hs_created_by_user_id": null, "hs_createdate": null, "hs_date_entered_customer": null, "hs_date_entered_evangelist": null, "hs_date_entered_lead": null, "hs_date_entered_marketingqualifiedlead": null, "hs_date_entered_opportunity": null, "hs_date_entered_other": null, "hs_date_entered_salesqualifiedlead": null, "hs_date_entered_subscriber": null, "hs_date_exited_customer": null, "hs_date_exited_evangelist": null, "hs_date_exited_lead": null, "hs_date_exited_marketingqualifiedlead": null, "hs_date_exited_opportunity": null, "hs_date_exited_other": null, 
"hs_date_exited_salesqualifiedlead": null, "hs_date_exited_subscriber": null, "hs_ideal_customer_profile": null, "hs_is_target_account": null, "hs_last_booked_meeting_date": null, "hs_last_logged_call_date": null, "hs_last_open_task_date": null, "hs_last_sales_activity_date": null, "hs_last_sales_activity_timestamp": null, "hs_lastmodifieddate": "2021-10-12T12:57:37.896000+00:00", "hs_latest_createdate_of_active_subscriptions": null, "hs_latest_meeting_activity": null, "hs_lead_status": null, "hs_merged_object_ids": null, "hs_num_blockers": 0.0, "hs_num_child_companies": 0.0, "hs_num_contacts_with_buying_roles": 0.0, "hs_num_decision_makers": 0.0, "hs_num_open_deals": 0.0, "hs_object_id": 7097794858, "hs_parent_company_id": null, "hs_pipeline": null, "hs_predictivecontactscore_v2": null, "hs_predictivecontactscore_v2_next_max_max_d4e58c1e": null, "hs_sales_email_last_replied": null, "hs_target_account": null, "hs_target_account_probability": 0.4076234698295593, "hs_target_account_recommendation_snooze_time": null, "hs_target_account_recommendation_state": null, "hs_time_in_customer": null, "hs_time_in_evangelist": null, "hs_time_in_lead": null, "hs_time_in_marketingqualifiedlead": null, "hs_time_in_opportunity": null, "hs_time_in_other": null, "hs_time_in_salesqualifiedlead": null, "hs_time_in_subscriber": null, "hs_total_deal_value": null, "hs_unique_creation_key": null, "hs_updated_by_user_id": null, "hs_user_ids_of_all_notification_followers": null, "hs_user_ids_of_all_notification_unfollowers": null, "hs_user_ids_of_all_owners": null, "hubspot_owner_assigneddate": null, "hubspot_owner_id": null, "hubspot_team_id": null, "hubspotscore": null, "industry": "Airbyte Test", "is_public": true, "lifecyclestage": null, "linkedin_company_page": "https://www.linkedin.com/company/bd-domain", "linkedinbio": null, "name": "test company 16", "notes_last_contacted": null, "notes_last_updated": null, "notes_next_activity_date": null, "num_associated_contacts": 0.0, 
"num_associated_deals": null, "num_contacted_notes": null, "num_conversion_events": null, "num_conversion_events_cardinality_sum_d095f14b": null, "num_notes": null, "numberofemployees": null, "phone": "(877) 929-0687-00291", "recent_conversion_date": null, "recent_conversion_date_timestamp_latest_value_72856da1": null, "recent_conversion_event_name": null, "recent_conversion_event_name_timestamp_latest_value_66c820bf": null, "recent_deal_amount": null, "recent_deal_close_date": null, "state": "Massachusetts", "timezone": "Asia/Dhaka", "total_money_raised": null, "total_revenue": null, "twitterbio": null, "twitterfollowers": null, "twitterhandle": null, "type": null, "web_technologies": "apache;nginx;facebook_social_plugins;facebook_connect;google_analytics;facebook_like_button;centos", "website": "test.domain.net", "zip": "1216"}, "createdAt": "2021-10-12T12:57:18.098Z", "updatedAt": "2021-10-12T12:57:37.896Z", "archived": false}, "emitted_at": 1655280716928} +{"stream": "companies", "data": {"id": "7097794861", "properties": {"about_us": null, "address": null, "address2": null, "annualrevenue": null, "city": "Test City 18", "closedate": null, "closedate_timestamp_earliest_value_a2a17e6e": null, "country": "Bangladesh", "createdate": "2021-10-12T12:57:18.699000+00:00", "days_to_close": null, "description": "This is a placeholder page installed by the Ubuntu release of the Lighttpd server package.", "domain": "test.domain.net", "engagements_last_meeting_booked": null, "engagements_last_meeting_booked_campaign": null, "engagements_last_meeting_booked_medium": null, "engagements_last_meeting_booked_source": null, "facebook_company_page": null, "facebookfans": null, "first_contact_createdate": null, "first_contact_createdate_timestamp_earliest_value_78b50eea": null, "first_conversion_date": null, "first_conversion_date_timestamp_earliest_value_61f58f2c": null, "first_conversion_event_name": null, "first_conversion_event_name_timestamp_earliest_value_68ddae0a": null, 
"first_deal_created_date": null, "founded_year": null, "googleplus_page": null, "hs_additional_domains": null, "hs_all_accessible_team_ids": null, "hs_all_assigned_business_unit_ids": null, "hs_all_owner_ids": null, "hs_all_team_ids": null, "hs_analytics_first_timestamp": null, "hs_analytics_first_timestamp_timestamp_earliest_value_11e3a63a": null, "hs_analytics_first_touch_converting_campaign": null, "hs_analytics_first_touch_converting_campaign_timestamp_earliest_value_4757fe10": null, "hs_analytics_first_visit_timestamp": null, "hs_analytics_first_visit_timestamp_timestamp_earliest_value_accc17ae": null, "hs_analytics_last_timestamp": null, "hs_analytics_last_timestamp_timestamp_latest_value_4e16365a": null, "hs_analytics_last_touch_converting_campaign": null, "hs_analytics_last_touch_converting_campaign_timestamp_latest_value_81a64e30": null, "hs_analytics_last_visit_timestamp": null, "hs_analytics_last_visit_timestamp_timestamp_latest_value_999a0fce": null, "hs_analytics_num_page_views": null, "hs_analytics_num_page_views_cardinality_sum_e46e85b0": null, "hs_analytics_num_visits": null, "hs_analytics_num_visits_cardinality_sum_53d952a6": null, "hs_analytics_source": null, "hs_analytics_source_data_1": null, "hs_analytics_source_data_1_timestamp_earliest_value_9b2f1fa1": null, "hs_analytics_source_data_2": null, "hs_analytics_source_data_2_timestamp_earliest_value_9b2f9400": null, "hs_analytics_source_timestamp_earliest_value_25a3a52c": null, "hs_avatar_filemanager_key": null, "hs_created_by_user_id": null, "hs_createdate": null, "hs_date_entered_customer": null, "hs_date_entered_evangelist": null, "hs_date_entered_lead": null, "hs_date_entered_marketingqualifiedlead": null, "hs_date_entered_opportunity": null, "hs_date_entered_other": null, "hs_date_entered_salesqualifiedlead": null, "hs_date_entered_subscriber": null, "hs_date_exited_customer": null, "hs_date_exited_evangelist": null, "hs_date_exited_lead": null, "hs_date_exited_marketingqualifiedlead": null, 
"hs_date_exited_opportunity": null, "hs_date_exited_other": null, "hs_date_exited_salesqualifiedlead": null, "hs_date_exited_subscriber": null, "hs_ideal_customer_profile": null, "hs_is_target_account": null, "hs_last_booked_meeting_date": null, "hs_last_logged_call_date": null, "hs_last_open_task_date": null, "hs_last_sales_activity_date": null, "hs_last_sales_activity_timestamp": null, "hs_lastmodifieddate": "2021-10-12T12:57:27.334000+00:00", "hs_latest_createdate_of_active_subscriptions": null, "hs_latest_meeting_activity": null, "hs_lead_status": null, "hs_merged_object_ids": null, "hs_num_blockers": 0.0, "hs_num_child_companies": 0.0, "hs_num_contacts_with_buying_roles": 0.0, "hs_num_decision_makers": 0.0, "hs_num_open_deals": 0.0, "hs_object_id": 7097794861, "hs_parent_company_id": null, "hs_pipeline": null, "hs_predictivecontactscore_v2": null, "hs_predictivecontactscore_v2_next_max_max_d4e58c1e": null, "hs_sales_email_last_replied": null, "hs_target_account": null, "hs_target_account_probability": 0.4076234698295593, "hs_target_account_recommendation_snooze_time": null, "hs_target_account_recommendation_state": null, "hs_time_in_customer": null, "hs_time_in_evangelist": null, "hs_time_in_lead": null, "hs_time_in_marketingqualifiedlead": null, "hs_time_in_opportunity": null, "hs_time_in_other": null, "hs_time_in_salesqualifiedlead": null, "hs_time_in_subscriber": null, "hs_total_deal_value": null, "hs_unique_creation_key": null, "hs_updated_by_user_id": null, "hs_user_ids_of_all_notification_followers": null, "hs_user_ids_of_all_notification_unfollowers": null, "hs_user_ids_of_all_owners": null, "hubspot_owner_assigneddate": null, "hubspot_owner_id": null, "hubspot_team_id": null, "hubspotscore": null, "industry": "Airbyte Test", "is_public": true, "lifecyclestage": null, "linkedin_company_page": "https://www.linkedin.com/company/bd-domain", "linkedinbio": null, "name": "test company 18", "notes_last_contacted": null, "notes_last_updated": null, 
"notes_next_activity_date": null, "num_associated_contacts": 0.0, "num_associated_deals": null, "num_contacted_notes": null, "num_conversion_events": null, "num_conversion_events_cardinality_sum_d095f14b": null, "num_notes": null, "numberofemployees": null, "phone": "(877) 929-0687-00291", "recent_conversion_date": null, "recent_conversion_date_timestamp_latest_value_72856da1": null, "recent_conversion_event_name": null, "recent_conversion_event_name_timestamp_latest_value_66c820bf": null, "recent_deal_amount": null, "recent_deal_close_date": null, "state": "Massachusetts", "timezone": "Asia/Dhaka", "total_money_raised": null, "total_revenue": null, "twitterbio": null, "twitterfollowers": null, "twitterhandle": null, "type": null, "web_technologies": "apache;nginx;facebook_social_plugins;facebook_connect;google_analytics;facebook_like_button;centos", "website": "test.domain.net", "zip": "1216"}, "createdAt": "2021-10-12T12:57:18.699Z", "updatedAt": "2021-10-12T12:57:27.334Z", "archived": false}, "emitted_at": 1655280716928} +{"stream": "companies", "data": {"id": "7097794863", "properties": {"about_us": null, "address": null, "address2": null, "annualrevenue": null, "city": "Test City 27", "closedate": null, "closedate_timestamp_earliest_value_a2a17e6e": null, "country": "Bangladesh", "createdate": "2021-10-12T12:57:21.580000+00:00", "days_to_close": null, "description": "This is a placeholder page installed by the Ubuntu release of the Lighttpd server package.", "domain": "test.domain.net", "engagements_last_meeting_booked": null, "engagements_last_meeting_booked_campaign": null, "engagements_last_meeting_booked_medium": null, "engagements_last_meeting_booked_source": null, "facebook_company_page": null, "facebookfans": null, "first_contact_createdate": null, "first_contact_createdate_timestamp_earliest_value_78b50eea": null, "first_conversion_date": null, "first_conversion_date_timestamp_earliest_value_61f58f2c": null, "first_conversion_event_name": null, 
"first_conversion_event_name_timestamp_earliest_value_68ddae0a": null, "first_deal_created_date": null, "founded_year": null, "googleplus_page": null, "hs_additional_domains": null, "hs_all_accessible_team_ids": null, "hs_all_assigned_business_unit_ids": null, "hs_all_owner_ids": null, "hs_all_team_ids": null, "hs_analytics_first_timestamp": null, "hs_analytics_first_timestamp_timestamp_earliest_value_11e3a63a": null, "hs_analytics_first_touch_converting_campaign": null, "hs_analytics_first_touch_converting_campaign_timestamp_earliest_value_4757fe10": null, "hs_analytics_first_visit_timestamp": null, "hs_analytics_first_visit_timestamp_timestamp_earliest_value_accc17ae": null, "hs_analytics_last_timestamp": null, "hs_analytics_last_timestamp_timestamp_latest_value_4e16365a": null, "hs_analytics_last_touch_converting_campaign": null, "hs_analytics_last_touch_converting_campaign_timestamp_latest_value_81a64e30": null, "hs_analytics_last_visit_timestamp": null, "hs_analytics_last_visit_timestamp_timestamp_latest_value_999a0fce": null, "hs_analytics_num_page_views": null, "hs_analytics_num_page_views_cardinality_sum_e46e85b0": null, "hs_analytics_num_visits": null, "hs_analytics_num_visits_cardinality_sum_53d952a6": null, "hs_analytics_source": null, "hs_analytics_source_data_1": null, "hs_analytics_source_data_1_timestamp_earliest_value_9b2f1fa1": null, "hs_analytics_source_data_2": null, "hs_analytics_source_data_2_timestamp_earliest_value_9b2f9400": null, "hs_analytics_source_timestamp_earliest_value_25a3a52c": null, "hs_avatar_filemanager_key": null, "hs_created_by_user_id": null, "hs_createdate": null, "hs_date_entered_customer": null, "hs_date_entered_evangelist": null, "hs_date_entered_lead": null, "hs_date_entered_marketingqualifiedlead": null, "hs_date_entered_opportunity": null, "hs_date_entered_other": null, "hs_date_entered_salesqualifiedlead": null, "hs_date_entered_subscriber": null, "hs_date_exited_customer": null, "hs_date_exited_evangelist": null, 
"hs_date_exited_lead": null, "hs_date_exited_marketingqualifiedlead": null, "hs_date_exited_opportunity": null, "hs_date_exited_other": null, "hs_date_exited_salesqualifiedlead": null, "hs_date_exited_subscriber": null, "hs_ideal_customer_profile": null, "hs_is_target_account": null, "hs_last_booked_meeting_date": null, "hs_last_logged_call_date": null, "hs_last_open_task_date": null, "hs_last_sales_activity_date": null, "hs_last_sales_activity_timestamp": null, "hs_lastmodifieddate": "2021-10-12T12:57:39.076000+00:00", "hs_latest_createdate_of_active_subscriptions": null, "hs_latest_meeting_activity": null, "hs_lead_status": null, "hs_merged_object_ids": null, "hs_num_blockers": 0.0, "hs_num_child_companies": 0.0, "hs_num_contacts_with_buying_roles": 0.0, "hs_num_decision_makers": 0.0, "hs_num_open_deals": 0.0, "hs_object_id": 7097794863, "hs_parent_company_id": null, "hs_pipeline": null, "hs_predictivecontactscore_v2": null, "hs_predictivecontactscore_v2_next_max_max_d4e58c1e": null, "hs_sales_email_last_replied": null, "hs_target_account": null, "hs_target_account_probability": 0.4076234698295593, "hs_target_account_recommendation_snooze_time": null, "hs_target_account_recommendation_state": null, "hs_time_in_customer": null, "hs_time_in_evangelist": null, "hs_time_in_lead": null, "hs_time_in_marketingqualifiedlead": null, "hs_time_in_opportunity": null, "hs_time_in_other": null, "hs_time_in_salesqualifiedlead": null, "hs_time_in_subscriber": null, "hs_total_deal_value": null, "hs_unique_creation_key": null, "hs_updated_by_user_id": null, "hs_user_ids_of_all_notification_followers": null, "hs_user_ids_of_all_notification_unfollowers": null, "hs_user_ids_of_all_owners": null, "hubspot_owner_assigneddate": null, "hubspot_owner_id": null, "hubspot_team_id": null, "hubspotscore": null, "industry": "Airbyte Test", "is_public": true, "lifecyclestage": null, "linkedin_company_page": "https://www.linkedin.com/company/bd-domain", "linkedinbio": null, "name": "test 
company 27", "notes_last_contacted": null, "notes_last_updated": null, "notes_next_activity_date": null, "num_associated_contacts": 0.0, "num_associated_deals": null, "num_contacted_notes": null, "num_conversion_events": null, "num_conversion_events_cardinality_sum_d095f14b": null, "num_notes": null, "numberofemployees": null, "phone": "(877) 929-0687-00291", "recent_conversion_date": null, "recent_conversion_date_timestamp_latest_value_72856da1": null, "recent_conversion_event_name": null, "recent_conversion_event_name_timestamp_latest_value_66c820bf": null, "recent_deal_amount": null, "recent_deal_close_date": null, "state": "Massachusetts", "timezone": "Asia/Dhaka", "total_money_raised": null, "total_revenue": null, "twitterbio": null, "twitterfollowers": null, "twitterhandle": null, "type": null, "web_technologies": "apache;nginx;facebook_social_plugins;facebook_connect;google_analytics;facebook_like_button;centos", "website": "test.domain.net", "zip": "1216"}, "createdAt": "2021-10-12T12:57:21.580Z", "updatedAt": "2021-10-12T12:57:39.076Z", "archived": false}, "emitted_at": 1655280716929} +{"stream": "companies", "data": {"id": "7097794872", "properties": {"about_us": null, "address": null, "address2": null, "annualrevenue": null, "city": "Test City 37", "closedate": null, "closedate_timestamp_earliest_value_a2a17e6e": null, "country": "Bangladesh", "createdate": "2021-10-12T12:57:25.065000+00:00", "days_to_close": null, "description": "This is a placeholder page installed by the Ubuntu release of the Lighttpd server package.", "domain": "test.domain.net", "engagements_last_meeting_booked": null, "engagements_last_meeting_booked_campaign": null, "engagements_last_meeting_booked_medium": null, "engagements_last_meeting_booked_source": null, "facebook_company_page": null, "facebookfans": null, "first_contact_createdate": null, "first_contact_createdate_timestamp_earliest_value_78b50eea": null, "first_conversion_date": null, 
"first_conversion_date_timestamp_earliest_value_61f58f2c": null, "first_conversion_event_name": null, "first_conversion_event_name_timestamp_earliest_value_68ddae0a": null, "first_deal_created_date": null, "founded_year": null, "googleplus_page": null, "hs_additional_domains": null, "hs_all_accessible_team_ids": null, "hs_all_assigned_business_unit_ids": null, "hs_all_owner_ids": null, "hs_all_team_ids": null, "hs_analytics_first_timestamp": null, "hs_analytics_first_timestamp_timestamp_earliest_value_11e3a63a": null, "hs_analytics_first_touch_converting_campaign": null, "hs_analytics_first_touch_converting_campaign_timestamp_earliest_value_4757fe10": null, "hs_analytics_first_visit_timestamp": null, "hs_analytics_first_visit_timestamp_timestamp_earliest_value_accc17ae": null, "hs_analytics_last_timestamp": null, "hs_analytics_last_timestamp_timestamp_latest_value_4e16365a": null, "hs_analytics_last_touch_converting_campaign": null, "hs_analytics_last_touch_converting_campaign_timestamp_latest_value_81a64e30": null, "hs_analytics_last_visit_timestamp": null, "hs_analytics_last_visit_timestamp_timestamp_latest_value_999a0fce": null, "hs_analytics_num_page_views": null, "hs_analytics_num_page_views_cardinality_sum_e46e85b0": null, "hs_analytics_num_visits": null, "hs_analytics_num_visits_cardinality_sum_53d952a6": null, "hs_analytics_source": null, "hs_analytics_source_data_1": null, "hs_analytics_source_data_1_timestamp_earliest_value_9b2f1fa1": null, "hs_analytics_source_data_2": null, "hs_analytics_source_data_2_timestamp_earliest_value_9b2f9400": null, "hs_analytics_source_timestamp_earliest_value_25a3a52c": null, "hs_avatar_filemanager_key": null, "hs_created_by_user_id": null, "hs_createdate": null, "hs_date_entered_customer": null, "hs_date_entered_evangelist": null, "hs_date_entered_lead": null, "hs_date_entered_marketingqualifiedlead": null, "hs_date_entered_opportunity": null, "hs_date_entered_other": null, "hs_date_entered_salesqualifiedlead": null, 
"hs_date_entered_subscriber": null, "hs_date_exited_customer": null, "hs_date_exited_evangelist": null, "hs_date_exited_lead": null, "hs_date_exited_marketingqualifiedlead": null, "hs_date_exited_opportunity": null, "hs_date_exited_other": null, "hs_date_exited_salesqualifiedlead": null, "hs_date_exited_subscriber": null, "hs_ideal_customer_profile": null, "hs_is_target_account": null, "hs_last_booked_meeting_date": null, "hs_last_logged_call_date": null, "hs_last_open_task_date": null, "hs_last_sales_activity_date": null, "hs_last_sales_activity_timestamp": null, "hs_lastmodifieddate": "2021-10-12T12:57:44.880000+00:00", "hs_latest_createdate_of_active_subscriptions": null, "hs_latest_meeting_activity": null, "hs_lead_status": null, "hs_merged_object_ids": null, "hs_num_blockers": 0.0, "hs_num_child_companies": 0.0, "hs_num_contacts_with_buying_roles": 0.0, "hs_num_decision_makers": 0.0, "hs_num_open_deals": 0.0, "hs_object_id": 7097794872, "hs_parent_company_id": null, "hs_pipeline": null, "hs_predictivecontactscore_v2": null, "hs_predictivecontactscore_v2_next_max_max_d4e58c1e": null, "hs_sales_email_last_replied": null, "hs_target_account": null, "hs_target_account_probability": 0.4076234698295593, "hs_target_account_recommendation_snooze_time": null, "hs_target_account_recommendation_state": null, "hs_time_in_customer": null, "hs_time_in_evangelist": null, "hs_time_in_lead": null, "hs_time_in_marketingqualifiedlead": null, "hs_time_in_opportunity": null, "hs_time_in_other": null, "hs_time_in_salesqualifiedlead": null, "hs_time_in_subscriber": null, "hs_total_deal_value": null, "hs_unique_creation_key": null, "hs_updated_by_user_id": null, "hs_user_ids_of_all_notification_followers": null, "hs_user_ids_of_all_notification_unfollowers": null, "hs_user_ids_of_all_owners": null, "hubspot_owner_assigneddate": null, "hubspot_owner_id": null, "hubspot_team_id": null, "hubspotscore": null, "industry": "Airbyte Test", "is_public": true, "lifecyclestage": null, 
"linkedin_company_page": "https://www.linkedin.com/company/bd-domain", "linkedinbio": null, "name": "test company 37", "notes_last_contacted": null, "notes_last_updated": null, "notes_next_activity_date": null, "num_associated_contacts": 0.0, "num_associated_deals": null, "num_contacted_notes": null, "num_conversion_events": null, "num_conversion_events_cardinality_sum_d095f14b": null, "num_notes": null, "numberofemployees": null, "phone": "(877) 929-0687-00291", "recent_conversion_date": null, "recent_conversion_date_timestamp_latest_value_72856da1": null, "recent_conversion_event_name": null, "recent_conversion_event_name_timestamp_latest_value_66c820bf": null, "recent_deal_amount": null, "recent_deal_close_date": null, "state": "Massachusetts", "timezone": "Asia/Dhaka", "total_money_raised": null, "total_revenue": null, "twitterbio": null, "twitterfollowers": null, "twitterhandle": null, "type": null, "web_technologies": "apache;nginx;facebook_social_plugins;facebook_connect;google_analytics;facebook_like_button;centos", "website": "test.domain.net", "zip": "1216"}, "createdAt": "2021-10-12T12:57:25.065Z", "updatedAt": "2021-10-12T12:57:44.880Z", "archived": false}, "emitted_at": 1655280716929} +{"stream": "companies", "data": {"id": "7097794878", "properties": {"about_us": null, "address": null, "address2": null, "annualrevenue": null, "city": "Test City 49", "closedate": null, "closedate_timestamp_earliest_value_a2a17e6e": null, "country": "Bangladesh", "createdate": "2021-10-12T12:57:29.215000+00:00", "days_to_close": null, "description": "This is a placeholder page installed by the Ubuntu release of the Lighttpd server package.", "domain": "test.domain.net", "engagements_last_meeting_booked": null, "engagements_last_meeting_booked_campaign": null, "engagements_last_meeting_booked_medium": null, "engagements_last_meeting_booked_source": null, "facebook_company_page": null, "facebookfans": null, "first_contact_createdate": null, 
"first_contact_createdate_timestamp_earliest_value_78b50eea": null, "first_conversion_date": null, "first_conversion_date_timestamp_earliest_value_61f58f2c": null, "first_conversion_event_name": null, "first_conversion_event_name_timestamp_earliest_value_68ddae0a": null, "first_deal_created_date": null, "founded_year": null, "googleplus_page": null, "hs_additional_domains": null, "hs_all_accessible_team_ids": null, "hs_all_assigned_business_unit_ids": null, "hs_all_owner_ids": null, "hs_all_team_ids": null, "hs_analytics_first_timestamp": null, "hs_analytics_first_timestamp_timestamp_earliest_value_11e3a63a": null, "hs_analytics_first_touch_converting_campaign": null, "hs_analytics_first_touch_converting_campaign_timestamp_earliest_value_4757fe10": null, "hs_analytics_first_visit_timestamp": null, "hs_analytics_first_visit_timestamp_timestamp_earliest_value_accc17ae": null, "hs_analytics_last_timestamp": null, "hs_analytics_last_timestamp_timestamp_latest_value_4e16365a": null, "hs_analytics_last_touch_converting_campaign": null, "hs_analytics_last_touch_converting_campaign_timestamp_latest_value_81a64e30": null, "hs_analytics_last_visit_timestamp": null, "hs_analytics_last_visit_timestamp_timestamp_latest_value_999a0fce": null, "hs_analytics_num_page_views": null, "hs_analytics_num_page_views_cardinality_sum_e46e85b0": null, "hs_analytics_num_visits": null, "hs_analytics_num_visits_cardinality_sum_53d952a6": null, "hs_analytics_source": null, "hs_analytics_source_data_1": null, "hs_analytics_source_data_1_timestamp_earliest_value_9b2f1fa1": null, "hs_analytics_source_data_2": null, "hs_analytics_source_data_2_timestamp_earliest_value_9b2f9400": null, "hs_analytics_source_timestamp_earliest_value_25a3a52c": null, "hs_avatar_filemanager_key": null, "hs_created_by_user_id": null, "hs_createdate": null, "hs_date_entered_customer": null, "hs_date_entered_evangelist": null, "hs_date_entered_lead": null, "hs_date_entered_marketingqualifiedlead": null, 
"hs_date_entered_opportunity": null, "hs_date_entered_other": null, "hs_date_entered_salesqualifiedlead": null, "hs_date_entered_subscriber": null, "hs_date_exited_customer": null, "hs_date_exited_evangelist": null, "hs_date_exited_lead": null, "hs_date_exited_marketingqualifiedlead": null, "hs_date_exited_opportunity": null, "hs_date_exited_other": null, "hs_date_exited_salesqualifiedlead": null, "hs_date_exited_subscriber": null, "hs_ideal_customer_profile": null, "hs_is_target_account": null, "hs_last_booked_meeting_date": null, "hs_last_logged_call_date": null, "hs_last_open_task_date": null, "hs_last_sales_activity_date": null, "hs_last_sales_activity_timestamp": null, "hs_lastmodifieddate": "2021-10-12T12:57:54.993000+00:00", "hs_latest_createdate_of_active_subscriptions": null, "hs_latest_meeting_activity": null, "hs_lead_status": null, "hs_merged_object_ids": null, "hs_num_blockers": 0.0, "hs_num_child_companies": 0.0, "hs_num_contacts_with_buying_roles": 0.0, "hs_num_decision_makers": 0.0, "hs_num_open_deals": 0.0, "hs_object_id": 7097794878, "hs_parent_company_id": null, "hs_pipeline": null, "hs_predictivecontactscore_v2": null, "hs_predictivecontactscore_v2_next_max_max_d4e58c1e": null, "hs_sales_email_last_replied": null, "hs_target_account": null, "hs_target_account_probability": 0.4076234698295593, "hs_target_account_recommendation_snooze_time": null, "hs_target_account_recommendation_state": null, "hs_time_in_customer": null, "hs_time_in_evangelist": null, "hs_time_in_lead": null, "hs_time_in_marketingqualifiedlead": null, "hs_time_in_opportunity": null, "hs_time_in_other": null, "hs_time_in_salesqualifiedlead": null, "hs_time_in_subscriber": null, "hs_total_deal_value": null, "hs_unique_creation_key": null, "hs_updated_by_user_id": null, "hs_user_ids_of_all_notification_followers": null, "hs_user_ids_of_all_notification_unfollowers": null, "hs_user_ids_of_all_owners": null, "hubspot_owner_assigneddate": null, "hubspot_owner_id": null, 
"hubspot_team_id": null, "hubspotscore": null, "industry": "Airbyte Test", "is_public": true, "lifecyclestage": null, "linkedin_company_page": "https://www.linkedin.com/company/bd-domain", "linkedinbio": null, "name": "test company 49", "notes_last_contacted": null, "notes_last_updated": null, "notes_next_activity_date": null, "num_associated_contacts": 0.0, "num_associated_deals": null, "num_contacted_notes": null, "num_conversion_events": null, "num_conversion_events_cardinality_sum_d095f14b": null, "num_notes": null, "numberofemployees": null, "phone": "(877) 929-0687-00291", "recent_conversion_date": null, "recent_conversion_date_timestamp_latest_value_72856da1": null, "recent_conversion_event_name": null, "recent_conversion_event_name_timestamp_latest_value_66c820bf": null, "recent_deal_amount": null, "recent_deal_close_date": null, "state": "Massachusetts", "timezone": "Asia/Dhaka", "total_money_raised": null, "total_revenue": null, "twitterbio": null, "twitterfollowers": null, "twitterhandle": null, "type": null, "web_technologies": "apache;nginx;facebook_social_plugins;facebook_connect;google_analytics;facebook_like_button;centos", "website": "test.domain.net", "zip": "1216"}, "createdAt": "2021-10-12T12:57:29.215Z", "updatedAt": "2021-10-12T12:57:54.993Z", "archived": false}, "emitted_at": 1655280716930} +{"stream": "companies", "data": {"id": "7097794879", "properties": {"about_us": null, "address": null, "address2": null, "annualrevenue": null, "city": "Test City 53", "closedate": null, "closedate_timestamp_earliest_value_a2a17e6e": null, "country": "Bangladesh", "createdate": "2021-10-12T12:57:30.490000+00:00", "days_to_close": null, "description": "This is a placeholder page installed by the Ubuntu release of the Lighttpd server package.", "domain": "test.domain.net", "engagements_last_meeting_booked": null, "engagements_last_meeting_booked_campaign": null, "engagements_last_meeting_booked_medium": null, "engagements_last_meeting_booked_source": null, 
"facebook_company_page": null, "facebookfans": null, "first_contact_createdate": null, "first_contact_createdate_timestamp_earliest_value_78b50eea": null, "first_conversion_date": null, "first_conversion_date_timestamp_earliest_value_61f58f2c": null, "first_conversion_event_name": null, "first_conversion_event_name_timestamp_earliest_value_68ddae0a": null, "first_deal_created_date": null, "founded_year": null, "googleplus_page": null, "hs_additional_domains": null, "hs_all_accessible_team_ids": null, "hs_all_assigned_business_unit_ids": null, "hs_all_owner_ids": null, "hs_all_team_ids": null, "hs_analytics_first_timestamp": null, "hs_analytics_first_timestamp_timestamp_earliest_value_11e3a63a": null, "hs_analytics_first_touch_converting_campaign": null, "hs_analytics_first_touch_converting_campaign_timestamp_earliest_value_4757fe10": null, "hs_analytics_first_visit_timestamp": null, "hs_analytics_first_visit_timestamp_timestamp_earliest_value_accc17ae": null, "hs_analytics_last_timestamp": null, "hs_analytics_last_timestamp_timestamp_latest_value_4e16365a": null, "hs_analytics_last_touch_converting_campaign": null, "hs_analytics_last_touch_converting_campaign_timestamp_latest_value_81a64e30": null, "hs_analytics_last_visit_timestamp": null, "hs_analytics_last_visit_timestamp_timestamp_latest_value_999a0fce": null, "hs_analytics_num_page_views": null, "hs_analytics_num_page_views_cardinality_sum_e46e85b0": null, "hs_analytics_num_visits": null, "hs_analytics_num_visits_cardinality_sum_53d952a6": null, "hs_analytics_source": null, "hs_analytics_source_data_1": null, "hs_analytics_source_data_1_timestamp_earliest_value_9b2f1fa1": null, "hs_analytics_source_data_2": null, "hs_analytics_source_data_2_timestamp_earliest_value_9b2f9400": null, "hs_analytics_source_timestamp_earliest_value_25a3a52c": null, "hs_avatar_filemanager_key": null, "hs_created_by_user_id": null, "hs_createdate": null, "hs_date_entered_customer": null, "hs_date_entered_evangelist": null, 
"hs_date_entered_lead": null, "hs_date_entered_marketingqualifiedlead": null, "hs_date_entered_opportunity": null, "hs_date_entered_other": null, "hs_date_entered_salesqualifiedlead": null, "hs_date_entered_subscriber": null, "hs_date_exited_customer": null, "hs_date_exited_evangelist": null, "hs_date_exited_lead": null, "hs_date_exited_marketingqualifiedlead": null, "hs_date_exited_opportunity": null, "hs_date_exited_other": null, "hs_date_exited_salesqualifiedlead": null, "hs_date_exited_subscriber": null, "hs_ideal_customer_profile": null, "hs_is_target_account": null, "hs_last_booked_meeting_date": null, "hs_last_logged_call_date": null, "hs_last_open_task_date": null, "hs_last_sales_activity_date": null, "hs_last_sales_activity_timestamp": null, "hs_lastmodifieddate": "2021-10-12T12:57:37.951000+00:00", "hs_latest_createdate_of_active_subscriptions": null, "hs_latest_meeting_activity": null, "hs_lead_status": null, "hs_merged_object_ids": null, "hs_num_blockers": 0.0, "hs_num_child_companies": 0.0, "hs_num_contacts_with_buying_roles": 0.0, "hs_num_decision_makers": 0.0, "hs_num_open_deals": 0.0, "hs_object_id": 7097794879, "hs_parent_company_id": null, "hs_pipeline": null, "hs_predictivecontactscore_v2": null, "hs_predictivecontactscore_v2_next_max_max_d4e58c1e": null, "hs_sales_email_last_replied": null, "hs_target_account": null, "hs_target_account_probability": 0.4076234698295593, "hs_target_account_recommendation_snooze_time": null, "hs_target_account_recommendation_state": null, "hs_time_in_customer": null, "hs_time_in_evangelist": null, "hs_time_in_lead": null, "hs_time_in_marketingqualifiedlead": null, "hs_time_in_opportunity": null, "hs_time_in_other": null, "hs_time_in_salesqualifiedlead": null, "hs_time_in_subscriber": null, "hs_total_deal_value": null, "hs_unique_creation_key": null, "hs_updated_by_user_id": null, "hs_user_ids_of_all_notification_followers": null, "hs_user_ids_of_all_notification_unfollowers": null, "hs_user_ids_of_all_owners": 
null, "hubspot_owner_assigneddate": null, "hubspot_owner_id": null, "hubspot_team_id": null, "hubspotscore": null, "industry": "Airbyte Test", "is_public": true, "lifecyclestage": null, "linkedin_company_page": "https://www.linkedin.com/company/bd-domain", "linkedinbio": null, "name": "test company 53", "notes_last_contacted": null, "notes_last_updated": null, "notes_next_activity_date": null, "num_associated_contacts": 0.0, "num_associated_deals": null, "num_contacted_notes": null, "num_conversion_events": null, "num_conversion_events_cardinality_sum_d095f14b": null, "num_notes": null, "numberofemployees": null, "phone": "(877) 929-0687-00291", "recent_conversion_date": null, "recent_conversion_date_timestamp_latest_value_72856da1": null, "recent_conversion_event_name": null, "recent_conversion_event_name_timestamp_latest_value_66c820bf": null, "recent_deal_amount": null, "recent_deal_close_date": null, "state": "Massachusetts", "timezone": "Asia/Dhaka", "total_money_raised": null, "total_revenue": null, "twitterbio": null, "twitterfollowers": null, "twitterhandle": null, "type": null, "web_technologies": "apache;nginx;facebook_social_plugins;facebook_connect;google_analytics;facebook_like_button;centos", "website": "test.domain.net", "zip": "1216"}, "createdAt": "2021-10-12T12:57:30.490Z", "updatedAt": "2021-10-12T12:57:37.951Z", "archived": false}, "emitted_at": 1655280716930} +{"stream": "companies", "data": {"id": "7097794880", "properties": {"about_us": null, "address": null, "address2": null, "annualrevenue": null, "city": "Test City 60", "closedate": null, "closedate_timestamp_earliest_value_a2a17e6e": null, "country": "Bangladesh", "createdate": "2021-10-12T12:57:32.632000+00:00", "days_to_close": null, "description": "This is a placeholder page installed by the Ubuntu release of the Lighttpd server package.", "domain": "test.domain.net", "engagements_last_meeting_booked": null, "engagements_last_meeting_booked_campaign": null, 
"engagements_last_meeting_booked_medium": null, "engagements_last_meeting_booked_source": null, "facebook_company_page": null, "facebookfans": null, "first_contact_createdate": null, "first_contact_createdate_timestamp_earliest_value_78b50eea": null, "first_conversion_date": null, "first_conversion_date_timestamp_earliest_value_61f58f2c": null, "first_conversion_event_name": null, "first_conversion_event_name_timestamp_earliest_value_68ddae0a": null, "first_deal_created_date": null, "founded_year": null, "googleplus_page": null, "hs_additional_domains": null, "hs_all_accessible_team_ids": null, "hs_all_assigned_business_unit_ids": null, "hs_all_owner_ids": null, "hs_all_team_ids": null, "hs_analytics_first_timestamp": null, "hs_analytics_first_timestamp_timestamp_earliest_value_11e3a63a": null, "hs_analytics_first_touch_converting_campaign": null, "hs_analytics_first_touch_converting_campaign_timestamp_earliest_value_4757fe10": null, "hs_analytics_first_visit_timestamp": null, "hs_analytics_first_visit_timestamp_timestamp_earliest_value_accc17ae": null, "hs_analytics_last_timestamp": null, "hs_analytics_last_timestamp_timestamp_latest_value_4e16365a": null, "hs_analytics_last_touch_converting_campaign": null, "hs_analytics_last_touch_converting_campaign_timestamp_latest_value_81a64e30": null, "hs_analytics_last_visit_timestamp": null, "hs_analytics_last_visit_timestamp_timestamp_latest_value_999a0fce": null, "hs_analytics_num_page_views": null, "hs_analytics_num_page_views_cardinality_sum_e46e85b0": null, "hs_analytics_num_visits": null, "hs_analytics_num_visits_cardinality_sum_53d952a6": null, "hs_analytics_source": null, "hs_analytics_source_data_1": null, "hs_analytics_source_data_1_timestamp_earliest_value_9b2f1fa1": null, "hs_analytics_source_data_2": null, "hs_analytics_source_data_2_timestamp_earliest_value_9b2f9400": null, "hs_analytics_source_timestamp_earliest_value_25a3a52c": null, "hs_avatar_filemanager_key": null, "hs_created_by_user_id": null, 
"hs_createdate": null, "hs_date_entered_customer": null, "hs_date_entered_evangelist": null, "hs_date_entered_lead": null, "hs_date_entered_marketingqualifiedlead": null, "hs_date_entered_opportunity": null, "hs_date_entered_other": null, "hs_date_entered_salesqualifiedlead": null, "hs_date_entered_subscriber": null, "hs_date_exited_customer": null, "hs_date_exited_evangelist": null, "hs_date_exited_lead": null, "hs_date_exited_marketingqualifiedlead": null, "hs_date_exited_opportunity": null, "hs_date_exited_other": null, "hs_date_exited_salesqualifiedlead": null, "hs_date_exited_subscriber": null, "hs_ideal_customer_profile": null, "hs_is_target_account": null, "hs_last_booked_meeting_date": null, "hs_last_logged_call_date": null, "hs_last_open_task_date": null, "hs_last_sales_activity_date": null, "hs_last_sales_activity_timestamp": null, "hs_lastmodifieddate": "2021-10-12T12:57:50.882000+00:00", "hs_latest_createdate_of_active_subscriptions": null, "hs_latest_meeting_activity": null, "hs_lead_status": null, "hs_merged_object_ids": null, "hs_num_blockers": 0.0, "hs_num_child_companies": 0.0, "hs_num_contacts_with_buying_roles": 0.0, "hs_num_decision_makers": 0.0, "hs_num_open_deals": 0.0, "hs_object_id": 7097794880, "hs_parent_company_id": null, "hs_pipeline": null, "hs_predictivecontactscore_v2": null, "hs_predictivecontactscore_v2_next_max_max_d4e58c1e": null, "hs_sales_email_last_replied": null, "hs_target_account": null, "hs_target_account_probability": 0.4076234698295593, "hs_target_account_recommendation_snooze_time": null, "hs_target_account_recommendation_state": null, "hs_time_in_customer": null, "hs_time_in_evangelist": null, "hs_time_in_lead": null, "hs_time_in_marketingqualifiedlead": null, "hs_time_in_opportunity": null, "hs_time_in_other": null, "hs_time_in_salesqualifiedlead": null, "hs_time_in_subscriber": null, "hs_total_deal_value": null, "hs_unique_creation_key": null, "hs_updated_by_user_id": null, "hs_user_ids_of_all_notification_followers": 
null, "hs_user_ids_of_all_notification_unfollowers": null, "hs_user_ids_of_all_owners": null, "hubspot_owner_assigneddate": null, "hubspot_owner_id": null, "hubspot_team_id": null, "hubspotscore": null, "industry": "Airbyte Test", "is_public": true, "lifecyclestage": null, "linkedin_company_page": "https://www.linkedin.com/company/bd-domain", "linkedinbio": null, "name": "test company 60", "notes_last_contacted": null, "notes_last_updated": null, "notes_next_activity_date": null, "num_associated_contacts": 0.0, "num_associated_deals": null, "num_contacted_notes": null, "num_conversion_events": null, "num_conversion_events_cardinality_sum_d095f14b": null, "num_notes": null, "numberofemployees": null, "phone": "(877) 929-0687-00291", "recent_conversion_date": null, "recent_conversion_date_timestamp_latest_value_72856da1": null, "recent_conversion_event_name": null, "recent_conversion_event_name_timestamp_latest_value_66c820bf": null, "recent_deal_amount": null, "recent_deal_close_date": null, "state": "Massachusetts", "timezone": "Asia/Dhaka", "total_money_raised": null, "total_revenue": null, "twitterbio": null, "twitterfollowers": null, "twitterhandle": null, "type": null, "web_technologies": "apache;nginx;facebook_social_plugins;facebook_connect;google_analytics;facebook_like_button;centos", "website": "test.domain.net", "zip": "1216"}, "createdAt": "2021-10-12T12:57:32.632Z", "updatedAt": "2021-10-12T12:57:50.882Z", "archived": false}, "emitted_at": 1655280716930} +{"stream": "companies", "data": {"id": "7097794889", "properties": {"about_us": null, "address": null, "address2": null, "annualrevenue": null, "city": "Test City 78", "closedate": null, "closedate_timestamp_earliest_value_a2a17e6e": null, "country": "Bangladesh", "createdate": "2021-10-12T12:57:38.964000+00:00", "days_to_close": null, "description": "This is a placeholder page installed by the Ubuntu release of the Lighttpd server package.", "domain": "test.domain.net", 
"engagements_last_meeting_booked": null, "engagements_last_meeting_booked_campaign": null, "engagements_last_meeting_booked_medium": null, "engagements_last_meeting_booked_source": null, "facebook_company_page": null, "facebookfans": null, "first_contact_createdate": null, "first_contact_createdate_timestamp_earliest_value_78b50eea": null, "first_conversion_date": null, "first_conversion_date_timestamp_earliest_value_61f58f2c": null, "first_conversion_event_name": null, "first_conversion_event_name_timestamp_earliest_value_68ddae0a": null, "first_deal_created_date": null, "founded_year": null, "googleplus_page": null, "hs_additional_domains": null, "hs_all_accessible_team_ids": null, "hs_all_assigned_business_unit_ids": null, "hs_all_owner_ids": null, "hs_all_team_ids": null, "hs_analytics_first_timestamp": null, "hs_analytics_first_timestamp_timestamp_earliest_value_11e3a63a": null, "hs_analytics_first_touch_converting_campaign": null, "hs_analytics_first_touch_converting_campaign_timestamp_earliest_value_4757fe10": null, "hs_analytics_first_visit_timestamp": null, "hs_analytics_first_visit_timestamp_timestamp_earliest_value_accc17ae": null, "hs_analytics_last_timestamp": null, "hs_analytics_last_timestamp_timestamp_latest_value_4e16365a": null, "hs_analytics_last_touch_converting_campaign": null, "hs_analytics_last_touch_converting_campaign_timestamp_latest_value_81a64e30": null, "hs_analytics_last_visit_timestamp": null, "hs_analytics_last_visit_timestamp_timestamp_latest_value_999a0fce": null, "hs_analytics_num_page_views": null, "hs_analytics_num_page_views_cardinality_sum_e46e85b0": null, "hs_analytics_num_visits": null, "hs_analytics_num_visits_cardinality_sum_53d952a6": null, "hs_analytics_source": null, "hs_analytics_source_data_1": null, "hs_analytics_source_data_1_timestamp_earliest_value_9b2f1fa1": null, "hs_analytics_source_data_2": null, "hs_analytics_source_data_2_timestamp_earliest_value_9b2f9400": null, 
"hs_analytics_source_timestamp_earliest_value_25a3a52c": null, "hs_avatar_filemanager_key": null, "hs_created_by_user_id": null, "hs_createdate": null, "hs_date_entered_customer": null, "hs_date_entered_evangelist": null, "hs_date_entered_lead": null, "hs_date_entered_marketingqualifiedlead": null, "hs_date_entered_opportunity": null, "hs_date_entered_other": null, "hs_date_entered_salesqualifiedlead": null, "hs_date_entered_subscriber": null, "hs_date_exited_customer": null, "hs_date_exited_evangelist": null, "hs_date_exited_lead": null, "hs_date_exited_marketingqualifiedlead": null, "hs_date_exited_opportunity": null, "hs_date_exited_other": null, "hs_date_exited_salesqualifiedlead": null, "hs_date_exited_subscriber": null, "hs_ideal_customer_profile": null, "hs_is_target_account": null, "hs_last_booked_meeting_date": null, "hs_last_logged_call_date": null, "hs_last_open_task_date": null, "hs_last_sales_activity_date": null, "hs_last_sales_activity_timestamp": null, "hs_lastmodifieddate": "2021-10-12T12:57:50.941000+00:00", "hs_latest_createdate_of_active_subscriptions": null, "hs_latest_meeting_activity": null, "hs_lead_status": null, "hs_merged_object_ids": null, "hs_num_blockers": 0.0, "hs_num_child_companies": 0.0, "hs_num_contacts_with_buying_roles": 0.0, "hs_num_decision_makers": 0.0, "hs_num_open_deals": 0.0, "hs_object_id": 7097794889, "hs_parent_company_id": null, "hs_pipeline": null, "hs_predictivecontactscore_v2": null, "hs_predictivecontactscore_v2_next_max_max_d4e58c1e": null, "hs_sales_email_last_replied": null, "hs_target_account": null, "hs_target_account_probability": 0.4076234698295593, "hs_target_account_recommendation_snooze_time": null, "hs_target_account_recommendation_state": null, "hs_time_in_customer": null, "hs_time_in_evangelist": null, "hs_time_in_lead": null, "hs_time_in_marketingqualifiedlead": null, "hs_time_in_opportunity": null, "hs_time_in_other": null, "hs_time_in_salesqualifiedlead": null, "hs_time_in_subscriber": null, 
"hs_total_deal_value": null, "hs_unique_creation_key": null, "hs_updated_by_user_id": null, "hs_user_ids_of_all_notification_followers": null, "hs_user_ids_of_all_notification_unfollowers": null, "hs_user_ids_of_all_owners": null, "hubspot_owner_assigneddate": null, "hubspot_owner_id": null, "hubspot_team_id": null, "hubspotscore": null, "industry": "Airbyte Test", "is_public": true, "lifecyclestage": null, "linkedin_company_page": "https://www.linkedin.com/company/bd-domain", "linkedinbio": null, "name": "test company 78", "notes_last_contacted": null, "notes_last_updated": null, "notes_next_activity_date": null, "num_associated_contacts": 0.0, "num_associated_deals": null, "num_contacted_notes": null, "num_conversion_events": null, "num_conversion_events_cardinality_sum_d095f14b": null, "num_notes": null, "numberofemployees": null, "phone": "(877) 929-0687-00291", "recent_conversion_date": null, "recent_conversion_date_timestamp_latest_value_72856da1": null, "recent_conversion_event_name": null, "recent_conversion_event_name_timestamp_latest_value_66c820bf": null, "recent_deal_amount": null, "recent_deal_close_date": null, "state": "Massachusetts", "timezone": "Asia/Dhaka", "total_money_raised": null, "total_revenue": null, "twitterbio": null, "twitterfollowers": null, "twitterhandle": null, "type": null, "web_technologies": "apache;nginx;facebook_social_plugins;facebook_connect;google_analytics;facebook_like_button;centos", "website": "test.domain.net", "zip": "1216"}, "createdAt": "2021-10-12T12:57:38.964Z", "updatedAt": "2021-10-12T12:57:50.941Z", "archived": false}, "emitted_at": 1655280716931} +{"stream": "companies", "data": {"id": "7097798444", "properties": {"about_us": null, "address": null, "address2": null, "annualrevenue": null, "city": "Test City 10", "closedate": null, "closedate_timestamp_earliest_value_a2a17e6e": null, "country": "Bangladesh", "createdate": "2021-10-12T12:57:15.896000+00:00", "days_to_close": null, "description": "This is a 
placeholder page installed by the Ubuntu release of the Lighttpd server package.", "domain": "test.domain.net", "engagements_last_meeting_booked": null, "engagements_last_meeting_booked_campaign": null, "engagements_last_meeting_booked_medium": null, "engagements_last_meeting_booked_source": null, "facebook_company_page": null, "facebookfans": null, "first_contact_createdate": null, "first_contact_createdate_timestamp_earliest_value_78b50eea": null, "first_conversion_date": null, "first_conversion_date_timestamp_earliest_value_61f58f2c": null, "first_conversion_event_name": null, "first_conversion_event_name_timestamp_earliest_value_68ddae0a": null, "first_deal_created_date": null, "founded_year": null, "googleplus_page": null, "hs_additional_domains": null, "hs_all_accessible_team_ids": null, "hs_all_assigned_business_unit_ids": null, "hs_all_owner_ids": null, "hs_all_team_ids": null, "hs_analytics_first_timestamp": null, "hs_analytics_first_timestamp_timestamp_earliest_value_11e3a63a": null, "hs_analytics_first_touch_converting_campaign": null, "hs_analytics_first_touch_converting_campaign_timestamp_earliest_value_4757fe10": null, "hs_analytics_first_visit_timestamp": null, "hs_analytics_first_visit_timestamp_timestamp_earliest_value_accc17ae": null, "hs_analytics_last_timestamp": null, "hs_analytics_last_timestamp_timestamp_latest_value_4e16365a": null, "hs_analytics_last_touch_converting_campaign": null, "hs_analytics_last_touch_converting_campaign_timestamp_latest_value_81a64e30": null, "hs_analytics_last_visit_timestamp": null, "hs_analytics_last_visit_timestamp_timestamp_latest_value_999a0fce": null, "hs_analytics_num_page_views": null, "hs_analytics_num_page_views_cardinality_sum_e46e85b0": null, "hs_analytics_num_visits": null, "hs_analytics_num_visits_cardinality_sum_53d952a6": null, "hs_analytics_source": null, "hs_analytics_source_data_1": null, "hs_analytics_source_data_1_timestamp_earliest_value_9b2f1fa1": null, "hs_analytics_source_data_2": null, 
"hs_analytics_source_data_2_timestamp_earliest_value_9b2f9400": null, "hs_analytics_source_timestamp_earliest_value_25a3a52c": null, "hs_avatar_filemanager_key": null, "hs_created_by_user_id": null, "hs_createdate": null, "hs_date_entered_customer": null, "hs_date_entered_evangelist": null, "hs_date_entered_lead": null, "hs_date_entered_marketingqualifiedlead": null, "hs_date_entered_opportunity": null, "hs_date_entered_other": null, "hs_date_entered_salesqualifiedlead": null, "hs_date_entered_subscriber": null, "hs_date_exited_customer": null, "hs_date_exited_evangelist": null, "hs_date_exited_lead": null, "hs_date_exited_marketingqualifiedlead": null, "hs_date_exited_opportunity": null, "hs_date_exited_other": null, "hs_date_exited_salesqualifiedlead": null, "hs_date_exited_subscriber": null, "hs_ideal_customer_profile": null, "hs_is_target_account": null, "hs_last_booked_meeting_date": null, "hs_last_logged_call_date": null, "hs_last_open_task_date": null, "hs_last_sales_activity_date": null, "hs_last_sales_activity_timestamp": null, "hs_lastmodifieddate": "2021-10-12T12:57:25.265000+00:00", "hs_latest_createdate_of_active_subscriptions": null, "hs_latest_meeting_activity": null, "hs_lead_status": null, "hs_merged_object_ids": null, "hs_num_blockers": 0.0, "hs_num_child_companies": 0.0, "hs_num_contacts_with_buying_roles": 0.0, "hs_num_decision_makers": 0.0, "hs_num_open_deals": 0.0, "hs_object_id": 7097798444, "hs_parent_company_id": null, "hs_pipeline": null, "hs_predictivecontactscore_v2": null, "hs_predictivecontactscore_v2_next_max_max_d4e58c1e": null, "hs_sales_email_last_replied": null, "hs_target_account": null, "hs_target_account_probability": 0.4076234698295593, "hs_target_account_recommendation_snooze_time": null, "hs_target_account_recommendation_state": null, "hs_time_in_customer": null, "hs_time_in_evangelist": null, "hs_time_in_lead": null, "hs_time_in_marketingqualifiedlead": null, "hs_time_in_opportunity": null, "hs_time_in_other": null, 
"hs_time_in_salesqualifiedlead": null, "hs_time_in_subscriber": null, "hs_total_deal_value": null, "hs_unique_creation_key": null, "hs_updated_by_user_id": null, "hs_user_ids_of_all_notification_followers": null, "hs_user_ids_of_all_notification_unfollowers": null, "hs_user_ids_of_all_owners": null, "hubspot_owner_assigneddate": null, "hubspot_owner_id": null, "hubspot_team_id": null, "hubspotscore": null, "industry": "Airbyte Test", "is_public": true, "lifecyclestage": null, "linkedin_company_page": "https://www.linkedin.com/company/bd-domain", "linkedinbio": null, "name": "test company 10", "notes_last_contacted": null, "notes_last_updated": null, "notes_next_activity_date": null, "num_associated_contacts": 0.0, "num_associated_deals": null, "num_contacted_notes": null, "num_conversion_events": null, "num_conversion_events_cardinality_sum_d095f14b": null, "num_notes": null, "numberofemployees": null, "phone": "(877) 929-0687-00291", "recent_conversion_date": null, "recent_conversion_date_timestamp_latest_value_72856da1": null, "recent_conversion_event_name": null, "recent_conversion_event_name_timestamp_latest_value_66c820bf": null, "recent_deal_amount": null, "recent_deal_close_date": null, "state": "Massachusetts", "timezone": "Asia/Dhaka", "total_money_raised": null, "total_revenue": null, "twitterbio": null, "twitterfollowers": null, "twitterhandle": null, "type": null, "web_technologies": "apache;nginx;facebook_social_plugins;facebook_connect;google_analytics;facebook_like_button;centos", "website": "test.domain.net", "zip": "1216"}, "createdAt": "2021-10-12T12:57:15.896Z", "updatedAt": "2021-10-12T12:57:25.265Z", "archived": false}, "emitted_at": 1655280716931} +{"stream": "companies", "data": {"id": "7097798464", "properties": {"about_us": null, "address": null, "address2": null, "annualrevenue": null, "city": "Test City 50", "closedate": null, "closedate_timestamp_earliest_value_a2a17e6e": null, "country": "Bangladesh", "createdate": 
"2021-10-12T12:57:29.560000+00:00", "days_to_close": null, "description": "This is a placeholder page installed by the Ubuntu release of the Lighttpd server package.", "domain": "test.domain.net", "engagements_last_meeting_booked": null, "engagements_last_meeting_booked_campaign": null, "engagements_last_meeting_booked_medium": null, "engagements_last_meeting_booked_source": null, "facebook_company_page": null, "facebookfans": null, "first_contact_createdate": null, "first_contact_createdate_timestamp_earliest_value_78b50eea": null, "first_conversion_date": null, "first_conversion_date_timestamp_earliest_value_61f58f2c": null, "first_conversion_event_name": null, "first_conversion_event_name_timestamp_earliest_value_68ddae0a": null, "first_deal_created_date": null, "founded_year": null, "googleplus_page": null, "hs_additional_domains": null, "hs_all_accessible_team_ids": null, "hs_all_assigned_business_unit_ids": null, "hs_all_owner_ids": null, "hs_all_team_ids": null, "hs_analytics_first_timestamp": null, "hs_analytics_first_timestamp_timestamp_earliest_value_11e3a63a": null, "hs_analytics_first_touch_converting_campaign": null, "hs_analytics_first_touch_converting_campaign_timestamp_earliest_value_4757fe10": null, "hs_analytics_first_visit_timestamp": null, "hs_analytics_first_visit_timestamp_timestamp_earliest_value_accc17ae": null, "hs_analytics_last_timestamp": null, "hs_analytics_last_timestamp_timestamp_latest_value_4e16365a": null, "hs_analytics_last_touch_converting_campaign": null, "hs_analytics_last_touch_converting_campaign_timestamp_latest_value_81a64e30": null, "hs_analytics_last_visit_timestamp": null, "hs_analytics_last_visit_timestamp_timestamp_latest_value_999a0fce": null, "hs_analytics_num_page_views": null, "hs_analytics_num_page_views_cardinality_sum_e46e85b0": null, "hs_analytics_num_visits": null, "hs_analytics_num_visits_cardinality_sum_53d952a6": null, "hs_analytics_source": null, "hs_analytics_source_data_1": null, 
"hs_analytics_source_data_1_timestamp_earliest_value_9b2f1fa1": null, "hs_analytics_source_data_2": null, "hs_analytics_source_data_2_timestamp_earliest_value_9b2f9400": null, "hs_analytics_source_timestamp_earliest_value_25a3a52c": null, "hs_avatar_filemanager_key": null, "hs_created_by_user_id": null, "hs_createdate": null, "hs_date_entered_customer": null, "hs_date_entered_evangelist": null, "hs_date_entered_lead": null, "hs_date_entered_marketingqualifiedlead": null, "hs_date_entered_opportunity": null, "hs_date_entered_other": null, "hs_date_entered_salesqualifiedlead": null, "hs_date_entered_subscriber": null, "hs_date_exited_customer": null, "hs_date_exited_evangelist": null, "hs_date_exited_lead": null, "hs_date_exited_marketingqualifiedlead": null, "hs_date_exited_opportunity": null, "hs_date_exited_other": null, "hs_date_exited_salesqualifiedlead": null, "hs_date_exited_subscriber": null, "hs_ideal_customer_profile": null, "hs_is_target_account": null, "hs_last_booked_meeting_date": null, "hs_last_logged_call_date": null, "hs_last_open_task_date": null, "hs_last_sales_activity_date": null, "hs_last_sales_activity_timestamp": null, "hs_lastmodifieddate": "2021-10-12T12:57:49.852000+00:00", "hs_latest_createdate_of_active_subscriptions": null, "hs_latest_meeting_activity": null, "hs_lead_status": null, "hs_merged_object_ids": null, "hs_num_blockers": 0.0, "hs_num_child_companies": 0.0, "hs_num_contacts_with_buying_roles": 0.0, "hs_num_decision_makers": 0.0, "hs_num_open_deals": 0.0, "hs_object_id": 7097798464, "hs_parent_company_id": null, "hs_pipeline": null, "hs_predictivecontactscore_v2": null, "hs_predictivecontactscore_v2_next_max_max_d4e58c1e": null, "hs_sales_email_last_replied": null, "hs_target_account": null, "hs_target_account_probability": 0.4076234698295593, "hs_target_account_recommendation_snooze_time": null, "hs_target_account_recommendation_state": null, "hs_time_in_customer": null, "hs_time_in_evangelist": null, "hs_time_in_lead": null, 
"hs_time_in_marketingqualifiedlead": null, "hs_time_in_opportunity": null, "hs_time_in_other": null, "hs_time_in_salesqualifiedlead": null, "hs_time_in_subscriber": null, "hs_total_deal_value": null, "hs_unique_creation_key": null, "hs_updated_by_user_id": null, "hs_user_ids_of_all_notification_followers": null, "hs_user_ids_of_all_notification_unfollowers": null, "hs_user_ids_of_all_owners": null, "hubspot_owner_assigneddate": null, "hubspot_owner_id": null, "hubspot_team_id": null, "hubspotscore": null, "industry": "Airbyte Test", "is_public": true, "lifecyclestage": null, "linkedin_company_page": "https://www.linkedin.com/company/bd-domain", "linkedinbio": null, "name": "test company 50", "notes_last_contacted": null, "notes_last_updated": null, "notes_next_activity_date": null, "num_associated_contacts": 0.0, "num_associated_deals": null, "num_contacted_notes": null, "num_conversion_events": null, "num_conversion_events_cardinality_sum_d095f14b": null, "num_notes": null, "numberofemployees": null, "phone": "(877) 929-0687-00291", "recent_conversion_date": null, "recent_conversion_date_timestamp_latest_value_72856da1": null, "recent_conversion_event_name": null, "recent_conversion_event_name_timestamp_latest_value_66c820bf": null, "recent_deal_amount": null, "recent_deal_close_date": null, "state": "Massachusetts", "timezone": "Asia/Dhaka", "total_money_raised": null, "total_revenue": null, "twitterbio": null, "twitterfollowers": null, "twitterhandle": null, "type": null, "web_technologies": "apache;nginx;facebook_social_plugins;facebook_connect;google_analytics;facebook_like_button;centos", "website": "test.domain.net", "zip": "1216"}, "createdAt": "2021-10-12T12:57:29.560Z", "updatedAt": "2021-10-12T12:57:49.852Z", "archived": false}, "emitted_at": 1655280716932} +{"stream": "companies", "data": {"id": "7097798467", "properties": {"about_us": null, "address": null, "address2": null, "annualrevenue": null, "city": "Test City 75", "closedate": null, 
"closedate_timestamp_earliest_value_a2a17e6e": null, "country": "Bangladesh", "createdate": "2021-10-12T12:57:37.653000+00:00", "days_to_close": null, "description": "This is a placeholder page installed by the Ubuntu release of the Lighttpd server package.", "domain": "test.domain.net", "engagements_last_meeting_booked": null, "engagements_last_meeting_booked_campaign": null, "engagements_last_meeting_booked_medium": null, "engagements_last_meeting_booked_source": null, "facebook_company_page": null, "facebookfans": null, "first_contact_createdate": null, "first_contact_createdate_timestamp_earliest_value_78b50eea": null, "first_conversion_date": null, "first_conversion_date_timestamp_earliest_value_61f58f2c": null, "first_conversion_event_name": null, "first_conversion_event_name_timestamp_earliest_value_68ddae0a": null, "first_deal_created_date": null, "founded_year": null, "googleplus_page": null, "hs_additional_domains": null, "hs_all_accessible_team_ids": null, "hs_all_assigned_business_unit_ids": null, "hs_all_owner_ids": null, "hs_all_team_ids": null, "hs_analytics_first_timestamp": null, "hs_analytics_first_timestamp_timestamp_earliest_value_11e3a63a": null, "hs_analytics_first_touch_converting_campaign": null, "hs_analytics_first_touch_converting_campaign_timestamp_earliest_value_4757fe10": null, "hs_analytics_first_visit_timestamp": null, "hs_analytics_first_visit_timestamp_timestamp_earliest_value_accc17ae": null, "hs_analytics_last_timestamp": null, "hs_analytics_last_timestamp_timestamp_latest_value_4e16365a": null, "hs_analytics_last_touch_converting_campaign": null, "hs_analytics_last_touch_converting_campaign_timestamp_latest_value_81a64e30": null, "hs_analytics_last_visit_timestamp": null, "hs_analytics_last_visit_timestamp_timestamp_latest_value_999a0fce": null, "hs_analytics_num_page_views": null, "hs_analytics_num_page_views_cardinality_sum_e46e85b0": null, "hs_analytics_num_visits": null, "hs_analytics_num_visits_cardinality_sum_53d952a6": 
null, "hs_analytics_source": null, "hs_analytics_source_data_1": null, "hs_analytics_source_data_1_timestamp_earliest_value_9b2f1fa1": null, "hs_analytics_source_data_2": null, "hs_analytics_source_data_2_timestamp_earliest_value_9b2f9400": null, "hs_analytics_source_timestamp_earliest_value_25a3a52c": null, "hs_avatar_filemanager_key": null, "hs_created_by_user_id": null, "hs_createdate": null, "hs_date_entered_customer": null, "hs_date_entered_evangelist": null, "hs_date_entered_lead": null, "hs_date_entered_marketingqualifiedlead": null, "hs_date_entered_opportunity": null, "hs_date_entered_other": null, "hs_date_entered_salesqualifiedlead": null, "hs_date_entered_subscriber": null, "hs_date_exited_customer": null, "hs_date_exited_evangelist": null, "hs_date_exited_lead": null, "hs_date_exited_marketingqualifiedlead": null, "hs_date_exited_opportunity": null, "hs_date_exited_other": null, "hs_date_exited_salesqualifiedlead": null, "hs_date_exited_subscriber": null, "hs_ideal_customer_profile": null, "hs_is_target_account": null, "hs_last_booked_meeting_date": null, "hs_last_logged_call_date": null, "hs_last_open_task_date": null, "hs_last_sales_activity_date": null, "hs_last_sales_activity_timestamp": null, "hs_lastmodifieddate": "2021-10-12T12:57:46.037000+00:00", "hs_latest_createdate_of_active_subscriptions": null, "hs_latest_meeting_activity": null, "hs_lead_status": null, "hs_merged_object_ids": null, "hs_num_blockers": 0.0, "hs_num_child_companies": 0.0, "hs_num_contacts_with_buying_roles": 0.0, "hs_num_decision_makers": 0.0, "hs_num_open_deals": 0.0, "hs_object_id": 7097798467, "hs_parent_company_id": null, "hs_pipeline": null, "hs_predictivecontactscore_v2": null, "hs_predictivecontactscore_v2_next_max_max_d4e58c1e": null, "hs_sales_email_last_replied": null, "hs_target_account": null, "hs_target_account_probability": 0.4076234698295593, "hs_target_account_recommendation_snooze_time": null, "hs_target_account_recommendation_state": null, 
"hs_time_in_customer": null, "hs_time_in_evangelist": null, "hs_time_in_lead": null, "hs_time_in_marketingqualifiedlead": null, "hs_time_in_opportunity": null, "hs_time_in_other": null, "hs_time_in_salesqualifiedlead": null, "hs_time_in_subscriber": null, "hs_total_deal_value": null, "hs_unique_creation_key": null, "hs_updated_by_user_id": null, "hs_user_ids_of_all_notification_followers": null, "hs_user_ids_of_all_notification_unfollowers": null, "hs_user_ids_of_all_owners": null, "hubspot_owner_assigneddate": null, "hubspot_owner_id": null, "hubspot_team_id": null, "hubspotscore": null, "industry": "Airbyte Test", "is_public": true, "lifecyclestage": null, "linkedin_company_page": "https://www.linkedin.com/company/bd-domain", "linkedinbio": null, "name": "test company 75", "notes_last_contacted": null, "notes_last_updated": null, "notes_next_activity_date": null, "num_associated_contacts": 0.0, "num_associated_deals": null, "num_contacted_notes": null, "num_conversion_events": null, "num_conversion_events_cardinality_sum_d095f14b": null, "num_notes": null, "numberofemployees": null, "phone": "(877) 929-0687-00291", "recent_conversion_date": null, "recent_conversion_date_timestamp_latest_value_72856da1": null, "recent_conversion_event_name": null, "recent_conversion_event_name_timestamp_latest_value_66c820bf": null, "recent_deal_amount": null, "recent_deal_close_date": null, "state": "Massachusetts", "timezone": "Asia/Dhaka", "total_money_raised": null, "total_revenue": null, "twitterbio": null, "twitterfollowers": null, "twitterhandle": null, "type": null, "web_technologies": "apache;nginx;facebook_social_plugins;facebook_connect;google_analytics;facebook_like_button;centos", "website": "test.domain.net", "zip": "1216"}, "createdAt": "2021-10-12T12:57:37.653Z", "updatedAt": "2021-10-12T12:57:46.037Z", "archived": false}, "emitted_at": 1655280716932} +{"stream": "companies", "data": {"id": "7097798469", "properties": {"about_us": null, "address": null, "address2": 
null, "annualrevenue": null, "city": "Test City 87", "closedate": null, "closedate_timestamp_earliest_value_a2a17e6e": null, "country": "Bangladesh", "createdate": "2021-10-12T12:57:42.049000+00:00", "days_to_close": null, "description": "This is a placeholder page installed by the Ubuntu release of the Lighttpd server package.", "domain": "test.domain.net", "engagements_last_meeting_booked": null, "engagements_last_meeting_booked_campaign": null, "engagements_last_meeting_booked_medium": null, "engagements_last_meeting_booked_source": null, "facebook_company_page": null, "facebookfans": null, "first_contact_createdate": null, "first_contact_createdate_timestamp_earliest_value_78b50eea": null, "first_conversion_date": null, "first_conversion_date_timestamp_earliest_value_61f58f2c": null, "first_conversion_event_name": null, "first_conversion_event_name_timestamp_earliest_value_68ddae0a": null, "first_deal_created_date": null, "founded_year": null, "googleplus_page": null, "hs_additional_domains": null, "hs_all_accessible_team_ids": null, "hs_all_assigned_business_unit_ids": null, "hs_all_owner_ids": null, "hs_all_team_ids": null, "hs_analytics_first_timestamp": null, "hs_analytics_first_timestamp_timestamp_earliest_value_11e3a63a": null, "hs_analytics_first_touch_converting_campaign": null, "hs_analytics_first_touch_converting_campaign_timestamp_earliest_value_4757fe10": null, "hs_analytics_first_visit_timestamp": null, "hs_analytics_first_visit_timestamp_timestamp_earliest_value_accc17ae": null, "hs_analytics_last_timestamp": null, "hs_analytics_last_timestamp_timestamp_latest_value_4e16365a": null, "hs_analytics_last_touch_converting_campaign": null, "hs_analytics_last_touch_converting_campaign_timestamp_latest_value_81a64e30": null, "hs_analytics_last_visit_timestamp": null, "hs_analytics_last_visit_timestamp_timestamp_latest_value_999a0fce": null, "hs_analytics_num_page_views": null, "hs_analytics_num_page_views_cardinality_sum_e46e85b0": null, 
"hs_analytics_num_visits": null, "hs_analytics_num_visits_cardinality_sum_53d952a6": null, "hs_analytics_source": null, "hs_analytics_source_data_1": null, "hs_analytics_source_data_1_timestamp_earliest_value_9b2f1fa1": null, "hs_analytics_source_data_2": null, "hs_analytics_source_data_2_timestamp_earliest_value_9b2f9400": null, "hs_analytics_source_timestamp_earliest_value_25a3a52c": null, "hs_avatar_filemanager_key": null, "hs_created_by_user_id": null, "hs_createdate": null, "hs_date_entered_customer": null, "hs_date_entered_evangelist": null, "hs_date_entered_lead": null, "hs_date_entered_marketingqualifiedlead": null, "hs_date_entered_opportunity": null, "hs_date_entered_other": null, "hs_date_entered_salesqualifiedlead": null, "hs_date_entered_subscriber": null, "hs_date_exited_customer": null, "hs_date_exited_evangelist": null, "hs_date_exited_lead": null, "hs_date_exited_marketingqualifiedlead": null, "hs_date_exited_opportunity": null, "hs_date_exited_other": null, "hs_date_exited_salesqualifiedlead": null, "hs_date_exited_subscriber": null, "hs_ideal_customer_profile": null, "hs_is_target_account": null, "hs_last_booked_meeting_date": null, "hs_last_logged_call_date": null, "hs_last_open_task_date": null, "hs_last_sales_activity_date": null, "hs_last_sales_activity_timestamp": null, "hs_lastmodifieddate": "2021-10-12T12:57:54.614000+00:00", "hs_latest_createdate_of_active_subscriptions": null, "hs_latest_meeting_activity": null, "hs_lead_status": null, "hs_merged_object_ids": null, "hs_num_blockers": 0.0, "hs_num_child_companies": 0.0, "hs_num_contacts_with_buying_roles": 0.0, "hs_num_decision_makers": 0.0, "hs_num_open_deals": 0.0, "hs_object_id": 7097798469, "hs_parent_company_id": null, "hs_pipeline": null, "hs_predictivecontactscore_v2": null, "hs_predictivecontactscore_v2_next_max_max_d4e58c1e": null, "hs_sales_email_last_replied": null, "hs_target_account": null, "hs_target_account_probability": 0.4076234698295593, 
"hs_target_account_recommendation_snooze_time": null, "hs_target_account_recommendation_state": null, "hs_time_in_customer": null, "hs_time_in_evangelist": null, "hs_time_in_lead": null, "hs_time_in_marketingqualifiedlead": null, "hs_time_in_opportunity": null, "hs_time_in_other": null, "hs_time_in_salesqualifiedlead": null, "hs_time_in_subscriber": null, "hs_total_deal_value": null, "hs_unique_creation_key": null, "hs_updated_by_user_id": null, "hs_user_ids_of_all_notification_followers": null, "hs_user_ids_of_all_notification_unfollowers": null, "hs_user_ids_of_all_owners": null, "hubspot_owner_assigneddate": null, "hubspot_owner_id": null, "hubspot_team_id": null, "hubspotscore": null, "industry": "Airbyte Test", "is_public": true, "lifecyclestage": null, "linkedin_company_page": "https://www.linkedin.com/company/bd-domain", "linkedinbio": null, "name": "test company 87", "notes_last_contacted": null, "notes_last_updated": null, "notes_next_activity_date": null, "num_associated_contacts": 0.0, "num_associated_deals": null, "num_contacted_notes": null, "num_conversion_events": null, "num_conversion_events_cardinality_sum_d095f14b": null, "num_notes": null, "numberofemployees": null, "phone": "(877) 929-0687-00291", "recent_conversion_date": null, "recent_conversion_date_timestamp_latest_value_72856da1": null, "recent_conversion_event_name": null, "recent_conversion_event_name_timestamp_latest_value_66c820bf": null, "recent_deal_amount": null, "recent_deal_close_date": null, "state": "Massachusetts", "timezone": "Asia/Dhaka", "total_money_raised": null, "total_revenue": null, "twitterbio": null, "twitterfollowers": null, "twitterhandle": null, "type": null, "web_technologies": "apache;nginx;facebook_social_plugins;facebook_connect;google_analytics;facebook_like_button;centos", "website": "test.domain.net", "zip": "1216"}, "createdAt": "2021-10-12T12:57:42.049Z", "updatedAt": "2021-10-12T12:57:54.614Z", "archived": false}, "emitted_at": 1655280716933} +{"stream": 
"companies", "data": {"id": "7097802020", "properties": {"about_us": null, "address": null, "address2": null, "annualrevenue": null, "city": "Test City 15", "closedate": null, "closedate_timestamp_earliest_value_a2a17e6e": null, "country": "Bangladesh", "createdate": "2021-10-12T12:57:17.792000+00:00", "days_to_close": null, "description": "This is a placeholder page installed by the Ubuntu release of the Lighttpd server package.", "domain": "test.domain.net", "engagements_last_meeting_booked": null, "engagements_last_meeting_booked_campaign": null, "engagements_last_meeting_booked_medium": null, "engagements_last_meeting_booked_source": null, "facebook_company_page": null, "facebookfans": null, "first_contact_createdate": null, "first_contact_createdate_timestamp_earliest_value_78b50eea": null, "first_conversion_date": null, "first_conversion_date_timestamp_earliest_value_61f58f2c": null, "first_conversion_event_name": null, "first_conversion_event_name_timestamp_earliest_value_68ddae0a": null, "first_deal_created_date": null, "founded_year": null, "googleplus_page": null, "hs_additional_domains": null, "hs_all_accessible_team_ids": null, "hs_all_assigned_business_unit_ids": null, "hs_all_owner_ids": null, "hs_all_team_ids": null, "hs_analytics_first_timestamp": null, "hs_analytics_first_timestamp_timestamp_earliest_value_11e3a63a": null, "hs_analytics_first_touch_converting_campaign": null, "hs_analytics_first_touch_converting_campaign_timestamp_earliest_value_4757fe10": null, "hs_analytics_first_visit_timestamp": null, "hs_analytics_first_visit_timestamp_timestamp_earliest_value_accc17ae": null, "hs_analytics_last_timestamp": null, "hs_analytics_last_timestamp_timestamp_latest_value_4e16365a": null, "hs_analytics_last_touch_converting_campaign": null, "hs_analytics_last_touch_converting_campaign_timestamp_latest_value_81a64e30": null, "hs_analytics_last_visit_timestamp": null, "hs_analytics_last_visit_timestamp_timestamp_latest_value_999a0fce": null, 
"hs_analytics_num_page_views": null, "hs_analytics_num_page_views_cardinality_sum_e46e85b0": null, "hs_analytics_num_visits": null, "hs_analytics_num_visits_cardinality_sum_53d952a6": null, "hs_analytics_source": null, "hs_analytics_source_data_1": null, "hs_analytics_source_data_1_timestamp_earliest_value_9b2f1fa1": null, "hs_analytics_source_data_2": null, "hs_analytics_source_data_2_timestamp_earliest_value_9b2f9400": null, "hs_analytics_source_timestamp_earliest_value_25a3a52c": null, "hs_avatar_filemanager_key": null, "hs_created_by_user_id": null, "hs_createdate": null, "hs_date_entered_customer": null, "hs_date_entered_evangelist": null, "hs_date_entered_lead": null, "hs_date_entered_marketingqualifiedlead": null, "hs_date_entered_opportunity": null, "hs_date_entered_other": null, "hs_date_entered_salesqualifiedlead": null, "hs_date_entered_subscriber": null, "hs_date_exited_customer": null, "hs_date_exited_evangelist": null, "hs_date_exited_lead": null, "hs_date_exited_marketingqualifiedlead": null, "hs_date_exited_opportunity": null, "hs_date_exited_other": null, "hs_date_exited_salesqualifiedlead": null, "hs_date_exited_subscriber": null, "hs_ideal_customer_profile": null, "hs_is_target_account": null, "hs_last_booked_meeting_date": null, "hs_last_logged_call_date": null, "hs_last_open_task_date": null, "hs_last_sales_activity_date": null, "hs_last_sales_activity_timestamp": null, "hs_lastmodifieddate": "2021-10-12T12:57:39.415000+00:00", "hs_latest_createdate_of_active_subscriptions": null, "hs_latest_meeting_activity": null, "hs_lead_status": null, "hs_merged_object_ids": null, "hs_num_blockers": 0.0, "hs_num_child_companies": 0.0, "hs_num_contacts_with_buying_roles": 0.0, "hs_num_decision_makers": 0.0, "hs_num_open_deals": 0.0, "hs_object_id": 7097802020, "hs_parent_company_id": null, "hs_pipeline": null, "hs_predictivecontactscore_v2": null, "hs_predictivecontactscore_v2_next_max_max_d4e58c1e": null, "hs_sales_email_last_replied": null, 
"hs_target_account": null, "hs_target_account_probability": 0.4076234698295593, "hs_target_account_recommendation_snooze_time": null, "hs_target_account_recommendation_state": null, "hs_time_in_customer": null, "hs_time_in_evangelist": null, "hs_time_in_lead": null, "hs_time_in_marketingqualifiedlead": null, "hs_time_in_opportunity": null, "hs_time_in_other": null, "hs_time_in_salesqualifiedlead": null, "hs_time_in_subscriber": null, "hs_total_deal_value": null, "hs_unique_creation_key": null, "hs_updated_by_user_id": null, "hs_user_ids_of_all_notification_followers": null, "hs_user_ids_of_all_notification_unfollowers": null, "hs_user_ids_of_all_owners": null, "hubspot_owner_assigneddate": null, "hubspot_owner_id": null, "hubspot_team_id": null, "hubspotscore": null, "industry": "Airbyte Test", "is_public": true, "lifecyclestage": null, "linkedin_company_page": "https://www.linkedin.com/company/bd-domain", "linkedinbio": null, "name": "test company 15", "notes_last_contacted": null, "notes_last_updated": null, "notes_next_activity_date": null, "num_associated_contacts": 0.0, "num_associated_deals": null, "num_contacted_notes": null, "num_conversion_events": null, "num_conversion_events_cardinality_sum_d095f14b": null, "num_notes": null, "numberofemployees": null, "phone": "(877) 929-0687-00291", "recent_conversion_date": null, "recent_conversion_date_timestamp_latest_value_72856da1": null, "recent_conversion_event_name": null, "recent_conversion_event_name_timestamp_latest_value_66c820bf": null, "recent_deal_amount": null, "recent_deal_close_date": null, "state": "Massachusetts", "timezone": "Asia/Dhaka", "total_money_raised": null, "total_revenue": null, "twitterbio": null, "twitterfollowers": null, "twitterhandle": null, "type": null, "web_technologies": "apache;nginx;facebook_social_plugins;facebook_connect;google_analytics;facebook_like_button;centos", "website": "test.domain.net", "zip": "1216"}, "createdAt": "2021-10-12T12:57:17.792Z", "updatedAt": 
"2021-10-12T12:57:39.415Z", "archived": false}, "emitted_at": 1655280716933} +{"stream": "companies", "data": {"id": "7097802034", "properties": {"about_us": null, "address": null, "address2": null, "annualrevenue": null, "city": "Test City 48", "closedate": null, "closedate_timestamp_earliest_value_a2a17e6e": null, "country": "Bangladesh", "createdate": "2021-10-12T12:57:28.840000+00:00", "days_to_close": null, "description": "This is a placeholder page installed by the Ubuntu release of the Lighttpd server package.", "domain": "test.domain.net", "engagements_last_meeting_booked": null, "engagements_last_meeting_booked_campaign": null, "engagements_last_meeting_booked_medium": null, "engagements_last_meeting_booked_source": null, "facebook_company_page": null, "facebookfans": null, "first_contact_createdate": null, "first_contact_createdate_timestamp_earliest_value_78b50eea": null, "first_conversion_date": null, "first_conversion_date_timestamp_earliest_value_61f58f2c": null, "first_conversion_event_name": null, "first_conversion_event_name_timestamp_earliest_value_68ddae0a": null, "first_deal_created_date": null, "founded_year": null, "googleplus_page": null, "hs_additional_domains": null, "hs_all_accessible_team_ids": null, "hs_all_assigned_business_unit_ids": null, "hs_all_owner_ids": null, "hs_all_team_ids": null, "hs_analytics_first_timestamp": null, "hs_analytics_first_timestamp_timestamp_earliest_value_11e3a63a": null, "hs_analytics_first_touch_converting_campaign": null, "hs_analytics_first_touch_converting_campaign_timestamp_earliest_value_4757fe10": null, "hs_analytics_first_visit_timestamp": null, "hs_analytics_first_visit_timestamp_timestamp_earliest_value_accc17ae": null, "hs_analytics_last_timestamp": null, "hs_analytics_last_timestamp_timestamp_latest_value_4e16365a": null, "hs_analytics_last_touch_converting_campaign": null, "hs_analytics_last_touch_converting_campaign_timestamp_latest_value_81a64e30": null, "hs_analytics_last_visit_timestamp": 
null, "hs_analytics_last_visit_timestamp_timestamp_latest_value_999a0fce": null, "hs_analytics_num_page_views": null, "hs_analytics_num_page_views_cardinality_sum_e46e85b0": null, "hs_analytics_num_visits": null, "hs_analytics_num_visits_cardinality_sum_53d952a6": null, "hs_analytics_source": null, "hs_analytics_source_data_1": null, "hs_analytics_source_data_1_timestamp_earliest_value_9b2f1fa1": null, "hs_analytics_source_data_2": null, "hs_analytics_source_data_2_timestamp_earliest_value_9b2f9400": null, "hs_analytics_source_timestamp_earliest_value_25a3a52c": null, "hs_avatar_filemanager_key": null, "hs_created_by_user_id": null, "hs_createdate": null, "hs_date_entered_customer": null, "hs_date_entered_evangelist": null, "hs_date_entered_lead": null, "hs_date_entered_marketingqualifiedlead": null, "hs_date_entered_opportunity": null, "hs_date_entered_other": null, "hs_date_entered_salesqualifiedlead": null, "hs_date_entered_subscriber": null, "hs_date_exited_customer": null, "hs_date_exited_evangelist": null, "hs_date_exited_lead": null, "hs_date_exited_marketingqualifiedlead": null, "hs_date_exited_opportunity": null, "hs_date_exited_other": null, "hs_date_exited_salesqualifiedlead": null, "hs_date_exited_subscriber": null, "hs_ideal_customer_profile": null, "hs_is_target_account": null, "hs_last_booked_meeting_date": null, "hs_last_logged_call_date": null, "hs_last_open_task_date": null, "hs_last_sales_activity_date": null, "hs_last_sales_activity_timestamp": null, "hs_lastmodifieddate": "2021-10-12T12:58:27.314000+00:00", "hs_latest_createdate_of_active_subscriptions": null, "hs_latest_meeting_activity": null, "hs_lead_status": null, "hs_merged_object_ids": null, "hs_num_blockers": 0.0, "hs_num_child_companies": 0.0, "hs_num_contacts_with_buying_roles": 0.0, "hs_num_decision_makers": 0.0, "hs_num_open_deals": 0.0, "hs_object_id": 7097802034, "hs_parent_company_id": null, "hs_pipeline": null, "hs_predictivecontactscore_v2": null, 
"hs_predictivecontactscore_v2_next_max_max_d4e58c1e": null, "hs_sales_email_last_replied": null, "hs_target_account": null, "hs_target_account_probability": 0.4076234698295593, "hs_target_account_recommendation_snooze_time": null, "hs_target_account_recommendation_state": null, "hs_time_in_customer": null, "hs_time_in_evangelist": null, "hs_time_in_lead": null, "hs_time_in_marketingqualifiedlead": null, "hs_time_in_opportunity": null, "hs_time_in_other": null, "hs_time_in_salesqualifiedlead": null, "hs_time_in_subscriber": null, "hs_total_deal_value": null, "hs_unique_creation_key": null, "hs_updated_by_user_id": null, "hs_user_ids_of_all_notification_followers": null, "hs_user_ids_of_all_notification_unfollowers": null, "hs_user_ids_of_all_owners": null, "hubspot_owner_assigneddate": null, "hubspot_owner_id": null, "hubspot_team_id": null, "hubspotscore": null, "industry": "Airbyte Test", "is_public": true, "lifecyclestage": null, "linkedin_company_page": "https://www.linkedin.com/company/bd-domain", "linkedinbio": null, "name": "test company 48", "notes_last_contacted": null, "notes_last_updated": null, "notes_next_activity_date": null, "num_associated_contacts": 0.0, "num_associated_deals": null, "num_contacted_notes": null, "num_conversion_events": null, "num_conversion_events_cardinality_sum_d095f14b": null, "num_notes": null, "numberofemployees": null, "phone": "(877) 929-0687-00291", "recent_conversion_date": null, "recent_conversion_date_timestamp_latest_value_72856da1": null, "recent_conversion_event_name": null, "recent_conversion_event_name_timestamp_latest_value_66c820bf": null, "recent_deal_amount": null, "recent_deal_close_date": null, "state": "Massachusetts", "timezone": "Asia/Dhaka", "total_money_raised": null, "total_revenue": null, "twitterbio": null, "twitterfollowers": null, "twitterhandle": null, "type": null, "web_technologies": "apache;nginx;facebook_social_plugins;facebook_connect;google_analytics;facebook_like_button;centos", "website": 
"test.domain.net", "zip": "1216"}, "createdAt": "2021-10-12T12:57:28.840Z", "updatedAt": "2021-10-12T12:58:27.314Z", "archived": false}, "emitted_at": 1655280716934} +{"stream": "companies", "data": {"id": "7097802036", "properties": {"about_us": null, "address": null, "address2": null, "annualrevenue": null, "city": "Test City 70", "closedate": null, "closedate_timestamp_earliest_value_a2a17e6e": null, "country": "Bangladesh", "createdate": "2021-10-12T12:57:35.700000+00:00", "days_to_close": null, "description": "This is a placeholder page installed by the Ubuntu release of the Lighttpd server package.", "domain": "test.domain.net", "engagements_last_meeting_booked": null, "engagements_last_meeting_booked_campaign": null, "engagements_last_meeting_booked_medium": null, "engagements_last_meeting_booked_source": null, "facebook_company_page": null, "facebookfans": null, "first_contact_createdate": null, "first_contact_createdate_timestamp_earliest_value_78b50eea": null, "first_conversion_date": null, "first_conversion_date_timestamp_earliest_value_61f58f2c": null, "first_conversion_event_name": null, "first_conversion_event_name_timestamp_earliest_value_68ddae0a": null, "first_deal_created_date": null, "founded_year": null, "googleplus_page": null, "hs_additional_domains": null, "hs_all_accessible_team_ids": null, "hs_all_assigned_business_unit_ids": null, "hs_all_owner_ids": null, "hs_all_team_ids": null, "hs_analytics_first_timestamp": null, "hs_analytics_first_timestamp_timestamp_earliest_value_11e3a63a": null, "hs_analytics_first_touch_converting_campaign": null, "hs_analytics_first_touch_converting_campaign_timestamp_earliest_value_4757fe10": null, "hs_analytics_first_visit_timestamp": null, "hs_analytics_first_visit_timestamp_timestamp_earliest_value_accc17ae": null, "hs_analytics_last_timestamp": null, "hs_analytics_last_timestamp_timestamp_latest_value_4e16365a": null, "hs_analytics_last_touch_converting_campaign": null, 
"hs_analytics_last_touch_converting_campaign_timestamp_latest_value_81a64e30": null, "hs_analytics_last_visit_timestamp": null, "hs_analytics_last_visit_timestamp_timestamp_latest_value_999a0fce": null, "hs_analytics_num_page_views": null, "hs_analytics_num_page_views_cardinality_sum_e46e85b0": null, "hs_analytics_num_visits": null, "hs_analytics_num_visits_cardinality_sum_53d952a6": null, "hs_analytics_source": null, "hs_analytics_source_data_1": null, "hs_analytics_source_data_1_timestamp_earliest_value_9b2f1fa1": null, "hs_analytics_source_data_2": null, "hs_analytics_source_data_2_timestamp_earliest_value_9b2f9400": null, "hs_analytics_source_timestamp_earliest_value_25a3a52c": null, "hs_avatar_filemanager_key": null, "hs_created_by_user_id": null, "hs_createdate": null, "hs_date_entered_customer": null, "hs_date_entered_evangelist": null, "hs_date_entered_lead": null, "hs_date_entered_marketingqualifiedlead": null, "hs_date_entered_opportunity": null, "hs_date_entered_other": null, "hs_date_entered_salesqualifiedlead": null, "hs_date_entered_subscriber": null, "hs_date_exited_customer": null, "hs_date_exited_evangelist": null, "hs_date_exited_lead": null, "hs_date_exited_marketingqualifiedlead": null, "hs_date_exited_opportunity": null, "hs_date_exited_other": null, "hs_date_exited_salesqualifiedlead": null, "hs_date_exited_subscriber": null, "hs_ideal_customer_profile": null, "hs_is_target_account": null, "hs_last_booked_meeting_date": null, "hs_last_logged_call_date": null, "hs_last_open_task_date": null, "hs_last_sales_activity_date": null, "hs_last_sales_activity_timestamp": null, "hs_lastmodifieddate": "2021-10-12T12:57:49.774000+00:00", "hs_latest_createdate_of_active_subscriptions": null, "hs_latest_meeting_activity": null, "hs_lead_status": null, "hs_merged_object_ids": null, "hs_num_blockers": 0.0, "hs_num_child_companies": 0.0, "hs_num_contacts_with_buying_roles": 0.0, "hs_num_decision_makers": 0.0, "hs_num_open_deals": 0.0, "hs_object_id": 
7097802036, "hs_parent_company_id": null, "hs_pipeline": null, "hs_predictivecontactscore_v2": null, "hs_predictivecontactscore_v2_next_max_max_d4e58c1e": null, "hs_sales_email_last_replied": null, "hs_target_account": null, "hs_target_account_probability": 0.4076234698295593, "hs_target_account_recommendation_snooze_time": null, "hs_target_account_recommendation_state": null, "hs_time_in_customer": null, "hs_time_in_evangelist": null, "hs_time_in_lead": null, "hs_time_in_marketingqualifiedlead": null, "hs_time_in_opportunity": null, "hs_time_in_other": null, "hs_time_in_salesqualifiedlead": null, "hs_time_in_subscriber": null, "hs_total_deal_value": null, "hs_unique_creation_key": null, "hs_updated_by_user_id": null, "hs_user_ids_of_all_notification_followers": null, "hs_user_ids_of_all_notification_unfollowers": null, "hs_user_ids_of_all_owners": null, "hubspot_owner_assigneddate": null, "hubspot_owner_id": null, "hubspot_team_id": null, "hubspotscore": null, "industry": "Airbyte Test", "is_public": true, "lifecyclestage": null, "linkedin_company_page": "https://www.linkedin.com/company/bd-domain", "linkedinbio": null, "name": "test company 70", "notes_last_contacted": null, "notes_last_updated": null, "notes_next_activity_date": null, "num_associated_contacts": 0.0, "num_associated_deals": null, "num_contacted_notes": null, "num_conversion_events": null, "num_conversion_events_cardinality_sum_d095f14b": null, "num_notes": null, "numberofemployees": null, "phone": "(877) 929-0687-00291", "recent_conversion_date": null, "recent_conversion_date_timestamp_latest_value_72856da1": null, "recent_conversion_event_name": null, "recent_conversion_event_name_timestamp_latest_value_66c820bf": null, "recent_deal_amount": null, "recent_deal_close_date": null, "state": "Massachusetts", "timezone": "Asia/Dhaka", "total_money_raised": null, "total_revenue": null, "twitterbio": null, "twitterfollowers": null, "twitterhandle": null, "type": null, "web_technologies": 
"apache;nginx;facebook_social_plugins;facebook_connect;google_analytics;facebook_like_button;centos", "website": "test.domain.net", "zip": "1216"}, "createdAt": "2021-10-12T12:57:35.700Z", "updatedAt": "2021-10-12T12:57:49.774Z", "archived": false}, "emitted_at": 1655280716934} +{"stream": "companies", "data": {"id": "7097805635", "properties": {"about_us": null, "address": null, "address2": null, "annualrevenue": null, "city": "Test City 7", "closedate": null, "closedate_timestamp_earliest_value_a2a17e6e": null, "country": "Bangladesh", "createdate": "2021-10-12T12:57:14.927000+00:00", "days_to_close": null, "description": "This is a placeholder page installed by the Ubuntu release of the Lighttpd server package.", "domain": "test.domain.net", "engagements_last_meeting_booked": null, "engagements_last_meeting_booked_campaign": null, "engagements_last_meeting_booked_medium": null, "engagements_last_meeting_booked_source": null, "facebook_company_page": null, "facebookfans": null, "first_contact_createdate": null, "first_contact_createdate_timestamp_earliest_value_78b50eea": null, "first_conversion_date": null, "first_conversion_date_timestamp_earliest_value_61f58f2c": null, "first_conversion_event_name": null, "first_conversion_event_name_timestamp_earliest_value_68ddae0a": null, "first_deal_created_date": null, "founded_year": null, "googleplus_page": null, "hs_additional_domains": null, "hs_all_accessible_team_ids": null, "hs_all_assigned_business_unit_ids": null, "hs_all_owner_ids": null, "hs_all_team_ids": null, "hs_analytics_first_timestamp": null, "hs_analytics_first_timestamp_timestamp_earliest_value_11e3a63a": null, "hs_analytics_first_touch_converting_campaign": null, "hs_analytics_first_touch_converting_campaign_timestamp_earliest_value_4757fe10": null, "hs_analytics_first_visit_timestamp": null, "hs_analytics_first_visit_timestamp_timestamp_earliest_value_accc17ae": null, "hs_analytics_last_timestamp": null, 
"hs_analytics_last_timestamp_timestamp_latest_value_4e16365a": null, "hs_analytics_last_touch_converting_campaign": null, "hs_analytics_last_touch_converting_campaign_timestamp_latest_value_81a64e30": null, "hs_analytics_last_visit_timestamp": null, "hs_analytics_last_visit_timestamp_timestamp_latest_value_999a0fce": null, "hs_analytics_num_page_views": null, "hs_analytics_num_page_views_cardinality_sum_e46e85b0": null, "hs_analytics_num_visits": null, "hs_analytics_num_visits_cardinality_sum_53d952a6": null, "hs_analytics_source": null, "hs_analytics_source_data_1": null, "hs_analytics_source_data_1_timestamp_earliest_value_9b2f1fa1": null, "hs_analytics_source_data_2": null, "hs_analytics_source_data_2_timestamp_earliest_value_9b2f9400": null, "hs_analytics_source_timestamp_earliest_value_25a3a52c": null, "hs_avatar_filemanager_key": null, "hs_created_by_user_id": null, "hs_createdate": null, "hs_date_entered_customer": null, "hs_date_entered_evangelist": null, "hs_date_entered_lead": null, "hs_date_entered_marketingqualifiedlead": null, "hs_date_entered_opportunity": null, "hs_date_entered_other": null, "hs_date_entered_salesqualifiedlead": null, "hs_date_entered_subscriber": null, "hs_date_exited_customer": null, "hs_date_exited_evangelist": null, "hs_date_exited_lead": null, "hs_date_exited_marketingqualifiedlead": null, "hs_date_exited_opportunity": null, "hs_date_exited_other": null, "hs_date_exited_salesqualifiedlead": null, "hs_date_exited_subscriber": null, "hs_ideal_customer_profile": null, "hs_is_target_account": null, "hs_last_booked_meeting_date": null, "hs_last_logged_call_date": null, "hs_last_open_task_date": null, "hs_last_sales_activity_date": null, "hs_last_sales_activity_timestamp": null, "hs_lastmodifieddate": "2021-10-12T12:57:23.747000+00:00", "hs_latest_createdate_of_active_subscriptions": null, "hs_latest_meeting_activity": null, "hs_lead_status": null, "hs_merged_object_ids": null, "hs_num_blockers": 0.0, "hs_num_child_companies": 0.0, 
"hs_num_contacts_with_buying_roles": 0.0, "hs_num_decision_makers": 0.0, "hs_num_open_deals": 0.0, "hs_object_id": 7097805635, "hs_parent_company_id": null, "hs_pipeline": null, "hs_predictivecontactscore_v2": null, "hs_predictivecontactscore_v2_next_max_max_d4e58c1e": null, "hs_sales_email_last_replied": null, "hs_target_account": null, "hs_target_account_probability": 0.4076234698295593, "hs_target_account_recommendation_snooze_time": null, "hs_target_account_recommendation_state": null, "hs_time_in_customer": null, "hs_time_in_evangelist": null, "hs_time_in_lead": null, "hs_time_in_marketingqualifiedlead": null, "hs_time_in_opportunity": null, "hs_time_in_other": null, "hs_time_in_salesqualifiedlead": null, "hs_time_in_subscriber": null, "hs_total_deal_value": null, "hs_unique_creation_key": null, "hs_updated_by_user_id": null, "hs_user_ids_of_all_notification_followers": null, "hs_user_ids_of_all_notification_unfollowers": null, "hs_user_ids_of_all_owners": null, "hubspot_owner_assigneddate": null, "hubspot_owner_id": null, "hubspot_team_id": null, "hubspotscore": null, "industry": "Airbyte Test", "is_public": true, "lifecyclestage": null, "linkedin_company_page": "https://www.linkedin.com/company/bd-domain", "linkedinbio": null, "name": "test company 7", "notes_last_contacted": null, "notes_last_updated": null, "notes_next_activity_date": null, "num_associated_contacts": 0.0, "num_associated_deals": null, "num_contacted_notes": null, "num_conversion_events": null, "num_conversion_events_cardinality_sum_d095f14b": null, "num_notes": null, "numberofemployees": null, "phone": "(877) 929-0687-00291", "recent_conversion_date": null, "recent_conversion_date_timestamp_latest_value_72856da1": null, "recent_conversion_event_name": null, "recent_conversion_event_name_timestamp_latest_value_66c820bf": null, "recent_deal_amount": null, "recent_deal_close_date": null, "state": "Massachusetts", "timezone": "Asia/Dhaka", "total_money_raised": null, "total_revenue": null, 
"twitterbio": null, "twitterfollowers": null, "twitterhandle": null, "type": null, "web_technologies": "apache;nginx;facebook_social_plugins;facebook_connect;google_analytics;facebook_like_button;centos", "website": "test.domain.net", "zip": "1216"}, "createdAt": "2021-10-12T12:57:14.927Z", "updatedAt": "2021-10-12T12:57:23.747Z", "archived": false}, "emitted_at": 1655280716934} +{"stream": "companies", "data": {"id": "7097805640", "properties": {"about_us": null, "address": null, "address2": null, "annualrevenue": null, "city": "Test City 20", "closedate": null, "closedate_timestamp_earliest_value_a2a17e6e": null, "country": "Bangladesh", "createdate": "2021-10-12T12:57:19.356000+00:00", "days_to_close": null, "description": "This is a placeholder page installed by the Ubuntu release of the Lighttpd server package.", "domain": "test.domain.net", "engagements_last_meeting_booked": null, "engagements_last_meeting_booked_campaign": null, "engagements_last_meeting_booked_medium": null, "engagements_last_meeting_booked_source": null, "facebook_company_page": null, "facebookfans": null, "first_contact_createdate": null, "first_contact_createdate_timestamp_earliest_value_78b50eea": null, "first_conversion_date": null, "first_conversion_date_timestamp_earliest_value_61f58f2c": null, "first_conversion_event_name": null, "first_conversion_event_name_timestamp_earliest_value_68ddae0a": null, "first_deal_created_date": null, "founded_year": null, "googleplus_page": null, "hs_additional_domains": null, "hs_all_accessible_team_ids": null, "hs_all_assigned_business_unit_ids": null, "hs_all_owner_ids": null, "hs_all_team_ids": null, "hs_analytics_first_timestamp": null, "hs_analytics_first_timestamp_timestamp_earliest_value_11e3a63a": null, "hs_analytics_first_touch_converting_campaign": null, "hs_analytics_first_touch_converting_campaign_timestamp_earliest_value_4757fe10": null, "hs_analytics_first_visit_timestamp": null, 
"hs_analytics_first_visit_timestamp_timestamp_earliest_value_accc17ae": null, "hs_analytics_last_timestamp": null, "hs_analytics_last_timestamp_timestamp_latest_value_4e16365a": null, "hs_analytics_last_touch_converting_campaign": null, "hs_analytics_last_touch_converting_campaign_timestamp_latest_value_81a64e30": null, "hs_analytics_last_visit_timestamp": null, "hs_analytics_last_visit_timestamp_timestamp_latest_value_999a0fce": null, "hs_analytics_num_page_views": null, "hs_analytics_num_page_views_cardinality_sum_e46e85b0": null, "hs_analytics_num_visits": null, "hs_analytics_num_visits_cardinality_sum_53d952a6": null, "hs_analytics_source": null, "hs_analytics_source_data_1": null, "hs_analytics_source_data_1_timestamp_earliest_value_9b2f1fa1": null, "hs_analytics_source_data_2": null, "hs_analytics_source_data_2_timestamp_earliest_value_9b2f9400": null, "hs_analytics_source_timestamp_earliest_value_25a3a52c": null, "hs_avatar_filemanager_key": null, "hs_created_by_user_id": null, "hs_createdate": null, "hs_date_entered_customer": null, "hs_date_entered_evangelist": null, "hs_date_entered_lead": null, "hs_date_entered_marketingqualifiedlead": null, "hs_date_entered_opportunity": null, "hs_date_entered_other": null, "hs_date_entered_salesqualifiedlead": null, "hs_date_entered_subscriber": null, "hs_date_exited_customer": null, "hs_date_exited_evangelist": null, "hs_date_exited_lead": null, "hs_date_exited_marketingqualifiedlead": null, "hs_date_exited_opportunity": null, "hs_date_exited_other": null, "hs_date_exited_salesqualifiedlead": null, "hs_date_exited_subscriber": null, "hs_ideal_customer_profile": null, "hs_is_target_account": null, "hs_last_booked_meeting_date": null, "hs_last_logged_call_date": null, "hs_last_open_task_date": null, "hs_last_sales_activity_date": null, "hs_last_sales_activity_timestamp": null, "hs_lastmodifieddate": "2021-10-12T12:57:37.897000+00:00", "hs_latest_createdate_of_active_subscriptions": null, "hs_latest_meeting_activity": 
null, "hs_lead_status": null, "hs_merged_object_ids": null, "hs_num_blockers": 0.0, "hs_num_child_companies": 0.0, "hs_num_contacts_with_buying_roles": 0.0, "hs_num_decision_makers": 0.0, "hs_num_open_deals": 0.0, "hs_object_id": 7097805640, "hs_parent_company_id": null, "hs_pipeline": null, "hs_predictivecontactscore_v2": null, "hs_predictivecontactscore_v2_next_max_max_d4e58c1e": null, "hs_sales_email_last_replied": null, "hs_target_account": null, "hs_target_account_probability": 0.4076234698295593, "hs_target_account_recommendation_snooze_time": null, "hs_target_account_recommendation_state": null, "hs_time_in_customer": null, "hs_time_in_evangelist": null, "hs_time_in_lead": null, "hs_time_in_marketingqualifiedlead": null, "hs_time_in_opportunity": null, "hs_time_in_other": null, "hs_time_in_salesqualifiedlead": null, "hs_time_in_subscriber": null, "hs_total_deal_value": null, "hs_unique_creation_key": null, "hs_updated_by_user_id": null, "hs_user_ids_of_all_notification_followers": null, "hs_user_ids_of_all_notification_unfollowers": null, "hs_user_ids_of_all_owners": null, "hubspot_owner_assigneddate": null, "hubspot_owner_id": null, "hubspot_team_id": null, "hubspotscore": null, "industry": "Airbyte Test", "is_public": true, "lifecyclestage": null, "linkedin_company_page": "https://www.linkedin.com/company/bd-domain", "linkedinbio": null, "name": "test company 20", "notes_last_contacted": null, "notes_last_updated": null, "notes_next_activity_date": null, "num_associated_contacts": 0.0, "num_associated_deals": null, "num_contacted_notes": null, "num_conversion_events": null, "num_conversion_events_cardinality_sum_d095f14b": null, "num_notes": null, "numberofemployees": null, "phone": "(877) 929-0687-00291", "recent_conversion_date": null, "recent_conversion_date_timestamp_latest_value_72856da1": null, "recent_conversion_event_name": null, "recent_conversion_event_name_timestamp_latest_value_66c820bf": null, "recent_deal_amount": null, 
"recent_deal_close_date": null, "state": "Massachusetts", "timezone": "Asia/Dhaka", "total_money_raised": null, "total_revenue": null, "twitterbio": null, "twitterfollowers": null, "twitterhandle": null, "type": null, "web_technologies": "apache;nginx;facebook_social_plugins;facebook_connect;google_analytics;facebook_like_button;centos", "website": "test.domain.net", "zip": "1216"}, "createdAt": "2021-10-12T12:57:19.356Z", "updatedAt": "2021-10-12T12:57:37.897Z", "archived": false}, "emitted_at": 1655280716935} +{"stream": "companies", "data": {"id": "7097805645", "properties": {"about_us": null, "address": null, "address2": null, "annualrevenue": null, "city": "Test City 35", "closedate": null, "closedate_timestamp_earliest_value_a2a17e6e": null, "country": "Bangladesh", "createdate": "2021-10-12T12:57:24.130000+00:00", "days_to_close": null, "description": "This is a placeholder page installed by the Ubuntu release of the Lighttpd server package.", "domain": "test.domain.net", "engagements_last_meeting_booked": null, "engagements_last_meeting_booked_campaign": null, "engagements_last_meeting_booked_medium": null, "engagements_last_meeting_booked_source": null, "facebook_company_page": null, "facebookfans": null, "first_contact_createdate": null, "first_contact_createdate_timestamp_earliest_value_78b50eea": null, "first_conversion_date": null, "first_conversion_date_timestamp_earliest_value_61f58f2c": null, "first_conversion_event_name": null, "first_conversion_event_name_timestamp_earliest_value_68ddae0a": null, "first_deal_created_date": null, "founded_year": null, "googleplus_page": null, "hs_additional_domains": null, "hs_all_accessible_team_ids": null, "hs_all_assigned_business_unit_ids": null, "hs_all_owner_ids": null, "hs_all_team_ids": null, "hs_analytics_first_timestamp": null, "hs_analytics_first_timestamp_timestamp_earliest_value_11e3a63a": null, "hs_analytics_first_touch_converting_campaign": null, 
"hs_analytics_first_touch_converting_campaign_timestamp_earliest_value_4757fe10": null, "hs_analytics_first_visit_timestamp": null, "hs_analytics_first_visit_timestamp_timestamp_earliest_value_accc17ae": null, "hs_analytics_last_timestamp": null, "hs_analytics_last_timestamp_timestamp_latest_value_4e16365a": null, "hs_analytics_last_touch_converting_campaign": null, "hs_analytics_last_touch_converting_campaign_timestamp_latest_value_81a64e30": null, "hs_analytics_last_visit_timestamp": null, "hs_analytics_last_visit_timestamp_timestamp_latest_value_999a0fce": null, "hs_analytics_num_page_views": null, "hs_analytics_num_page_views_cardinality_sum_e46e85b0": null, "hs_analytics_num_visits": null, "hs_analytics_num_visits_cardinality_sum_53d952a6": null, "hs_analytics_source": null, "hs_analytics_source_data_1": null, "hs_analytics_source_data_1_timestamp_earliest_value_9b2f1fa1": null, "hs_analytics_source_data_2": null, "hs_analytics_source_data_2_timestamp_earliest_value_9b2f9400": null, "hs_analytics_source_timestamp_earliest_value_25a3a52c": null, "hs_avatar_filemanager_key": null, "hs_created_by_user_id": null, "hs_createdate": null, "hs_date_entered_customer": null, "hs_date_entered_evangelist": null, "hs_date_entered_lead": null, "hs_date_entered_marketingqualifiedlead": null, "hs_date_entered_opportunity": null, "hs_date_entered_other": null, "hs_date_entered_salesqualifiedlead": null, "hs_date_entered_subscriber": null, "hs_date_exited_customer": null, "hs_date_exited_evangelist": null, "hs_date_exited_lead": null, "hs_date_exited_marketingqualifiedlead": null, "hs_date_exited_opportunity": null, "hs_date_exited_other": null, "hs_date_exited_salesqualifiedlead": null, "hs_date_exited_subscriber": null, "hs_ideal_customer_profile": null, "hs_is_target_account": null, "hs_last_booked_meeting_date": null, "hs_last_logged_call_date": null, "hs_last_open_task_date": null, "hs_last_sales_activity_date": null, "hs_last_sales_activity_timestamp": null, 
"hs_lastmodifieddate": "2021-10-12T12:57:29.733000+00:00", "hs_latest_createdate_of_active_subscriptions": null, "hs_latest_meeting_activity": null, "hs_lead_status": null, "hs_merged_object_ids": null, "hs_num_blockers": 0.0, "hs_num_child_companies": 0.0, "hs_num_contacts_with_buying_roles": 0.0, "hs_num_decision_makers": 0.0, "hs_num_open_deals": 0.0, "hs_object_id": 7097805645, "hs_parent_company_id": null, "hs_pipeline": null, "hs_predictivecontactscore_v2": null, "hs_predictivecontactscore_v2_next_max_max_d4e58c1e": null, "hs_sales_email_last_replied": null, "hs_target_account": null, "hs_target_account_probability": 0.4076234698295593, "hs_target_account_recommendation_snooze_time": null, "hs_target_account_recommendation_state": null, "hs_time_in_customer": null, "hs_time_in_evangelist": null, "hs_time_in_lead": null, "hs_time_in_marketingqualifiedlead": null, "hs_time_in_opportunity": null, "hs_time_in_other": null, "hs_time_in_salesqualifiedlead": null, "hs_time_in_subscriber": null, "hs_total_deal_value": null, "hs_unique_creation_key": null, "hs_updated_by_user_id": null, "hs_user_ids_of_all_notification_followers": null, "hs_user_ids_of_all_notification_unfollowers": null, "hs_user_ids_of_all_owners": null, "hubspot_owner_assigneddate": null, "hubspot_owner_id": null, "hubspot_team_id": null, "hubspotscore": null, "industry": "Airbyte Test", "is_public": true, "lifecyclestage": null, "linkedin_company_page": "https://www.linkedin.com/company/bd-domain", "linkedinbio": null, "name": "test company 35", "notes_last_contacted": null, "notes_last_updated": null, "notes_next_activity_date": null, "num_associated_contacts": 0.0, "num_associated_deals": null, "num_contacted_notes": null, "num_conversion_events": null, "num_conversion_events_cardinality_sum_d095f14b": null, "num_notes": null, "numberofemployees": null, "phone": "(877) 929-0687-00291", "recent_conversion_date": null, "recent_conversion_date_timestamp_latest_value_72856da1": null, 
"recent_conversion_event_name": null, "recent_conversion_event_name_timestamp_latest_value_66c820bf": null, "recent_deal_amount": null, "recent_deal_close_date": null, "state": "Massachusetts", "timezone": "Asia/Dhaka", "total_money_raised": null, "total_revenue": null, "twitterbio": null, "twitterfollowers": null, "twitterhandle": null, "type": null, "web_technologies": "apache;nginx;facebook_social_plugins;facebook_connect;google_analytics;facebook_like_button;centos", "website": "test.domain.net", "zip": "1216"}, "createdAt": "2021-10-12T12:57:24.130Z", "updatedAt": "2021-10-12T12:57:29.733Z", "archived": false}, "emitted_at": 1655280716935} +{"stream": "companies", "data": {"id": "7097805655", "properties": {"about_us": null, "address": null, "address2": null, "annualrevenue": null, "city": "Test City 81", "closedate": null, "closedate_timestamp_earliest_value_a2a17e6e": null, "country": "Bangladesh", "createdate": "2021-10-12T12:57:39.893000+00:00", "days_to_close": null, "description": "This is a placeholder page installed by the Ubuntu release of the Lighttpd server package.", "domain": "test.domain.net", "engagements_last_meeting_booked": null, "engagements_last_meeting_booked_campaign": null, "engagements_last_meeting_booked_medium": null, "engagements_last_meeting_booked_source": null, "facebook_company_page": null, "facebookfans": null, "first_contact_createdate": null, "first_contact_createdate_timestamp_earliest_value_78b50eea": null, "first_conversion_date": null, "first_conversion_date_timestamp_earliest_value_61f58f2c": null, "first_conversion_event_name": null, "first_conversion_event_name_timestamp_earliest_value_68ddae0a": null, "first_deal_created_date": null, "founded_year": null, "googleplus_page": null, "hs_additional_domains": null, "hs_all_accessible_team_ids": null, "hs_all_assigned_business_unit_ids": null, "hs_all_owner_ids": null, "hs_all_team_ids": null, "hs_analytics_first_timestamp": null, 
"hs_analytics_first_timestamp_timestamp_earliest_value_11e3a63a": null, "hs_analytics_first_touch_converting_campaign": null, "hs_analytics_first_touch_converting_campaign_timestamp_earliest_value_4757fe10": null, "hs_analytics_first_visit_timestamp": null, "hs_analytics_first_visit_timestamp_timestamp_earliest_value_accc17ae": null, "hs_analytics_last_timestamp": null, "hs_analytics_last_timestamp_timestamp_latest_value_4e16365a": null, "hs_analytics_last_touch_converting_campaign": null, "hs_analytics_last_touch_converting_campaign_timestamp_latest_value_81a64e30": null, "hs_analytics_last_visit_timestamp": null, "hs_analytics_last_visit_timestamp_timestamp_latest_value_999a0fce": null, "hs_analytics_num_page_views": null, "hs_analytics_num_page_views_cardinality_sum_e46e85b0": null, "hs_analytics_num_visits": null, "hs_analytics_num_visits_cardinality_sum_53d952a6": null, "hs_analytics_source": null, "hs_analytics_source_data_1": null, "hs_analytics_source_data_1_timestamp_earliest_value_9b2f1fa1": null, "hs_analytics_source_data_2": null, "hs_analytics_source_data_2_timestamp_earliest_value_9b2f9400": null, "hs_analytics_source_timestamp_earliest_value_25a3a52c": null, "hs_avatar_filemanager_key": null, "hs_created_by_user_id": null, "hs_createdate": null, "hs_date_entered_customer": null, "hs_date_entered_evangelist": null, "hs_date_entered_lead": null, "hs_date_entered_marketingqualifiedlead": null, "hs_date_entered_opportunity": null, "hs_date_entered_other": null, "hs_date_entered_salesqualifiedlead": null, "hs_date_entered_subscriber": null, "hs_date_exited_customer": null, "hs_date_exited_evangelist": null, "hs_date_exited_lead": null, "hs_date_exited_marketingqualifiedlead": null, "hs_date_exited_opportunity": null, "hs_date_exited_other": null, "hs_date_exited_salesqualifiedlead": null, "hs_date_exited_subscriber": null, "hs_ideal_customer_profile": null, "hs_is_target_account": null, "hs_last_booked_meeting_date": null, "hs_last_logged_call_date": 
null, "hs_last_open_task_date": null, "hs_last_sales_activity_date": null, "hs_last_sales_activity_timestamp": null, "hs_lastmodifieddate": "2021-10-12T12:58:45.649000+00:00", "hs_latest_createdate_of_active_subscriptions": null, "hs_latest_meeting_activity": null, "hs_lead_status": null, "hs_merged_object_ids": null, "hs_num_blockers": 0.0, "hs_num_child_companies": 0.0, "hs_num_contacts_with_buying_roles": 0.0, "hs_num_decision_makers": 0.0, "hs_num_open_deals": 0.0, "hs_object_id": 7097805655, "hs_parent_company_id": null, "hs_pipeline": null, "hs_predictivecontactscore_v2": null, "hs_predictivecontactscore_v2_next_max_max_d4e58c1e": null, "hs_sales_email_last_replied": null, "hs_target_account": null, "hs_target_account_probability": 0.4076234698295593, "hs_target_account_recommendation_snooze_time": null, "hs_target_account_recommendation_state": null, "hs_time_in_customer": null, "hs_time_in_evangelist": null, "hs_time_in_lead": null, "hs_time_in_marketingqualifiedlead": null, "hs_time_in_opportunity": null, "hs_time_in_other": null, "hs_time_in_salesqualifiedlead": null, "hs_time_in_subscriber": null, "hs_total_deal_value": null, "hs_unique_creation_key": null, "hs_updated_by_user_id": null, "hs_user_ids_of_all_notification_followers": null, "hs_user_ids_of_all_notification_unfollowers": null, "hs_user_ids_of_all_owners": null, "hubspot_owner_assigneddate": null, "hubspot_owner_id": null, "hubspot_team_id": null, "hubspotscore": null, "industry": "Airbyte Test", "is_public": true, "lifecyclestage": null, "linkedin_company_page": "https://www.linkedin.com/company/bd-domain", "linkedinbio": null, "name": "test company 81", "notes_last_contacted": null, "notes_last_updated": null, "notes_next_activity_date": null, "num_associated_contacts": 0.0, "num_associated_deals": null, "num_contacted_notes": null, "num_conversion_events": null, "num_conversion_events_cardinality_sum_d095f14b": null, "num_notes": null, "numberofemployees": null, "phone": "(877) 
929-0687-00291", "recent_conversion_date": null, "recent_conversion_date_timestamp_latest_value_72856da1": null, "recent_conversion_event_name": null, "recent_conversion_event_name_timestamp_latest_value_66c820bf": null, "recent_deal_amount": null, "recent_deal_close_date": null, "state": "Massachusetts", "timezone": "Asia/Dhaka", "total_money_raised": null, "total_revenue": null, "twitterbio": null, "twitterfollowers": null, "twitterhandle": null, "type": null, "web_technologies": "apache;nginx;facebook_social_plugins;facebook_connect;google_analytics;facebook_like_button;centos", "website": "test.domain.net", "zip": "1216"}, "createdAt": "2021-10-12T12:57:39.893Z", "updatedAt": "2021-10-12T12:58:45.649Z", "archived": false}, "emitted_at": 1655280716936} +{"stream": "companies", "data": {"id": "7097809515", "properties": {"about_us": null, "address": null, "address2": null, "annualrevenue": null, "city": "Test City 5", "closedate": null, "closedate_timestamp_earliest_value_a2a17e6e": null, "country": "Bangladesh", "createdate": "2021-10-12T12:57:13.929000+00:00", "days_to_close": null, "description": "This is a placeholder page installed by the Ubuntu release of the Lighttpd server package.", "domain": "test.domain.net", "engagements_last_meeting_booked": null, "engagements_last_meeting_booked_campaign": null, "engagements_last_meeting_booked_medium": null, "engagements_last_meeting_booked_source": null, "facebook_company_page": null, "facebookfans": null, "first_contact_createdate": null, "first_contact_createdate_timestamp_earliest_value_78b50eea": null, "first_conversion_date": null, "first_conversion_date_timestamp_earliest_value_61f58f2c": null, "first_conversion_event_name": null, "first_conversion_event_name_timestamp_earliest_value_68ddae0a": null, "first_deal_created_date": null, "founded_year": null, "googleplus_page": null, "hs_additional_domains": null, "hs_all_accessible_team_ids": null, "hs_all_assigned_business_unit_ids": null, "hs_all_owner_ids": 
null, "hs_all_team_ids": null, "hs_analytics_first_timestamp": null, "hs_analytics_first_timestamp_timestamp_earliest_value_11e3a63a": null, "hs_analytics_first_touch_converting_campaign": null, "hs_analytics_first_touch_converting_campaign_timestamp_earliest_value_4757fe10": null, "hs_analytics_first_visit_timestamp": null, "hs_analytics_first_visit_timestamp_timestamp_earliest_value_accc17ae": null, "hs_analytics_last_timestamp": null, "hs_analytics_last_timestamp_timestamp_latest_value_4e16365a": null, "hs_analytics_last_touch_converting_campaign": null, "hs_analytics_last_touch_converting_campaign_timestamp_latest_value_81a64e30": null, "hs_analytics_last_visit_timestamp": null, "hs_analytics_last_visit_timestamp_timestamp_latest_value_999a0fce": null, "hs_analytics_num_page_views": null, "hs_analytics_num_page_views_cardinality_sum_e46e85b0": null, "hs_analytics_num_visits": null, "hs_analytics_num_visits_cardinality_sum_53d952a6": null, "hs_analytics_source": null, "hs_analytics_source_data_1": null, "hs_analytics_source_data_1_timestamp_earliest_value_9b2f1fa1": null, "hs_analytics_source_data_2": null, "hs_analytics_source_data_2_timestamp_earliest_value_9b2f9400": null, "hs_analytics_source_timestamp_earliest_value_25a3a52c": null, "hs_avatar_filemanager_key": null, "hs_created_by_user_id": null, "hs_createdate": null, "hs_date_entered_customer": null, "hs_date_entered_evangelist": null, "hs_date_entered_lead": null, "hs_date_entered_marketingqualifiedlead": null, "hs_date_entered_opportunity": null, "hs_date_entered_other": null, "hs_date_entered_salesqualifiedlead": null, "hs_date_entered_subscriber": null, "hs_date_exited_customer": null, "hs_date_exited_evangelist": null, "hs_date_exited_lead": null, "hs_date_exited_marketingqualifiedlead": null, "hs_date_exited_opportunity": null, "hs_date_exited_other": null, "hs_date_exited_salesqualifiedlead": null, "hs_date_exited_subscriber": null, "hs_ideal_customer_profile": null, "hs_is_target_account": null, 
"hs_last_booked_meeting_date": null, "hs_last_logged_call_date": null, "hs_last_open_task_date": null, "hs_last_sales_activity_date": null, "hs_last_sales_activity_timestamp": null, "hs_lastmodifieddate": "2021-10-12T12:57:21.758000+00:00", "hs_latest_createdate_of_active_subscriptions": null, "hs_latest_meeting_activity": null, "hs_lead_status": null, "hs_merged_object_ids": null, "hs_num_blockers": 0.0, "hs_num_child_companies": 0.0, "hs_num_contacts_with_buying_roles": 0.0, "hs_num_decision_makers": 0.0, "hs_num_open_deals": 0.0, "hs_object_id": 7097809515, "hs_parent_company_id": null, "hs_pipeline": null, "hs_predictivecontactscore_v2": null, "hs_predictivecontactscore_v2_next_max_max_d4e58c1e": null, "hs_sales_email_last_replied": null, "hs_target_account": null, "hs_target_account_probability": 0.4076234698295593, "hs_target_account_recommendation_snooze_time": null, "hs_target_account_recommendation_state": null, "hs_time_in_customer": null, "hs_time_in_evangelist": null, "hs_time_in_lead": null, "hs_time_in_marketingqualifiedlead": null, "hs_time_in_opportunity": null, "hs_time_in_other": null, "hs_time_in_salesqualifiedlead": null, "hs_time_in_subscriber": null, "hs_total_deal_value": null, "hs_unique_creation_key": null, "hs_updated_by_user_id": null, "hs_user_ids_of_all_notification_followers": null, "hs_user_ids_of_all_notification_unfollowers": null, "hs_user_ids_of_all_owners": null, "hubspot_owner_assigneddate": null, "hubspot_owner_id": null, "hubspot_team_id": null, "hubspotscore": null, "industry": "Airbyte Test", "is_public": true, "lifecyclestage": null, "linkedin_company_page": "https://www.linkedin.com/company/bd-domain", "linkedinbio": null, "name": "test company 5", "notes_last_contacted": null, "notes_last_updated": null, "notes_next_activity_date": null, "num_associated_contacts": 0.0, "num_associated_deals": null, "num_contacted_notes": null, "num_conversion_events": null, "num_conversion_events_cardinality_sum_d095f14b": null, 
"num_notes": null, "numberofemployees": null, "phone": "(877) 929-0687-00291", "recent_conversion_date": null, "recent_conversion_date_timestamp_latest_value_72856da1": null, "recent_conversion_event_name": null, "recent_conversion_event_name_timestamp_latest_value_66c820bf": null, "recent_deal_amount": null, "recent_deal_close_date": null, "state": "Massachusetts", "timezone": "Asia/Dhaka", "total_money_raised": null, "total_revenue": null, "twitterbio": null, "twitterfollowers": null, "twitterhandle": null, "type": null, "web_technologies": "apache;nginx;facebook_social_plugins;facebook_connect;google_analytics;facebook_like_button;centos", "website": "test.domain.net", "zip": "1216"}, "createdAt": "2021-10-12T12:57:13.929Z", "updatedAt": "2021-10-12T12:57:21.758Z", "archived": false}, "emitted_at": 1655280716936} +{"stream": "companies", "data": {"id": "7097809526", "properties": {"about_us": null, "address": null, "address2": null, "annualrevenue": null, "city": "Test City 29", "closedate": null, "closedate_timestamp_earliest_value_a2a17e6e": null, "country": "Bangladesh", "createdate": "2021-10-12T12:57:22.191000+00:00", "days_to_close": null, "description": "This is a placeholder page installed by the Ubuntu release of the Lighttpd server package.", "domain": "test.domain.net", "engagements_last_meeting_booked": null, "engagements_last_meeting_booked_campaign": null, "engagements_last_meeting_booked_medium": null, "engagements_last_meeting_booked_source": null, "facebook_company_page": null, "facebookfans": null, "first_contact_createdate": null, "first_contact_createdate_timestamp_earliest_value_78b50eea": null, "first_conversion_date": null, "first_conversion_date_timestamp_earliest_value_61f58f2c": null, "first_conversion_event_name": null, "first_conversion_event_name_timestamp_earliest_value_68ddae0a": null, "first_deal_created_date": null, "founded_year": null, "googleplus_page": null, "hs_additional_domains": null, "hs_all_accessible_team_ids": null, 
"hs_all_assigned_business_unit_ids": null, "hs_all_owner_ids": null, "hs_all_team_ids": null, "hs_analytics_first_timestamp": null, "hs_analytics_first_timestamp_timestamp_earliest_value_11e3a63a": null, "hs_analytics_first_touch_converting_campaign": null, "hs_analytics_first_touch_converting_campaign_timestamp_earliest_value_4757fe10": null, "hs_analytics_first_visit_timestamp": null, "hs_analytics_first_visit_timestamp_timestamp_earliest_value_accc17ae": null, "hs_analytics_last_timestamp": null, "hs_analytics_last_timestamp_timestamp_latest_value_4e16365a": null, "hs_analytics_last_touch_converting_campaign": null, "hs_analytics_last_touch_converting_campaign_timestamp_latest_value_81a64e30": null, "hs_analytics_last_visit_timestamp": null, "hs_analytics_last_visit_timestamp_timestamp_latest_value_999a0fce": null, "hs_analytics_num_page_views": null, "hs_analytics_num_page_views_cardinality_sum_e46e85b0": null, "hs_analytics_num_visits": null, "hs_analytics_num_visits_cardinality_sum_53d952a6": null, "hs_analytics_source": null, "hs_analytics_source_data_1": null, "hs_analytics_source_data_1_timestamp_earliest_value_9b2f1fa1": null, "hs_analytics_source_data_2": null, "hs_analytics_source_data_2_timestamp_earliest_value_9b2f9400": null, "hs_analytics_source_timestamp_earliest_value_25a3a52c": null, "hs_avatar_filemanager_key": null, "hs_created_by_user_id": null, "hs_createdate": null, "hs_date_entered_customer": null, "hs_date_entered_evangelist": null, "hs_date_entered_lead": null, "hs_date_entered_marketingqualifiedlead": null, "hs_date_entered_opportunity": null, "hs_date_entered_other": null, "hs_date_entered_salesqualifiedlead": null, "hs_date_entered_subscriber": null, "hs_date_exited_customer": null, "hs_date_exited_evangelist": null, "hs_date_exited_lead": null, "hs_date_exited_marketingqualifiedlead": null, "hs_date_exited_opportunity": null, "hs_date_exited_other": null, "hs_date_exited_salesqualifiedlead": null, "hs_date_exited_subscriber": null, 
"hs_ideal_customer_profile": null, "hs_is_target_account": null, "hs_last_booked_meeting_date": null, "hs_last_logged_call_date": null, "hs_last_open_task_date": null, "hs_last_sales_activity_date": null, "hs_last_sales_activity_timestamp": null, "hs_lastmodifieddate": "2021-10-12T12:57:40.740000+00:00", "hs_latest_createdate_of_active_subscriptions": null, "hs_latest_meeting_activity": null, "hs_lead_status": null, "hs_merged_object_ids": null, "hs_num_blockers": 0.0, "hs_num_child_companies": 0.0, "hs_num_contacts_with_buying_roles": 0.0, "hs_num_decision_makers": 0.0, "hs_num_open_deals": 0.0, "hs_object_id": 7097809526, "hs_parent_company_id": null, "hs_pipeline": null, "hs_predictivecontactscore_v2": null, "hs_predictivecontactscore_v2_next_max_max_d4e58c1e": null, "hs_sales_email_last_replied": null, "hs_target_account": null, "hs_target_account_probability": 0.4076234698295593, "hs_target_account_recommendation_snooze_time": null, "hs_target_account_recommendation_state": null, "hs_time_in_customer": null, "hs_time_in_evangelist": null, "hs_time_in_lead": null, "hs_time_in_marketingqualifiedlead": null, "hs_time_in_opportunity": null, "hs_time_in_other": null, "hs_time_in_salesqualifiedlead": null, "hs_time_in_subscriber": null, "hs_total_deal_value": null, "hs_unique_creation_key": null, "hs_updated_by_user_id": null, "hs_user_ids_of_all_notification_followers": null, "hs_user_ids_of_all_notification_unfollowers": null, "hs_user_ids_of_all_owners": null, "hubspot_owner_assigneddate": null, "hubspot_owner_id": null, "hubspot_team_id": null, "hubspotscore": null, "industry": "Airbyte Test", "is_public": true, "lifecyclestage": null, "linkedin_company_page": "https://www.linkedin.com/company/bd-domain", "linkedinbio": null, "name": "test company 29", "notes_last_contacted": null, "notes_last_updated": null, "notes_next_activity_date": null, "num_associated_contacts": 0.0, "num_associated_deals": null, "num_contacted_notes": null, "num_conversion_events": null, 
"num_conversion_events_cardinality_sum_d095f14b": null, "num_notes": null, "numberofemployees": null, "phone": "(877) 929-0687-00291", "recent_conversion_date": null, "recent_conversion_date_timestamp_latest_value_72856da1": null, "recent_conversion_event_name": null, "recent_conversion_event_name_timestamp_latest_value_66c820bf": null, "recent_deal_amount": null, "recent_deal_close_date": null, "state": "Massachusetts", "timezone": "Asia/Dhaka", "total_money_raised": null, "total_revenue": null, "twitterbio": null, "twitterfollowers": null, "twitterhandle": null, "type": null, "web_technologies": "apache;nginx;facebook_social_plugins;facebook_connect;google_analytics;facebook_like_button;centos", "website": "test.domain.net", "zip": "1216"}, "createdAt": "2021-10-12T12:57:22.191Z", "updatedAt": "2021-10-12T12:57:40.740Z", "archived": false}, "emitted_at": 1655280716937} +{"stream": "companies", "data": {"id": "7097809536", "properties": {"about_us": null, "address": null, "address2": null, "annualrevenue": null, "city": "Test City 44", "closedate": null, "closedate_timestamp_earliest_value_a2a17e6e": null, "country": "Bangladesh", "createdate": "2021-10-12T12:57:27.552000+00:00", "days_to_close": null, "description": "This is a placeholder page installed by the Ubuntu release of the Lighttpd server package.", "domain": "test.domain.net", "engagements_last_meeting_booked": null, "engagements_last_meeting_booked_campaign": null, "engagements_last_meeting_booked_medium": null, "engagements_last_meeting_booked_source": null, "facebook_company_page": null, "facebookfans": null, "first_contact_createdate": null, "first_contact_createdate_timestamp_earliest_value_78b50eea": null, "first_conversion_date": null, "first_conversion_date_timestamp_earliest_value_61f58f2c": null, "first_conversion_event_name": null, "first_conversion_event_name_timestamp_earliest_value_68ddae0a": null, "first_deal_created_date": null, "founded_year": null, "googleplus_page": null, 
"hs_additional_domains": null, "hs_all_accessible_team_ids": null, "hs_all_assigned_business_unit_ids": null, "hs_all_owner_ids": null, "hs_all_team_ids": null, "hs_analytics_first_timestamp": null, "hs_analytics_first_timestamp_timestamp_earliest_value_11e3a63a": null, "hs_analytics_first_touch_converting_campaign": null, "hs_analytics_first_touch_converting_campaign_timestamp_earliest_value_4757fe10": null, "hs_analytics_first_visit_timestamp": null, "hs_analytics_first_visit_timestamp_timestamp_earliest_value_accc17ae": null, "hs_analytics_last_timestamp": null, "hs_analytics_last_timestamp_timestamp_latest_value_4e16365a": null, "hs_analytics_last_touch_converting_campaign": null, "hs_analytics_last_touch_converting_campaign_timestamp_latest_value_81a64e30": null, "hs_analytics_last_visit_timestamp": null, "hs_analytics_last_visit_timestamp_timestamp_latest_value_999a0fce": null, "hs_analytics_num_page_views": null, "hs_analytics_num_page_views_cardinality_sum_e46e85b0": null, "hs_analytics_num_visits": null, "hs_analytics_num_visits_cardinality_sum_53d952a6": null, "hs_analytics_source": null, "hs_analytics_source_data_1": null, "hs_analytics_source_data_1_timestamp_earliest_value_9b2f1fa1": null, "hs_analytics_source_data_2": null, "hs_analytics_source_data_2_timestamp_earliest_value_9b2f9400": null, "hs_analytics_source_timestamp_earliest_value_25a3a52c": null, "hs_avatar_filemanager_key": null, "hs_created_by_user_id": null, "hs_createdate": null, "hs_date_entered_customer": null, "hs_date_entered_evangelist": null, "hs_date_entered_lead": null, "hs_date_entered_marketingqualifiedlead": null, "hs_date_entered_opportunity": null, "hs_date_entered_other": null, "hs_date_entered_salesqualifiedlead": null, "hs_date_entered_subscriber": null, "hs_date_exited_customer": null, "hs_date_exited_evangelist": null, "hs_date_exited_lead": null, "hs_date_exited_marketingqualifiedlead": null, "hs_date_exited_opportunity": null, "hs_date_exited_other": null, 
"hs_date_exited_salesqualifiedlead": null, "hs_date_exited_subscriber": null, "hs_ideal_customer_profile": null, "hs_is_target_account": null, "hs_last_booked_meeting_date": null, "hs_last_logged_call_date": null, "hs_last_open_task_date": null, "hs_last_sales_activity_date": null, "hs_last_sales_activity_timestamp": null, "hs_lastmodifieddate": "2021-10-12T12:58:27.663000+00:00", "hs_latest_createdate_of_active_subscriptions": null, "hs_latest_meeting_activity": null, "hs_lead_status": null, "hs_merged_object_ids": null, "hs_num_blockers": 0.0, "hs_num_child_companies": 0.0, "hs_num_contacts_with_buying_roles": 0.0, "hs_num_decision_makers": 0.0, "hs_num_open_deals": 0.0, "hs_object_id": 7097809536, "hs_parent_company_id": null, "hs_pipeline": null, "hs_predictivecontactscore_v2": null, "hs_predictivecontactscore_v2_next_max_max_d4e58c1e": null, "hs_sales_email_last_replied": null, "hs_target_account": null, "hs_target_account_probability": 0.4076234698295593, "hs_target_account_recommendation_snooze_time": null, "hs_target_account_recommendation_state": null, "hs_time_in_customer": null, "hs_time_in_evangelist": null, "hs_time_in_lead": null, "hs_time_in_marketingqualifiedlead": null, "hs_time_in_opportunity": null, "hs_time_in_other": null, "hs_time_in_salesqualifiedlead": null, "hs_time_in_subscriber": null, "hs_total_deal_value": null, "hs_unique_creation_key": null, "hs_updated_by_user_id": null, "hs_user_ids_of_all_notification_followers": null, "hs_user_ids_of_all_notification_unfollowers": null, "hs_user_ids_of_all_owners": null, "hubspot_owner_assigneddate": null, "hubspot_owner_id": null, "hubspot_team_id": null, "hubspotscore": null, "industry": "Airbyte Test", "is_public": true, "lifecyclestage": null, "linkedin_company_page": "https://www.linkedin.com/company/bd-domain", "linkedinbio": null, "name": "test company 44", "notes_last_contacted": null, "notes_last_updated": null, "notes_next_activity_date": null, "num_associated_contacts": 0.0, 
"num_associated_deals": null, "num_contacted_notes": null, "num_conversion_events": null, "num_conversion_events_cardinality_sum_d095f14b": null, "num_notes": null, "numberofemployees": null, "phone": "(877) 929-0687-00291", "recent_conversion_date": null, "recent_conversion_date_timestamp_latest_value_72856da1": null, "recent_conversion_event_name": null, "recent_conversion_event_name_timestamp_latest_value_66c820bf": null, "recent_deal_amount": null, "recent_deal_close_date": null, "state": "Massachusetts", "timezone": "Asia/Dhaka", "total_money_raised": null, "total_revenue": null, "twitterbio": null, "twitterfollowers": null, "twitterhandle": null, "type": null, "web_technologies": "apache;nginx;facebook_social_plugins;facebook_connect;google_analytics;facebook_like_button;centos", "website": "test.domain.net", "zip": "1216"}, "createdAt": "2021-10-12T12:57:27.552Z", "updatedAt": "2021-10-12T12:58:27.663Z", "archived": false}, "emitted_at": 1655280716937} +{"stream": "companies", "data": {"id": "7097809537", "properties": {"about_us": null, "address": null, "address2": null, "annualrevenue": null, "city": "Test City 46", "closedate": null, "closedate_timestamp_earliest_value_a2a17e6e": null, "country": "Bangladesh", "createdate": "2021-10-12T12:57:28.156000+00:00", "days_to_close": null, "description": "This is a placeholder page installed by the Ubuntu release of the Lighttpd server package.", "domain": "test.domain.net", "engagements_last_meeting_booked": null, "engagements_last_meeting_booked_campaign": null, "engagements_last_meeting_booked_medium": null, "engagements_last_meeting_booked_source": null, "facebook_company_page": null, "facebookfans": null, "first_contact_createdate": null, "first_contact_createdate_timestamp_earliest_value_78b50eea": null, "first_conversion_date": null, "first_conversion_date_timestamp_earliest_value_61f58f2c": null, "first_conversion_event_name": null, "first_conversion_event_name_timestamp_earliest_value_68ddae0a": null, 
"first_deal_created_date": null, "founded_year": null, "googleplus_page": null, "hs_additional_domains": null, "hs_all_accessible_team_ids": null, "hs_all_assigned_business_unit_ids": null, "hs_all_owner_ids": null, "hs_all_team_ids": null, "hs_analytics_first_timestamp": null, "hs_analytics_first_timestamp_timestamp_earliest_value_11e3a63a": null, "hs_analytics_first_touch_converting_campaign": null, "hs_analytics_first_touch_converting_campaign_timestamp_earliest_value_4757fe10": null, "hs_analytics_first_visit_timestamp": null, "hs_analytics_first_visit_timestamp_timestamp_earliest_value_accc17ae": null, "hs_analytics_last_timestamp": null, "hs_analytics_last_timestamp_timestamp_latest_value_4e16365a": null, "hs_analytics_last_touch_converting_campaign": null, "hs_analytics_last_touch_converting_campaign_timestamp_latest_value_81a64e30": null, "hs_analytics_last_visit_timestamp": null, "hs_analytics_last_visit_timestamp_timestamp_latest_value_999a0fce": null, "hs_analytics_num_page_views": null, "hs_analytics_num_page_views_cardinality_sum_e46e85b0": null, "hs_analytics_num_visits": null, "hs_analytics_num_visits_cardinality_sum_53d952a6": null, "hs_analytics_source": null, "hs_analytics_source_data_1": null, "hs_analytics_source_data_1_timestamp_earliest_value_9b2f1fa1": null, "hs_analytics_source_data_2": null, "hs_analytics_source_data_2_timestamp_earliest_value_9b2f9400": null, "hs_analytics_source_timestamp_earliest_value_25a3a52c": null, "hs_avatar_filemanager_key": null, "hs_created_by_user_id": null, "hs_createdate": null, "hs_date_entered_customer": null, "hs_date_entered_evangelist": null, "hs_date_entered_lead": null, "hs_date_entered_marketingqualifiedlead": null, "hs_date_entered_opportunity": null, "hs_date_entered_other": null, "hs_date_entered_salesqualifiedlead": null, "hs_date_entered_subscriber": null, "hs_date_exited_customer": null, "hs_date_exited_evangelist": null, "hs_date_exited_lead": null, "hs_date_exited_marketingqualifiedlead": null, 
"hs_date_exited_opportunity": null, "hs_date_exited_other": null, "hs_date_exited_salesqualifiedlead": null, "hs_date_exited_subscriber": null, "hs_ideal_customer_profile": null, "hs_is_target_account": null, "hs_last_booked_meeting_date": null, "hs_last_logged_call_date": null, "hs_last_open_task_date": null, "hs_last_sales_activity_date": null, "hs_last_sales_activity_timestamp": null, "hs_lastmodifieddate": "2021-10-12T12:57:52.395000+00:00", "hs_latest_createdate_of_active_subscriptions": null, "hs_latest_meeting_activity": null, "hs_lead_status": null, "hs_merged_object_ids": null, "hs_num_blockers": 0.0, "hs_num_child_companies": 0.0, "hs_num_contacts_with_buying_roles": 0.0, "hs_num_decision_makers": 0.0, "hs_num_open_deals": 0.0, "hs_object_id": 7097809537, "hs_parent_company_id": null, "hs_pipeline": null, "hs_predictivecontactscore_v2": null, "hs_predictivecontactscore_v2_next_max_max_d4e58c1e": null, "hs_sales_email_last_replied": null, "hs_target_account": null, "hs_target_account_probability": 0.4076234698295593, "hs_target_account_recommendation_snooze_time": null, "hs_target_account_recommendation_state": null, "hs_time_in_customer": null, "hs_time_in_evangelist": null, "hs_time_in_lead": null, "hs_time_in_marketingqualifiedlead": null, "hs_time_in_opportunity": null, "hs_time_in_other": null, "hs_time_in_salesqualifiedlead": null, "hs_time_in_subscriber": null, "hs_total_deal_value": null, "hs_unique_creation_key": null, "hs_updated_by_user_id": null, "hs_user_ids_of_all_notification_followers": null, "hs_user_ids_of_all_notification_unfollowers": null, "hs_user_ids_of_all_owners": null, "hubspot_owner_assigneddate": null, "hubspot_owner_id": null, "hubspot_team_id": null, "hubspotscore": null, "industry": "Airbyte Test", "is_public": true, "lifecyclestage": null, "linkedin_company_page": "https://www.linkedin.com/company/bd-domain", "linkedinbio": null, "name": "test company 46", "notes_last_contacted": null, "notes_last_updated": null, 
"notes_next_activity_date": null, "num_associated_contacts": 0.0, "num_associated_deals": null, "num_contacted_notes": null, "num_conversion_events": null, "num_conversion_events_cardinality_sum_d095f14b": null, "num_notes": null, "numberofemployees": null, "phone": "(877) 929-0687-00291", "recent_conversion_date": null, "recent_conversion_date_timestamp_latest_value_72856da1": null, "recent_conversion_event_name": null, "recent_conversion_event_name_timestamp_latest_value_66c820bf": null, "recent_deal_amount": null, "recent_deal_close_date": null, "state": "Massachusetts", "timezone": "Asia/Dhaka", "total_money_raised": null, "total_revenue": null, "twitterbio": null, "twitterfollowers": null, "twitterhandle": null, "type": null, "web_technologies": "apache;nginx;facebook_social_plugins;facebook_connect;google_analytics;facebook_like_button;centos", "website": "test.domain.net", "zip": "1216"}, "createdAt": "2021-10-12T12:57:28.156Z", "updatedAt": "2021-10-12T12:57:52.395Z", "archived": false}, "emitted_at": 1655280717300} +{"stream": "companies", "data": {"id": "7097809540", "properties": {"about_us": null, "address": null, "address2": null, "annualrevenue": null, "city": "Test City 47", "closedate": null, "closedate_timestamp_earliest_value_a2a17e6e": null, "country": "Bangladesh", "createdate": "2021-10-12T12:57:28.550000+00:00", "days_to_close": null, "description": "This is a placeholder page installed by the Ubuntu release of the Lighttpd server package.", "domain": "test.domain.net", "engagements_last_meeting_booked": null, "engagements_last_meeting_booked_campaign": null, "engagements_last_meeting_booked_medium": null, "engagements_last_meeting_booked_source": null, "facebook_company_page": null, "facebookfans": null, "first_contact_createdate": null, "first_contact_createdate_timestamp_earliest_value_78b50eea": null, "first_conversion_date": null, "first_conversion_date_timestamp_earliest_value_61f58f2c": null, "first_conversion_event_name": null, 
"first_conversion_event_name_timestamp_earliest_value_68ddae0a": null, "first_deal_created_date": null, "founded_year": null, "googleplus_page": null, "hs_additional_domains": null, "hs_all_accessible_team_ids": null, "hs_all_assigned_business_unit_ids": null, "hs_all_owner_ids": null, "hs_all_team_ids": null, "hs_analytics_first_timestamp": null, "hs_analytics_first_timestamp_timestamp_earliest_value_11e3a63a": null, "hs_analytics_first_touch_converting_campaign": null, "hs_analytics_first_touch_converting_campaign_timestamp_earliest_value_4757fe10": null, "hs_analytics_first_visit_timestamp": null, "hs_analytics_first_visit_timestamp_timestamp_earliest_value_accc17ae": null, "hs_analytics_last_timestamp": null, "hs_analytics_last_timestamp_timestamp_latest_value_4e16365a": null, "hs_analytics_last_touch_converting_campaign": null, "hs_analytics_last_touch_converting_campaign_timestamp_latest_value_81a64e30": null, "hs_analytics_last_visit_timestamp": null, "hs_analytics_last_visit_timestamp_timestamp_latest_value_999a0fce": null, "hs_analytics_num_page_views": null, "hs_analytics_num_page_views_cardinality_sum_e46e85b0": null, "hs_analytics_num_visits": null, "hs_analytics_num_visits_cardinality_sum_53d952a6": null, "hs_analytics_source": null, "hs_analytics_source_data_1": null, "hs_analytics_source_data_1_timestamp_earliest_value_9b2f1fa1": null, "hs_analytics_source_data_2": null, "hs_analytics_source_data_2_timestamp_earliest_value_9b2f9400": null, "hs_analytics_source_timestamp_earliest_value_25a3a52c": null, "hs_avatar_filemanager_key": null, "hs_created_by_user_id": null, "hs_createdate": null, "hs_date_entered_customer": null, "hs_date_entered_evangelist": null, "hs_date_entered_lead": null, "hs_date_entered_marketingqualifiedlead": null, "hs_date_entered_opportunity": null, "hs_date_entered_other": null, "hs_date_entered_salesqualifiedlead": null, "hs_date_entered_subscriber": null, "hs_date_exited_customer": null, "hs_date_exited_evangelist": null, 
"hs_date_exited_lead": null, "hs_date_exited_marketingqualifiedlead": null, "hs_date_exited_opportunity": null, "hs_date_exited_other": null, "hs_date_exited_salesqualifiedlead": null, "hs_date_exited_subscriber": null, "hs_ideal_customer_profile": null, "hs_is_target_account": null, "hs_last_booked_meeting_date": null, "hs_last_logged_call_date": null, "hs_last_open_task_date": null, "hs_last_sales_activity_date": null, "hs_last_sales_activity_timestamp": null, "hs_lastmodifieddate": "2021-10-12T12:57:34.338000+00:00", "hs_latest_createdate_of_active_subscriptions": null, "hs_latest_meeting_activity": null, "hs_lead_status": null, "hs_merged_object_ids": null, "hs_num_blockers": 0.0, "hs_num_child_companies": 0.0, "hs_num_contacts_with_buying_roles": 0.0, "hs_num_decision_makers": 0.0, "hs_num_open_deals": 0.0, "hs_object_id": 7097809540, "hs_parent_company_id": null, "hs_pipeline": null, "hs_predictivecontactscore_v2": null, "hs_predictivecontactscore_v2_next_max_max_d4e58c1e": null, "hs_sales_email_last_replied": null, "hs_target_account": null, "hs_target_account_probability": 0.4076234698295593, "hs_target_account_recommendation_snooze_time": null, "hs_target_account_recommendation_state": null, "hs_time_in_customer": null, "hs_time_in_evangelist": null, "hs_time_in_lead": null, "hs_time_in_marketingqualifiedlead": null, "hs_time_in_opportunity": null, "hs_time_in_other": null, "hs_time_in_salesqualifiedlead": null, "hs_time_in_subscriber": null, "hs_total_deal_value": null, "hs_unique_creation_key": null, "hs_updated_by_user_id": null, "hs_user_ids_of_all_notification_followers": null, "hs_user_ids_of_all_notification_unfollowers": null, "hs_user_ids_of_all_owners": null, "hubspot_owner_assigneddate": null, "hubspot_owner_id": null, "hubspot_team_id": null, "hubspotscore": null, "industry": "Airbyte Test", "is_public": true, "lifecyclestage": null, "linkedin_company_page": "https://www.linkedin.com/company/bd-domain", "linkedinbio": null, "name": "test 
company 47", "notes_last_contacted": null, "notes_last_updated": null, "notes_next_activity_date": null, "num_associated_contacts": 0.0, "num_associated_deals": null, "num_contacted_notes": null, "num_conversion_events": null, "num_conversion_events_cardinality_sum_d095f14b": null, "num_notes": null, "numberofemployees": null, "phone": "(877) 929-0687-00291", "recent_conversion_date": null, "recent_conversion_date_timestamp_latest_value_72856da1": null, "recent_conversion_event_name": null, "recent_conversion_event_name_timestamp_latest_value_66c820bf": null, "recent_deal_amount": null, "recent_deal_close_date": null, "state": "Massachusetts", "timezone": "Asia/Dhaka", "total_money_raised": null, "total_revenue": null, "twitterbio": null, "twitterfollowers": null, "twitterhandle": null, "type": null, "web_technologies": "apache;nginx;facebook_social_plugins;facebook_connect;google_analytics;facebook_like_button;centos", "website": "test.domain.net", "zip": "1216"}, "createdAt": "2021-10-12T12:57:28.550Z", "updatedAt": "2021-10-12T12:57:34.338Z", "archived": false}, "emitted_at": 1655280717302} +{"stream": "companies", "data": {"id": "7097809543", "properties": {"about_us": null, "address": null, "address2": null, "annualrevenue": null, "city": "Test City 64", "closedate": null, "closedate_timestamp_earliest_value_a2a17e6e": null, "country": "Bangladesh", "createdate": "2021-10-12T12:57:33.851000+00:00", "days_to_close": null, "description": "This is a placeholder page installed by the Ubuntu release of the Lighttpd server package.", "domain": "test.domain.net", "engagements_last_meeting_booked": null, "engagements_last_meeting_booked_campaign": null, "engagements_last_meeting_booked_medium": null, "engagements_last_meeting_booked_source": null, "facebook_company_page": null, "facebookfans": null, "first_contact_createdate": null, "first_contact_createdate_timestamp_earliest_value_78b50eea": null, "first_conversion_date": null, 
"first_conversion_date_timestamp_earliest_value_61f58f2c": null, "first_conversion_event_name": null, "first_conversion_event_name_timestamp_earliest_value_68ddae0a": null, "first_deal_created_date": null, "founded_year": null, "googleplus_page": null, "hs_additional_domains": null, "hs_all_accessible_team_ids": null, "hs_all_assigned_business_unit_ids": null, "hs_all_owner_ids": null, "hs_all_team_ids": null, "hs_analytics_first_timestamp": null, "hs_analytics_first_timestamp_timestamp_earliest_value_11e3a63a": null, "hs_analytics_first_touch_converting_campaign": null, "hs_analytics_first_touch_converting_campaign_timestamp_earliest_value_4757fe10": null, "hs_analytics_first_visit_timestamp": null, "hs_analytics_first_visit_timestamp_timestamp_earliest_value_accc17ae": null, "hs_analytics_last_timestamp": null, "hs_analytics_last_timestamp_timestamp_latest_value_4e16365a": null, "hs_analytics_last_touch_converting_campaign": null, "hs_analytics_last_touch_converting_campaign_timestamp_latest_value_81a64e30": null, "hs_analytics_last_visit_timestamp": null, "hs_analytics_last_visit_timestamp_timestamp_latest_value_999a0fce": null, "hs_analytics_num_page_views": null, "hs_analytics_num_page_views_cardinality_sum_e46e85b0": null, "hs_analytics_num_visits": null, "hs_analytics_num_visits_cardinality_sum_53d952a6": null, "hs_analytics_source": null, "hs_analytics_source_data_1": null, "hs_analytics_source_data_1_timestamp_earliest_value_9b2f1fa1": null, "hs_analytics_source_data_2": null, "hs_analytics_source_data_2_timestamp_earliest_value_9b2f9400": null, "hs_analytics_source_timestamp_earliest_value_25a3a52c": null, "hs_avatar_filemanager_key": null, "hs_created_by_user_id": null, "hs_createdate": null, "hs_date_entered_customer": null, "hs_date_entered_evangelist": null, "hs_date_entered_lead": null, "hs_date_entered_marketingqualifiedlead": null, "hs_date_entered_opportunity": null, "hs_date_entered_other": null, "hs_date_entered_salesqualifiedlead": null, 
"hs_date_entered_subscriber": null, "hs_date_exited_customer": null, "hs_date_exited_evangelist": null, "hs_date_exited_lead": null, "hs_date_exited_marketingqualifiedlead": null, "hs_date_exited_opportunity": null, "hs_date_exited_other": null, "hs_date_exited_salesqualifiedlead": null, "hs_date_exited_subscriber": null, "hs_ideal_customer_profile": null, "hs_is_target_account": null, "hs_last_booked_meeting_date": null, "hs_last_logged_call_date": null, "hs_last_open_task_date": null, "hs_last_sales_activity_date": null, "hs_last_sales_activity_timestamp": null, "hs_lastmodifieddate": "2021-10-12T12:58:40.662000+00:00", "hs_latest_createdate_of_active_subscriptions": null, "hs_latest_meeting_activity": null, "hs_lead_status": null, "hs_merged_object_ids": null, "hs_num_blockers": 0.0, "hs_num_child_companies": 0.0, "hs_num_contacts_with_buying_roles": 0.0, "hs_num_decision_makers": 0.0, "hs_num_open_deals": 0.0, "hs_object_id": 7097809543, "hs_parent_company_id": null, "hs_pipeline": null, "hs_predictivecontactscore_v2": null, "hs_predictivecontactscore_v2_next_max_max_d4e58c1e": null, "hs_sales_email_last_replied": null, "hs_target_account": null, "hs_target_account_probability": 0.4076234698295593, "hs_target_account_recommendation_snooze_time": null, "hs_target_account_recommendation_state": null, "hs_time_in_customer": null, "hs_time_in_evangelist": null, "hs_time_in_lead": null, "hs_time_in_marketingqualifiedlead": null, "hs_time_in_opportunity": null, "hs_time_in_other": null, "hs_time_in_salesqualifiedlead": null, "hs_time_in_subscriber": null, "hs_total_deal_value": null, "hs_unique_creation_key": null, "hs_updated_by_user_id": null, "hs_user_ids_of_all_notification_followers": null, "hs_user_ids_of_all_notification_unfollowers": null, "hs_user_ids_of_all_owners": null, "hubspot_owner_assigneddate": null, "hubspot_owner_id": null, "hubspot_team_id": null, "hubspotscore": null, "industry": "Airbyte Test", "is_public": true, "lifecyclestage": null, 
"linkedin_company_page": "https://www.linkedin.com/company/bd-domain", "linkedinbio": null, "name": "test company 64", "notes_last_contacted": null, "notes_last_updated": null, "notes_next_activity_date": null, "num_associated_contacts": 0.0, "num_associated_deals": null, "num_contacted_notes": null, "num_conversion_events": null, "num_conversion_events_cardinality_sum_d095f14b": null, "num_notes": null, "numberofemployees": null, "phone": "(877) 929-0687-00291", "recent_conversion_date": null, "recent_conversion_date_timestamp_latest_value_72856da1": null, "recent_conversion_event_name": null, "recent_conversion_event_name_timestamp_latest_value_66c820bf": null, "recent_deal_amount": null, "recent_deal_close_date": null, "state": "Massachusetts", "timezone": "Asia/Dhaka", "total_money_raised": null, "total_revenue": null, "twitterbio": null, "twitterfollowers": null, "twitterhandle": null, "type": null, "web_technologies": "apache;nginx;facebook_social_plugins;facebook_connect;google_analytics;facebook_like_button;centos", "website": "test.domain.net", "zip": "1216"}, "createdAt": "2021-10-12T12:57:33.851Z", "updatedAt": "2021-10-12T12:58:40.662Z", "archived": false}, "emitted_at": 1655280717304} +{"stream": "companies", "data": {"id": "7097809545", "properties": {"about_us": null, "address": null, "address2": null, "annualrevenue": null, "city": "Test City 76", "closedate": null, "closedate_timestamp_earliest_value_a2a17e6e": null, "country": "Bangladesh", "createdate": "2021-10-12T12:57:38.320000+00:00", "days_to_close": null, "description": "This is a placeholder page installed by the Ubuntu release of the Lighttpd server package.", "domain": "test.domain.net", "engagements_last_meeting_booked": null, "engagements_last_meeting_booked_campaign": null, "engagements_last_meeting_booked_medium": null, "engagements_last_meeting_booked_source": null, "facebook_company_page": null, "facebookfans": null, "first_contact_createdate": null, 
"first_contact_createdate_timestamp_earliest_value_78b50eea": null, "first_conversion_date": null, "first_conversion_date_timestamp_earliest_value_61f58f2c": null, "first_conversion_event_name": null, "first_conversion_event_name_timestamp_earliest_value_68ddae0a": null, "first_deal_created_date": null, "founded_year": null, "googleplus_page": null, "hs_additional_domains": null, "hs_all_accessible_team_ids": null, "hs_all_assigned_business_unit_ids": null, "hs_all_owner_ids": null, "hs_all_team_ids": null, "hs_analytics_first_timestamp": null, "hs_analytics_first_timestamp_timestamp_earliest_value_11e3a63a": null, "hs_analytics_first_touch_converting_campaign": null, "hs_analytics_first_touch_converting_campaign_timestamp_earliest_value_4757fe10": null, "hs_analytics_first_visit_timestamp": null, "hs_analytics_first_visit_timestamp_timestamp_earliest_value_accc17ae": null, "hs_analytics_last_timestamp": null, "hs_analytics_last_timestamp_timestamp_latest_value_4e16365a": null, "hs_analytics_last_touch_converting_campaign": null, "hs_analytics_last_touch_converting_campaign_timestamp_latest_value_81a64e30": null, "hs_analytics_last_visit_timestamp": null, "hs_analytics_last_visit_timestamp_timestamp_latest_value_999a0fce": null, "hs_analytics_num_page_views": null, "hs_analytics_num_page_views_cardinality_sum_e46e85b0": null, "hs_analytics_num_visits": null, "hs_analytics_num_visits_cardinality_sum_53d952a6": null, "hs_analytics_source": null, "hs_analytics_source_data_1": null, "hs_analytics_source_data_1_timestamp_earliest_value_9b2f1fa1": null, "hs_analytics_source_data_2": null, "hs_analytics_source_data_2_timestamp_earliest_value_9b2f9400": null, "hs_analytics_source_timestamp_earliest_value_25a3a52c": null, "hs_avatar_filemanager_key": null, "hs_created_by_user_id": null, "hs_createdate": null, "hs_date_entered_customer": null, "hs_date_entered_evangelist": null, "hs_date_entered_lead": null, "hs_date_entered_marketingqualifiedlead": null, 
"hs_date_entered_opportunity": null, "hs_date_entered_other": null, "hs_date_entered_salesqualifiedlead": null, "hs_date_entered_subscriber": null, "hs_date_exited_customer": null, "hs_date_exited_evangelist": null, "hs_date_exited_lead": null, "hs_date_exited_marketingqualifiedlead": null, "hs_date_exited_opportunity": null, "hs_date_exited_other": null, "hs_date_exited_salesqualifiedlead": null, "hs_date_exited_subscriber": null, "hs_ideal_customer_profile": null, "hs_is_target_account": null, "hs_last_booked_meeting_date": null, "hs_last_logged_call_date": null, "hs_last_open_task_date": null, "hs_last_sales_activity_date": null, "hs_last_sales_activity_timestamp": null, "hs_lastmodifieddate": "2021-10-12T12:57:56.198000+00:00", "hs_latest_createdate_of_active_subscriptions": null, "hs_latest_meeting_activity": null, "hs_lead_status": null, "hs_merged_object_ids": null, "hs_num_blockers": 0.0, "hs_num_child_companies": 0.0, "hs_num_contacts_with_buying_roles": 0.0, "hs_num_decision_makers": 0.0, "hs_num_open_deals": 0.0, "hs_object_id": 7097809545, "hs_parent_company_id": null, "hs_pipeline": null, "hs_predictivecontactscore_v2": null, "hs_predictivecontactscore_v2_next_max_max_d4e58c1e": null, "hs_sales_email_last_replied": null, "hs_target_account": null, "hs_target_account_probability": 0.4076234698295593, "hs_target_account_recommendation_snooze_time": null, "hs_target_account_recommendation_state": null, "hs_time_in_customer": null, "hs_time_in_evangelist": null, "hs_time_in_lead": null, "hs_time_in_marketingqualifiedlead": null, "hs_time_in_opportunity": null, "hs_time_in_other": null, "hs_time_in_salesqualifiedlead": null, "hs_time_in_subscriber": null, "hs_total_deal_value": null, "hs_unique_creation_key": null, "hs_updated_by_user_id": null, "hs_user_ids_of_all_notification_followers": null, "hs_user_ids_of_all_notification_unfollowers": null, "hs_user_ids_of_all_owners": null, "hubspot_owner_assigneddate": null, "hubspot_owner_id": null, 
"hubspot_team_id": null, "hubspotscore": null, "industry": "Airbyte Test", "is_public": true, "lifecyclestage": null, "linkedin_company_page": "https://www.linkedin.com/company/bd-domain", "linkedinbio": null, "name": "test company 76", "notes_last_contacted": null, "notes_last_updated": null, "notes_next_activity_date": null, "num_associated_contacts": 0.0, "num_associated_deals": null, "num_contacted_notes": null, "num_conversion_events": null, "num_conversion_events_cardinality_sum_d095f14b": null, "num_notes": null, "numberofemployees": null, "phone": "(877) 929-0687-00291", "recent_conversion_date": null, "recent_conversion_date_timestamp_latest_value_72856da1": null, "recent_conversion_event_name": null, "recent_conversion_event_name_timestamp_latest_value_66c820bf": null, "recent_deal_amount": null, "recent_deal_close_date": null, "state": "Massachusetts", "timezone": "Asia/Dhaka", "total_money_raised": null, "total_revenue": null, "twitterbio": null, "twitterfollowers": null, "twitterhandle": null, "type": null, "web_technologies": "apache;nginx;facebook_social_plugins;facebook_connect;google_analytics;facebook_like_button;centos", "website": "test.domain.net", "zip": "1216"}, "createdAt": "2021-10-12T12:57:38.320Z", "updatedAt": "2021-10-12T12:57:56.198Z", "archived": false}, "emitted_at": 1655280717306} +{"stream": "companies", "data": {"id": "7097813536", "properties": {"about_us": null, "address": null, "address2": null, "annualrevenue": null, "city": "Test City 26", "closedate": null, "closedate_timestamp_earliest_value_a2a17e6e": null, "country": "Bangladesh", "createdate": "2021-10-12T12:57:21.261000+00:00", "days_to_close": null, "description": "This is a placeholder page installed by the Ubuntu release of the Lighttpd server package.", "domain": "test.domain.net", "engagements_last_meeting_booked": null, "engagements_last_meeting_booked_campaign": null, "engagements_last_meeting_booked_medium": null, "engagements_last_meeting_booked_source": null, 
"facebook_company_page": null, "facebookfans": null, "first_contact_createdate": null, "first_contact_createdate_timestamp_earliest_value_78b50eea": null, "first_conversion_date": null, "first_conversion_date_timestamp_earliest_value_61f58f2c": null, "first_conversion_event_name": null, "first_conversion_event_name_timestamp_earliest_value_68ddae0a": null, "first_deal_created_date": null, "founded_year": null, "googleplus_page": null, "hs_additional_domains": null, "hs_all_accessible_team_ids": null, "hs_all_assigned_business_unit_ids": null, "hs_all_owner_ids": null, "hs_all_team_ids": null, "hs_analytics_first_timestamp": null, "hs_analytics_first_timestamp_timestamp_earliest_value_11e3a63a": null, "hs_analytics_first_touch_converting_campaign": null, "hs_analytics_first_touch_converting_campaign_timestamp_earliest_value_4757fe10": null, "hs_analytics_first_visit_timestamp": null, "hs_analytics_first_visit_timestamp_timestamp_earliest_value_accc17ae": null, "hs_analytics_last_timestamp": null, "hs_analytics_last_timestamp_timestamp_latest_value_4e16365a": null, "hs_analytics_last_touch_converting_campaign": null, "hs_analytics_last_touch_converting_campaign_timestamp_latest_value_81a64e30": null, "hs_analytics_last_visit_timestamp": null, "hs_analytics_last_visit_timestamp_timestamp_latest_value_999a0fce": null, "hs_analytics_num_page_views": null, "hs_analytics_num_page_views_cardinality_sum_e46e85b0": null, "hs_analytics_num_visits": null, "hs_analytics_num_visits_cardinality_sum_53d952a6": null, "hs_analytics_source": null, "hs_analytics_source_data_1": null, "hs_analytics_source_data_1_timestamp_earliest_value_9b2f1fa1": null, "hs_analytics_source_data_2": null, "hs_analytics_source_data_2_timestamp_earliest_value_9b2f9400": null, "hs_analytics_source_timestamp_earliest_value_25a3a52c": null, "hs_avatar_filemanager_key": null, "hs_created_by_user_id": null, "hs_createdate": null, "hs_date_entered_customer": null, "hs_date_entered_evangelist": null, 
"hs_date_entered_lead": null, "hs_date_entered_marketingqualifiedlead": null, "hs_date_entered_opportunity": null, "hs_date_entered_other": null, "hs_date_entered_salesqualifiedlead": null, "hs_date_entered_subscriber": null, "hs_date_exited_customer": null, "hs_date_exited_evangelist": null, "hs_date_exited_lead": null, "hs_date_exited_marketingqualifiedlead": null, "hs_date_exited_opportunity": null, "hs_date_exited_other": null, "hs_date_exited_salesqualifiedlead": null, "hs_date_exited_subscriber": null, "hs_ideal_customer_profile": null, "hs_is_target_account": null, "hs_last_booked_meeting_date": null, "hs_last_logged_call_date": null, "hs_last_open_task_date": null, "hs_last_sales_activity_date": null, "hs_last_sales_activity_timestamp": null, "hs_lastmodifieddate": "2021-10-12T12:57:28.564000+00:00", "hs_latest_createdate_of_active_subscriptions": null, "hs_latest_meeting_activity": null, "hs_lead_status": null, "hs_merged_object_ids": null, "hs_num_blockers": 0.0, "hs_num_child_companies": 0.0, "hs_num_contacts_with_buying_roles": 0.0, "hs_num_decision_makers": 0.0, "hs_num_open_deals": 0.0, "hs_object_id": 7097813536, "hs_parent_company_id": null, "hs_pipeline": null, "hs_predictivecontactscore_v2": null, "hs_predictivecontactscore_v2_next_max_max_d4e58c1e": null, "hs_sales_email_last_replied": null, "hs_target_account": null, "hs_target_account_probability": 0.4076234698295593, "hs_target_account_recommendation_snooze_time": null, "hs_target_account_recommendation_state": null, "hs_time_in_customer": null, "hs_time_in_evangelist": null, "hs_time_in_lead": null, "hs_time_in_marketingqualifiedlead": null, "hs_time_in_opportunity": null, "hs_time_in_other": null, "hs_time_in_salesqualifiedlead": null, "hs_time_in_subscriber": null, "hs_total_deal_value": null, "hs_unique_creation_key": null, "hs_updated_by_user_id": null, "hs_user_ids_of_all_notification_followers": null, "hs_user_ids_of_all_notification_unfollowers": null, "hs_user_ids_of_all_owners": 
null, "hubspot_owner_assigneddate": null, "hubspot_owner_id": null, "hubspot_team_id": null, "hubspotscore": null, "industry": "Airbyte Test", "is_public": true, "lifecyclestage": null, "linkedin_company_page": "https://www.linkedin.com/company/bd-domain", "linkedinbio": null, "name": "test company 26", "notes_last_contacted": null, "notes_last_updated": null, "notes_next_activity_date": null, "num_associated_contacts": 0.0, "num_associated_deals": null, "num_contacted_notes": null, "num_conversion_events": null, "num_conversion_events_cardinality_sum_d095f14b": null, "num_notes": null, "numberofemployees": null, "phone": "(877) 929-0687-00291", "recent_conversion_date": null, "recent_conversion_date_timestamp_latest_value_72856da1": null, "recent_conversion_event_name": null, "recent_conversion_event_name_timestamp_latest_value_66c820bf": null, "recent_deal_amount": null, "recent_deal_close_date": null, "state": "Massachusetts", "timezone": "Asia/Dhaka", "total_money_raised": null, "total_revenue": null, "twitterbio": null, "twitterfollowers": null, "twitterhandle": null, "type": null, "web_technologies": "apache;nginx;facebook_social_plugins;facebook_connect;google_analytics;facebook_like_button;centos", "website": "test.domain.net", "zip": "1216"}, "createdAt": "2021-10-12T12:57:21.261Z", "updatedAt": "2021-10-12T12:57:28.564Z", "archived": false}, "emitted_at": 1655280717307} +{"stream": "companies", "data": {"id": "7097813542", "properties": {"about_us": null, "address": null, "address2": null, "annualrevenue": null, "city": "Test City 38", "closedate": null, "closedate_timestamp_earliest_value_a2a17e6e": null, "country": "Bangladesh", "createdate": "2021-10-12T12:57:25.372000+00:00", "days_to_close": null, "description": "This is a placeholder page installed by the Ubuntu release of the Lighttpd server package.", "domain": "test.domain.net", "engagements_last_meeting_booked": null, "engagements_last_meeting_booked_campaign": null, 
"engagements_last_meeting_booked_medium": null, "engagements_last_meeting_booked_source": null, "facebook_company_page": null, "facebookfans": null, "first_contact_createdate": null, "first_contact_createdate_timestamp_earliest_value_78b50eea": null, "first_conversion_date": null, "first_conversion_date_timestamp_earliest_value_61f58f2c": null, "first_conversion_event_name": null, "first_conversion_event_name_timestamp_earliest_value_68ddae0a": null, "first_deal_created_date": null, "founded_year": null, "googleplus_page": null, "hs_additional_domains": null, "hs_all_accessible_team_ids": null, "hs_all_assigned_business_unit_ids": null, "hs_all_owner_ids": null, "hs_all_team_ids": null, "hs_analytics_first_timestamp": null, "hs_analytics_first_timestamp_timestamp_earliest_value_11e3a63a": null, "hs_analytics_first_touch_converting_campaign": null, "hs_analytics_first_touch_converting_campaign_timestamp_earliest_value_4757fe10": null, "hs_analytics_first_visit_timestamp": null, "hs_analytics_first_visit_timestamp_timestamp_earliest_value_accc17ae": null, "hs_analytics_last_timestamp": null, "hs_analytics_last_timestamp_timestamp_latest_value_4e16365a": null, "hs_analytics_last_touch_converting_campaign": null, "hs_analytics_last_touch_converting_campaign_timestamp_latest_value_81a64e30": null, "hs_analytics_last_visit_timestamp": null, "hs_analytics_last_visit_timestamp_timestamp_latest_value_999a0fce": null, "hs_analytics_num_page_views": null, "hs_analytics_num_page_views_cardinality_sum_e46e85b0": null, "hs_analytics_num_visits": null, "hs_analytics_num_visits_cardinality_sum_53d952a6": null, "hs_analytics_source": null, "hs_analytics_source_data_1": null, "hs_analytics_source_data_1_timestamp_earliest_value_9b2f1fa1": null, "hs_analytics_source_data_2": null, "hs_analytics_source_data_2_timestamp_earliest_value_9b2f9400": null, "hs_analytics_source_timestamp_earliest_value_25a3a52c": null, "hs_avatar_filemanager_key": null, "hs_created_by_user_id": null, 
"hs_createdate": null, "hs_date_entered_customer": null, "hs_date_entered_evangelist": null, "hs_date_entered_lead": null, "hs_date_entered_marketingqualifiedlead": null, "hs_date_entered_opportunity": null, "hs_date_entered_other": null, "hs_date_entered_salesqualifiedlead": null, "hs_date_entered_subscriber": null, "hs_date_exited_customer": null, "hs_date_exited_evangelist": null, "hs_date_exited_lead": null, "hs_date_exited_marketingqualifiedlead": null, "hs_date_exited_opportunity": null, "hs_date_exited_other": null, "hs_date_exited_salesqualifiedlead": null, "hs_date_exited_subscriber": null, "hs_ideal_customer_profile": null, "hs_is_target_account": null, "hs_last_booked_meeting_date": null, "hs_last_logged_call_date": null, "hs_last_open_task_date": null, "hs_last_sales_activity_date": null, "hs_last_sales_activity_timestamp": null, "hs_lastmodifieddate": "2021-10-12T12:57:31.974000+00:00", "hs_latest_createdate_of_active_subscriptions": null, "hs_latest_meeting_activity": null, "hs_lead_status": null, "hs_merged_object_ids": null, "hs_num_blockers": 0.0, "hs_num_child_companies": 0.0, "hs_num_contacts_with_buying_roles": 0.0, "hs_num_decision_makers": 0.0, "hs_num_open_deals": 0.0, "hs_object_id": 7097813542, "hs_parent_company_id": null, "hs_pipeline": null, "hs_predictivecontactscore_v2": null, "hs_predictivecontactscore_v2_next_max_max_d4e58c1e": null, "hs_sales_email_last_replied": null, "hs_target_account": null, "hs_target_account_probability": 0.4076234698295593, "hs_target_account_recommendation_snooze_time": null, "hs_target_account_recommendation_state": null, "hs_time_in_customer": null, "hs_time_in_evangelist": null, "hs_time_in_lead": null, "hs_time_in_marketingqualifiedlead": null, "hs_time_in_opportunity": null, "hs_time_in_other": null, "hs_time_in_salesqualifiedlead": null, "hs_time_in_subscriber": null, "hs_total_deal_value": null, "hs_unique_creation_key": null, "hs_updated_by_user_id": null, "hs_user_ids_of_all_notification_followers": 
null, "hs_user_ids_of_all_notification_unfollowers": null, "hs_user_ids_of_all_owners": null, "hubspot_owner_assigneddate": null, "hubspot_owner_id": null, "hubspot_team_id": null, "hubspotscore": null, "industry": "Airbyte Test", "is_public": true, "lifecyclestage": null, "linkedin_company_page": "https://www.linkedin.com/company/bd-domain", "linkedinbio": null, "name": "test company 38", "notes_last_contacted": null, "notes_last_updated": null, "notes_next_activity_date": null, "num_associated_contacts": 0.0, "num_associated_deals": null, "num_contacted_notes": null, "num_conversion_events": null, "num_conversion_events_cardinality_sum_d095f14b": null, "num_notes": null, "numberofemployees": null, "phone": "(877) 929-0687-00291", "recent_conversion_date": null, "recent_conversion_date_timestamp_latest_value_72856da1": null, "recent_conversion_event_name": null, "recent_conversion_event_name_timestamp_latest_value_66c820bf": null, "recent_deal_amount": null, "recent_deal_close_date": null, "state": "Massachusetts", "timezone": "Asia/Dhaka", "total_money_raised": null, "total_revenue": null, "twitterbio": null, "twitterfollowers": null, "twitterhandle": null, "type": null, "web_technologies": "apache;nginx;facebook_social_plugins;facebook_connect;google_analytics;facebook_like_button;centos", "website": "test.domain.net", "zip": "1216"}, "createdAt": "2021-10-12T12:57:25.372Z", "updatedAt": "2021-10-12T12:57:31.974Z", "archived": false}, "emitted_at": 1655280717309} +{"stream": "companies", "data": {"id": "7097813546", "properties": {"about_us": null, "address": null, "address2": null, "annualrevenue": null, "city": "Test City 58", "closedate": null, "closedate_timestamp_earliest_value_a2a17e6e": null, "country": "Bangladesh", "createdate": "2021-10-12T12:57:32.033000+00:00", "days_to_close": null, "description": "This is a placeholder page installed by the Ubuntu release of the Lighttpd server package.", "domain": "test.domain.net", 
"engagements_last_meeting_booked": null, "engagements_last_meeting_booked_campaign": null, "engagements_last_meeting_booked_medium": null, "engagements_last_meeting_booked_source": null, "facebook_company_page": null, "facebookfans": null, "first_contact_createdate": null, "first_contact_createdate_timestamp_earliest_value_78b50eea": null, "first_conversion_date": null, "first_conversion_date_timestamp_earliest_value_61f58f2c": null, "first_conversion_event_name": null, "first_conversion_event_name_timestamp_earliest_value_68ddae0a": null, "first_deal_created_date": null, "founded_year": null, "googleplus_page": null, "hs_additional_domains": null, "hs_all_accessible_team_ids": null, "hs_all_assigned_business_unit_ids": null, "hs_all_owner_ids": null, "hs_all_team_ids": null, "hs_analytics_first_timestamp": null, "hs_analytics_first_timestamp_timestamp_earliest_value_11e3a63a": null, "hs_analytics_first_touch_converting_campaign": null, "hs_analytics_first_touch_converting_campaign_timestamp_earliest_value_4757fe10": null, "hs_analytics_first_visit_timestamp": null, "hs_analytics_first_visit_timestamp_timestamp_earliest_value_accc17ae": null, "hs_analytics_last_timestamp": null, "hs_analytics_last_timestamp_timestamp_latest_value_4e16365a": null, "hs_analytics_last_touch_converting_campaign": null, "hs_analytics_last_touch_converting_campaign_timestamp_latest_value_81a64e30": null, "hs_analytics_last_visit_timestamp": null, "hs_analytics_last_visit_timestamp_timestamp_latest_value_999a0fce": null, "hs_analytics_num_page_views": null, "hs_analytics_num_page_views_cardinality_sum_e46e85b0": null, "hs_analytics_num_visits": null, "hs_analytics_num_visits_cardinality_sum_53d952a6": null, "hs_analytics_source": null, "hs_analytics_source_data_1": null, "hs_analytics_source_data_1_timestamp_earliest_value_9b2f1fa1": null, "hs_analytics_source_data_2": null, "hs_analytics_source_data_2_timestamp_earliest_value_9b2f9400": null, 
"hs_analytics_source_timestamp_earliest_value_25a3a52c": null, "hs_avatar_filemanager_key": null, "hs_created_by_user_id": null, "hs_createdate": null, "hs_date_entered_customer": null, "hs_date_entered_evangelist": null, "hs_date_entered_lead": null, "hs_date_entered_marketingqualifiedlead": null, "hs_date_entered_opportunity": null, "hs_date_entered_other": null, "hs_date_entered_salesqualifiedlead": null, "hs_date_entered_subscriber": null, "hs_date_exited_customer": null, "hs_date_exited_evangelist": null, "hs_date_exited_lead": null, "hs_date_exited_marketingqualifiedlead": null, "hs_date_exited_opportunity": null, "hs_date_exited_other": null, "hs_date_exited_salesqualifiedlead": null, "hs_date_exited_subscriber": null, "hs_ideal_customer_profile": null, "hs_is_target_account": null, "hs_last_booked_meeting_date": null, "hs_last_logged_call_date": null, "hs_last_open_task_date": null, "hs_last_sales_activity_date": null, "hs_last_sales_activity_timestamp": null, "hs_lastmodifieddate": "2021-10-12T12:57:39.026000+00:00", "hs_latest_createdate_of_active_subscriptions": null, "hs_latest_meeting_activity": null, "hs_lead_status": null, "hs_merged_object_ids": null, "hs_num_blockers": 0.0, "hs_num_child_companies": 0.0, "hs_num_contacts_with_buying_roles": 0.0, "hs_num_decision_makers": 0.0, "hs_num_open_deals": 0.0, "hs_object_id": 7097813546, "hs_parent_company_id": null, "hs_pipeline": null, "hs_predictivecontactscore_v2": null, "hs_predictivecontactscore_v2_next_max_max_d4e58c1e": null, "hs_sales_email_last_replied": null, "hs_target_account": null, "hs_target_account_probability": 0.4076234698295593, "hs_target_account_recommendation_snooze_time": null, "hs_target_account_recommendation_state": null, "hs_time_in_customer": null, "hs_time_in_evangelist": null, "hs_time_in_lead": null, "hs_time_in_marketingqualifiedlead": null, "hs_time_in_opportunity": null, "hs_time_in_other": null, "hs_time_in_salesqualifiedlead": null, "hs_time_in_subscriber": null, 
"hs_total_deal_value": null, "hs_unique_creation_key": null, "hs_updated_by_user_id": null, "hs_user_ids_of_all_notification_followers": null, "hs_user_ids_of_all_notification_unfollowers": null, "hs_user_ids_of_all_owners": null, "hubspot_owner_assigneddate": null, "hubspot_owner_id": null, "hubspot_team_id": null, "hubspotscore": null, "industry": "Airbyte Test", "is_public": true, "lifecyclestage": null, "linkedin_company_page": "https://www.linkedin.com/company/bd-domain", "linkedinbio": null, "name": "test company 58", "notes_last_contacted": null, "notes_last_updated": null, "notes_next_activity_date": null, "num_associated_contacts": 0.0, "num_associated_deals": null, "num_contacted_notes": null, "num_conversion_events": null, "num_conversion_events_cardinality_sum_d095f14b": null, "num_notes": null, "numberofemployees": null, "phone": "(877) 929-0687-00291", "recent_conversion_date": null, "recent_conversion_date_timestamp_latest_value_72856da1": null, "recent_conversion_event_name": null, "recent_conversion_event_name_timestamp_latest_value_66c820bf": null, "recent_deal_amount": null, "recent_deal_close_date": null, "state": "Massachusetts", "timezone": "Asia/Dhaka", "total_money_raised": null, "total_revenue": null, "twitterbio": null, "twitterfollowers": null, "twitterhandle": null, "type": null, "web_technologies": "apache;nginx;facebook_social_plugins;facebook_connect;google_analytics;facebook_like_button;centos", "website": "test.domain.net", "zip": "1216"}, "createdAt": "2021-10-12T12:57:32.033Z", "updatedAt": "2021-10-12T12:57:39.026Z", "archived": false}, "emitted_at": 1655280717310} +{"stream": "companies", "data": {"id": "7097813549", "properties": {"about_us": null, "address": null, "address2": null, "annualrevenue": null, "city": "Test City 63", "closedate": null, "closedate_timestamp_earliest_value_a2a17e6e": null, "country": "Bangladesh", "createdate": "2021-10-12T12:57:33.571000+00:00", "days_to_close": null, "description": "This is a 
placeholder page installed by the Ubuntu release of the Lighttpd server package.", "domain": "test.domain.net", "engagements_last_meeting_booked": null, "engagements_last_meeting_booked_campaign": null, "engagements_last_meeting_booked_medium": null, "engagements_last_meeting_booked_source": null, "facebook_company_page": null, "facebookfans": null, "first_contact_createdate": null, "first_contact_createdate_timestamp_earliest_value_78b50eea": null, "first_conversion_date": null, "first_conversion_date_timestamp_earliest_value_61f58f2c": null, "first_conversion_event_name": null, "first_conversion_event_name_timestamp_earliest_value_68ddae0a": null, "first_deal_created_date": null, "founded_year": null, "googleplus_page": null, "hs_additional_domains": null, "hs_all_accessible_team_ids": null, "hs_all_assigned_business_unit_ids": null, "hs_all_owner_ids": null, "hs_all_team_ids": null, "hs_analytics_first_timestamp": null, "hs_analytics_first_timestamp_timestamp_earliest_value_11e3a63a": null, "hs_analytics_first_touch_converting_campaign": null, "hs_analytics_first_touch_converting_campaign_timestamp_earliest_value_4757fe10": null, "hs_analytics_first_visit_timestamp": null, "hs_analytics_first_visit_timestamp_timestamp_earliest_value_accc17ae": null, "hs_analytics_last_timestamp": null, "hs_analytics_last_timestamp_timestamp_latest_value_4e16365a": null, "hs_analytics_last_touch_converting_campaign": null, "hs_analytics_last_touch_converting_campaign_timestamp_latest_value_81a64e30": null, "hs_analytics_last_visit_timestamp": null, "hs_analytics_last_visit_timestamp_timestamp_latest_value_999a0fce": null, "hs_analytics_num_page_views": null, "hs_analytics_num_page_views_cardinality_sum_e46e85b0": null, "hs_analytics_num_visits": null, "hs_analytics_num_visits_cardinality_sum_53d952a6": null, "hs_analytics_source": null, "hs_analytics_source_data_1": null, "hs_analytics_source_data_1_timestamp_earliest_value_9b2f1fa1": null, "hs_analytics_source_data_2": null, 
"hs_analytics_source_data_2_timestamp_earliest_value_9b2f9400": null, "hs_analytics_source_timestamp_earliest_value_25a3a52c": null, "hs_avatar_filemanager_key": null, "hs_created_by_user_id": null, "hs_createdate": null, "hs_date_entered_customer": null, "hs_date_entered_evangelist": null, "hs_date_entered_lead": null, "hs_date_entered_marketingqualifiedlead": null, "hs_date_entered_opportunity": null, "hs_date_entered_other": null, "hs_date_entered_salesqualifiedlead": null, "hs_date_entered_subscriber": null, "hs_date_exited_customer": null, "hs_date_exited_evangelist": null, "hs_date_exited_lead": null, "hs_date_exited_marketingqualifiedlead": null, "hs_date_exited_opportunity": null, "hs_date_exited_other": null, "hs_date_exited_salesqualifiedlead": null, "hs_date_exited_subscriber": null, "hs_ideal_customer_profile": null, "hs_is_target_account": null, "hs_last_booked_meeting_date": null, "hs_last_logged_call_date": null, "hs_last_open_task_date": null, "hs_last_sales_activity_date": null, "hs_last_sales_activity_timestamp": null, "hs_lastmodifieddate": "2021-10-12T12:58:39.669000+00:00", "hs_latest_createdate_of_active_subscriptions": null, "hs_latest_meeting_activity": null, "hs_lead_status": null, "hs_merged_object_ids": null, "hs_num_blockers": 0.0, "hs_num_child_companies": 0.0, "hs_num_contacts_with_buying_roles": 0.0, "hs_num_decision_makers": 0.0, "hs_num_open_deals": 0.0, "hs_object_id": 7097813549, "hs_parent_company_id": null, "hs_pipeline": null, "hs_predictivecontactscore_v2": null, "hs_predictivecontactscore_v2_next_max_max_d4e58c1e": null, "hs_sales_email_last_replied": null, "hs_target_account": null, "hs_target_account_probability": 0.4076234698295593, "hs_target_account_recommendation_snooze_time": null, "hs_target_account_recommendation_state": null, "hs_time_in_customer": null, "hs_time_in_evangelist": null, "hs_time_in_lead": null, "hs_time_in_marketingqualifiedlead": null, "hs_time_in_opportunity": null, "hs_time_in_other": null, 
"hs_time_in_salesqualifiedlead": null, "hs_time_in_subscriber": null, "hs_total_deal_value": null, "hs_unique_creation_key": null, "hs_updated_by_user_id": null, "hs_user_ids_of_all_notification_followers": null, "hs_user_ids_of_all_notification_unfollowers": null, "hs_user_ids_of_all_owners": null, "hubspot_owner_assigneddate": null, "hubspot_owner_id": null, "hubspot_team_id": null, "hubspotscore": null, "industry": "Airbyte Test", "is_public": true, "lifecyclestage": null, "linkedin_company_page": "https://www.linkedin.com/company/bd-domain", "linkedinbio": null, "name": "test company 63", "notes_last_contacted": null, "notes_last_updated": null, "notes_next_activity_date": null, "num_associated_contacts": 0.0, "num_associated_deals": null, "num_contacted_notes": null, "num_conversion_events": null, "num_conversion_events_cardinality_sum_d095f14b": null, "num_notes": null, "numberofemployees": null, "phone": "(877) 929-0687-00291", "recent_conversion_date": null, "recent_conversion_date_timestamp_latest_value_72856da1": null, "recent_conversion_event_name": null, "recent_conversion_event_name_timestamp_latest_value_66c820bf": null, "recent_deal_amount": null, "recent_deal_close_date": null, "state": "Massachusetts", "timezone": "Asia/Dhaka", "total_money_raised": null, "total_revenue": null, "twitterbio": null, "twitterfollowers": null, "twitterhandle": null, "type": null, "web_technologies": "apache;nginx;facebook_social_plugins;facebook_connect;google_analytics;facebook_like_button;centos", "website": "test.domain.net", "zip": "1216"}, "createdAt": "2021-10-12T12:57:33.571Z", "updatedAt": "2021-10-12T12:58:39.669Z", "archived": false}, "emitted_at": 1655280717311} +{"stream": "companies", "data": {"id": "7097813555", "properties": {"about_us": null, "address": null, "address2": null, "annualrevenue": null, "city": "Test City 94", "closedate": null, "closedate_timestamp_earliest_value_a2a17e6e": null, "country": "Bangladesh", "createdate": 
"2021-10-12T12:57:44.598000+00:00", "days_to_close": null, "description": "This is a placeholder page installed by the Ubuntu release of the Lighttpd server package.", "domain": "test.domain.net", "engagements_last_meeting_booked": null, "engagements_last_meeting_booked_campaign": null, "engagements_last_meeting_booked_medium": null, "engagements_last_meeting_booked_source": null, "facebook_company_page": null, "facebookfans": null, "first_contact_createdate": null, "first_contact_createdate_timestamp_earliest_value_78b50eea": null, "first_conversion_date": null, "first_conversion_date_timestamp_earliest_value_61f58f2c": null, "first_conversion_event_name": null, "first_conversion_event_name_timestamp_earliest_value_68ddae0a": null, "first_deal_created_date": null, "founded_year": null, "googleplus_page": null, "hs_additional_domains": null, "hs_all_accessible_team_ids": null, "hs_all_assigned_business_unit_ids": null, "hs_all_owner_ids": null, "hs_all_team_ids": null, "hs_analytics_first_timestamp": null, "hs_analytics_first_timestamp_timestamp_earliest_value_11e3a63a": null, "hs_analytics_first_touch_converting_campaign": null, "hs_analytics_first_touch_converting_campaign_timestamp_earliest_value_4757fe10": null, "hs_analytics_first_visit_timestamp": null, "hs_analytics_first_visit_timestamp_timestamp_earliest_value_accc17ae": null, "hs_analytics_last_timestamp": null, "hs_analytics_last_timestamp_timestamp_latest_value_4e16365a": null, "hs_analytics_last_touch_converting_campaign": null, "hs_analytics_last_touch_converting_campaign_timestamp_latest_value_81a64e30": null, "hs_analytics_last_visit_timestamp": null, "hs_analytics_last_visit_timestamp_timestamp_latest_value_999a0fce": null, "hs_analytics_num_page_views": null, "hs_analytics_num_page_views_cardinality_sum_e46e85b0": null, "hs_analytics_num_visits": null, "hs_analytics_num_visits_cardinality_sum_53d952a6": null, "hs_analytics_source": null, "hs_analytics_source_data_1": null, 
"hs_analytics_source_data_1_timestamp_earliest_value_9b2f1fa1": null, "hs_analytics_source_data_2": null, "hs_analytics_source_data_2_timestamp_earliest_value_9b2f9400": null, "hs_analytics_source_timestamp_earliest_value_25a3a52c": null, "hs_avatar_filemanager_key": null, "hs_created_by_user_id": null, "hs_createdate": null, "hs_date_entered_customer": null, "hs_date_entered_evangelist": null, "hs_date_entered_lead": null, "hs_date_entered_marketingqualifiedlead": null, "hs_date_entered_opportunity": null, "hs_date_entered_other": null, "hs_date_entered_salesqualifiedlead": null, "hs_date_entered_subscriber": null, "hs_date_exited_customer": null, "hs_date_exited_evangelist": null, "hs_date_exited_lead": null, "hs_date_exited_marketingqualifiedlead": null, "hs_date_exited_opportunity": null, "hs_date_exited_other": null, "hs_date_exited_salesqualifiedlead": null, "hs_date_exited_subscriber": null, "hs_ideal_customer_profile": null, "hs_is_target_account": null, "hs_last_booked_meeting_date": null, "hs_last_logged_call_date": null, "hs_last_open_task_date": null, "hs_last_sales_activity_date": null, "hs_last_sales_activity_timestamp": null, "hs_lastmodifieddate": "2021-10-12T12:57:54.719000+00:00", "hs_latest_createdate_of_active_subscriptions": null, "hs_latest_meeting_activity": null, "hs_lead_status": null, "hs_merged_object_ids": null, "hs_num_blockers": 0.0, "hs_num_child_companies": 0.0, "hs_num_contacts_with_buying_roles": 0.0, "hs_num_decision_makers": 0.0, "hs_num_open_deals": 0.0, "hs_object_id": 7097813555, "hs_parent_company_id": null, "hs_pipeline": null, "hs_predictivecontactscore_v2": null, "hs_predictivecontactscore_v2_next_max_max_d4e58c1e": null, "hs_sales_email_last_replied": null, "hs_target_account": null, "hs_target_account_probability": 0.4076234698295593, "hs_target_account_recommendation_snooze_time": null, "hs_target_account_recommendation_state": null, "hs_time_in_customer": null, "hs_time_in_evangelist": null, "hs_time_in_lead": null, 
"hs_time_in_marketingqualifiedlead": null, "hs_time_in_opportunity": null, "hs_time_in_other": null, "hs_time_in_salesqualifiedlead": null, "hs_time_in_subscriber": null, "hs_total_deal_value": null, "hs_unique_creation_key": null, "hs_updated_by_user_id": null, "hs_user_ids_of_all_notification_followers": null, "hs_user_ids_of_all_notification_unfollowers": null, "hs_user_ids_of_all_owners": null, "hubspot_owner_assigneddate": null, "hubspot_owner_id": null, "hubspot_team_id": null, "hubspotscore": null, "industry": "Airbyte Test", "is_public": true, "lifecyclestage": null, "linkedin_company_page": "https://www.linkedin.com/company/bd-domain", "linkedinbio": null, "name": "test company 94", "notes_last_contacted": null, "notes_last_updated": null, "notes_next_activity_date": null, "num_associated_contacts": 0.0, "num_associated_deals": null, "num_contacted_notes": null, "num_conversion_events": null, "num_conversion_events_cardinality_sum_d095f14b": null, "num_notes": null, "numberofemployees": null, "phone": "(877) 929-0687-00291", "recent_conversion_date": null, "recent_conversion_date_timestamp_latest_value_72856da1": null, "recent_conversion_event_name": null, "recent_conversion_event_name_timestamp_latest_value_66c820bf": null, "recent_deal_amount": null, "recent_deal_close_date": null, "state": "Massachusetts", "timezone": "Asia/Dhaka", "total_money_raised": null, "total_revenue": null, "twitterbio": null, "twitterfollowers": null, "twitterhandle": null, "type": null, "web_technologies": "apache;nginx;facebook_social_plugins;facebook_connect;google_analytics;facebook_like_button;centos", "website": "test.domain.net", "zip": "1216"}, "createdAt": "2021-10-12T12:57:44.598Z", "updatedAt": "2021-10-12T12:57:54.719Z", "archived": false}, "emitted_at": 1655280717312} +{"stream": "companies", "data": {"id": "7097820009", "properties": {"about_us": null, "address": null, "address2": null, "annualrevenue": null, "city": "Test City 8", "closedate": null, 
"closedate_timestamp_earliest_value_a2a17e6e": null, "country": "Bangladesh", "createdate": "2021-10-12T12:57:15.269000+00:00", "days_to_close": null, "description": "This is a placeholder page installed by the Ubuntu release of the Lighttpd server package.", "domain": "test.domain.net", "engagements_last_meeting_booked": null, "engagements_last_meeting_booked_campaign": null, "engagements_last_meeting_booked_medium": null, "engagements_last_meeting_booked_source": null, "facebook_company_page": null, "facebookfans": null, "first_contact_createdate": null, "first_contact_createdate_timestamp_earliest_value_78b50eea": null, "first_conversion_date": null, "first_conversion_date_timestamp_earliest_value_61f58f2c": null, "first_conversion_event_name": null, "first_conversion_event_name_timestamp_earliest_value_68ddae0a": null, "first_deal_created_date": null, "founded_year": null, "googleplus_page": null, "hs_additional_domains": null, "hs_all_accessible_team_ids": null, "hs_all_assigned_business_unit_ids": null, "hs_all_owner_ids": null, "hs_all_team_ids": null, "hs_analytics_first_timestamp": null, "hs_analytics_first_timestamp_timestamp_earliest_value_11e3a63a": null, "hs_analytics_first_touch_converting_campaign": null, "hs_analytics_first_touch_converting_campaign_timestamp_earliest_value_4757fe10": null, "hs_analytics_first_visit_timestamp": null, "hs_analytics_first_visit_timestamp_timestamp_earliest_value_accc17ae": null, "hs_analytics_last_timestamp": null, "hs_analytics_last_timestamp_timestamp_latest_value_4e16365a": null, "hs_analytics_last_touch_converting_campaign": null, "hs_analytics_last_touch_converting_campaign_timestamp_latest_value_81a64e30": null, "hs_analytics_last_visit_timestamp": null, "hs_analytics_last_visit_timestamp_timestamp_latest_value_999a0fce": null, "hs_analytics_num_page_views": null, "hs_analytics_num_page_views_cardinality_sum_e46e85b0": null, "hs_analytics_num_visits": null, "hs_analytics_num_visits_cardinality_sum_53d952a6": 
null, "hs_analytics_source": null, "hs_analytics_source_data_1": null, "hs_analytics_source_data_1_timestamp_earliest_value_9b2f1fa1": null, "hs_analytics_source_data_2": null, "hs_analytics_source_data_2_timestamp_earliest_value_9b2f9400": null, "hs_analytics_source_timestamp_earliest_value_25a3a52c": null, "hs_avatar_filemanager_key": null, "hs_created_by_user_id": null, "hs_createdate": null, "hs_date_entered_customer": null, "hs_date_entered_evangelist": null, "hs_date_entered_lead": null, "hs_date_entered_marketingqualifiedlead": null, "hs_date_entered_opportunity": null, "hs_date_entered_other": null, "hs_date_entered_salesqualifiedlead": null, "hs_date_entered_subscriber": null, "hs_date_exited_customer": null, "hs_date_exited_evangelist": null, "hs_date_exited_lead": null, "hs_date_exited_marketingqualifiedlead": null, "hs_date_exited_opportunity": null, "hs_date_exited_other": null, "hs_date_exited_salesqualifiedlead": null, "hs_date_exited_subscriber": null, "hs_ideal_customer_profile": null, "hs_is_target_account": null, "hs_last_booked_meeting_date": null, "hs_last_logged_call_date": null, "hs_last_open_task_date": null, "hs_last_sales_activity_date": null, "hs_last_sales_activity_timestamp": null, "hs_lastmodifieddate": "2021-10-12T12:58:19.169000+00:00", "hs_latest_createdate_of_active_subscriptions": null, "hs_latest_meeting_activity": null, "hs_lead_status": null, "hs_merged_object_ids": null, "hs_num_blockers": 0.0, "hs_num_child_companies": 0.0, "hs_num_contacts_with_buying_roles": 0.0, "hs_num_decision_makers": 0.0, "hs_num_open_deals": 0.0, "hs_object_id": 7097820009, "hs_parent_company_id": null, "hs_pipeline": null, "hs_predictivecontactscore_v2": null, "hs_predictivecontactscore_v2_next_max_max_d4e58c1e": null, "hs_sales_email_last_replied": null, "hs_target_account": null, "hs_target_account_probability": 0.4076234698295593, "hs_target_account_recommendation_snooze_time": null, "hs_target_account_recommendation_state": null, 
"hs_time_in_customer": null, "hs_time_in_evangelist": null, "hs_time_in_lead": null, "hs_time_in_marketingqualifiedlead": null, "hs_time_in_opportunity": null, "hs_time_in_other": null, "hs_time_in_salesqualifiedlead": null, "hs_time_in_subscriber": null, "hs_total_deal_value": null, "hs_unique_creation_key": null, "hs_updated_by_user_id": null, "hs_user_ids_of_all_notification_followers": null, "hs_user_ids_of_all_notification_unfollowers": null, "hs_user_ids_of_all_owners": null, "hubspot_owner_assigneddate": null, "hubspot_owner_id": null, "hubspot_team_id": null, "hubspotscore": null, "industry": "Airbyte Test", "is_public": true, "lifecyclestage": null, "linkedin_company_page": "https://www.linkedin.com/company/bd-domain", "linkedinbio": null, "name": "test company 8", "notes_last_contacted": null, "notes_last_updated": null, "notes_next_activity_date": null, "num_associated_contacts": 0.0, "num_associated_deals": null, "num_contacted_notes": null, "num_conversion_events": null, "num_conversion_events_cardinality_sum_d095f14b": null, "num_notes": null, "numberofemployees": null, "phone": "(877) 929-0687-00291", "recent_conversion_date": null, "recent_conversion_date_timestamp_latest_value_72856da1": null, "recent_conversion_event_name": null, "recent_conversion_event_name_timestamp_latest_value_66c820bf": null, "recent_deal_amount": null, "recent_deal_close_date": null, "state": "Massachusetts", "timezone": "Asia/Dhaka", "total_money_raised": null, "total_revenue": null, "twitterbio": null, "twitterfollowers": null, "twitterhandle": null, "type": null, "web_technologies": "apache;nginx;facebook_social_plugins;facebook_connect;google_analytics;facebook_like_button;centos", "website": "test.domain.net", "zip": "1216"}, "createdAt": "2021-10-12T12:57:15.269Z", "updatedAt": "2021-10-12T12:58:19.169Z", "archived": false}, "emitted_at": 1655280717313} +{"stream": "companies", "data": {"id": "7097820012", "properties": {"about_us": null, "address": null, "address2": 
null, "annualrevenue": null, "city": "Test City 12", "closedate": null, "closedate_timestamp_earliest_value_a2a17e6e": null, "country": "Bangladesh", "createdate": "2021-10-12T12:57:16.869000+00:00", "days_to_close": null, "description": "This is a placeholder page installed by the Ubuntu release of the Lighttpd server package.", "domain": "test.domain.net", "engagements_last_meeting_booked": null, "engagements_last_meeting_booked_campaign": null, "engagements_last_meeting_booked_medium": null, "engagements_last_meeting_booked_source": null, "facebook_company_page": null, "facebookfans": null, "first_contact_createdate": null, "first_contact_createdate_timestamp_earliest_value_78b50eea": null, "first_conversion_date": null, "first_conversion_date_timestamp_earliest_value_61f58f2c": null, "first_conversion_event_name": null, "first_conversion_event_name_timestamp_earliest_value_68ddae0a": null, "first_deal_created_date": null, "founded_year": null, "googleplus_page": null, "hs_additional_domains": null, "hs_all_accessible_team_ids": null, "hs_all_assigned_business_unit_ids": null, "hs_all_owner_ids": null, "hs_all_team_ids": null, "hs_analytics_first_timestamp": null, "hs_analytics_first_timestamp_timestamp_earliest_value_11e3a63a": null, "hs_analytics_first_touch_converting_campaign": null, "hs_analytics_first_touch_converting_campaign_timestamp_earliest_value_4757fe10": null, "hs_analytics_first_visit_timestamp": null, "hs_analytics_first_visit_timestamp_timestamp_earliest_value_accc17ae": null, "hs_analytics_last_timestamp": null, "hs_analytics_last_timestamp_timestamp_latest_value_4e16365a": null, "hs_analytics_last_touch_converting_campaign": null, "hs_analytics_last_touch_converting_campaign_timestamp_latest_value_81a64e30": null, "hs_analytics_last_visit_timestamp": null, "hs_analytics_last_visit_timestamp_timestamp_latest_value_999a0fce": null, "hs_analytics_num_page_views": null, "hs_analytics_num_page_views_cardinality_sum_e46e85b0": null, 
"hs_analytics_num_visits": null, "hs_analytics_num_visits_cardinality_sum_53d952a6": null, "hs_analytics_source": null, "hs_analytics_source_data_1": null, "hs_analytics_source_data_1_timestamp_earliest_value_9b2f1fa1": null, "hs_analytics_source_data_2": null, "hs_analytics_source_data_2_timestamp_earliest_value_9b2f9400": null, "hs_analytics_source_timestamp_earliest_value_25a3a52c": null, "hs_avatar_filemanager_key": null, "hs_created_by_user_id": null, "hs_createdate": null, "hs_date_entered_customer": null, "hs_date_entered_evangelist": null, "hs_date_entered_lead": null, "hs_date_entered_marketingqualifiedlead": null, "hs_date_entered_opportunity": null, "hs_date_entered_other": null, "hs_date_entered_salesqualifiedlead": null, "hs_date_entered_subscriber": null, "hs_date_exited_customer": null, "hs_date_exited_evangelist": null, "hs_date_exited_lead": null, "hs_date_exited_marketingqualifiedlead": null, "hs_date_exited_opportunity": null, "hs_date_exited_other": null, "hs_date_exited_salesqualifiedlead": null, "hs_date_exited_subscriber": null, "hs_ideal_customer_profile": null, "hs_is_target_account": null, "hs_last_booked_meeting_date": null, "hs_last_logged_call_date": null, "hs_last_open_task_date": null, "hs_last_sales_activity_date": null, "hs_last_sales_activity_timestamp": null, "hs_lastmodifieddate": "2021-10-12T12:58:22.438000+00:00", "hs_latest_createdate_of_active_subscriptions": null, "hs_latest_meeting_activity": null, "hs_lead_status": null, "hs_merged_object_ids": null, "hs_num_blockers": 0.0, "hs_num_child_companies": 0.0, "hs_num_contacts_with_buying_roles": 0.0, "hs_num_decision_makers": 0.0, "hs_num_open_deals": 0.0, "hs_object_id": 7097820012, "hs_parent_company_id": null, "hs_pipeline": null, "hs_predictivecontactscore_v2": null, "hs_predictivecontactscore_v2_next_max_max_d4e58c1e": null, "hs_sales_email_last_replied": null, "hs_target_account": null, "hs_target_account_probability": 0.4076234698295593, 
"hs_target_account_recommendation_snooze_time": null, "hs_target_account_recommendation_state": null, "hs_time_in_customer": null, "hs_time_in_evangelist": null, "hs_time_in_lead": null, "hs_time_in_marketingqualifiedlead": null, "hs_time_in_opportunity": null, "hs_time_in_other": null, "hs_time_in_salesqualifiedlead": null, "hs_time_in_subscriber": null, "hs_total_deal_value": null, "hs_unique_creation_key": null, "hs_updated_by_user_id": null, "hs_user_ids_of_all_notification_followers": null, "hs_user_ids_of_all_notification_unfollowers": null, "hs_user_ids_of_all_owners": null, "hubspot_owner_assigneddate": null, "hubspot_owner_id": null, "hubspot_team_id": null, "hubspotscore": null, "industry": "Airbyte Test", "is_public": true, "lifecyclestage": null, "linkedin_company_page": "https://www.linkedin.com/company/bd-domain", "linkedinbio": null, "name": "test company 12", "notes_last_contacted": null, "notes_last_updated": null, "notes_next_activity_date": null, "num_associated_contacts": 0.0, "num_associated_deals": null, "num_contacted_notes": null, "num_conversion_events": null, "num_conversion_events_cardinality_sum_d095f14b": null, "num_notes": null, "numberofemployees": null, "phone": "(877) 929-0687-00291", "recent_conversion_date": null, "recent_conversion_date_timestamp_latest_value_72856da1": null, "recent_conversion_event_name": null, "recent_conversion_event_name_timestamp_latest_value_66c820bf": null, "recent_deal_amount": null, "recent_deal_close_date": null, "state": "Massachusetts", "timezone": "Asia/Dhaka", "total_money_raised": null, "total_revenue": null, "twitterbio": null, "twitterfollowers": null, "twitterhandle": null, "type": null, "web_technologies": "apache;nginx;facebook_social_plugins;facebook_connect;google_analytics;facebook_like_button;centos", "website": "test.domain.net", "zip": "1216"}, "createdAt": "2021-10-12T12:57:16.869Z", "updatedAt": "2021-10-12T12:58:22.438Z", "archived": false}, "emitted_at": 1655280717315} +{"stream": 
"companies", "data": {"id": "7097820024", "properties": {"about_us": null, "address": null, "address2": null, "annualrevenue": null, "city": "Test City 32", "closedate": null, "closedate_timestamp_earliest_value_a2a17e6e": null, "country": "Bangladesh", "createdate": "2021-10-12T12:57:23.192000+00:00", "days_to_close": null, "description": "This is a placeholder page installed by the Ubuntu release of the Lighttpd server package.", "domain": "test.domain.net", "engagements_last_meeting_booked": null, "engagements_last_meeting_booked_campaign": null, "engagements_last_meeting_booked_medium": null, "engagements_last_meeting_booked_source": null, "facebook_company_page": null, "facebookfans": null, "first_contact_createdate": null, "first_contact_createdate_timestamp_earliest_value_78b50eea": null, "first_conversion_date": null, "first_conversion_date_timestamp_earliest_value_61f58f2c": null, "first_conversion_event_name": null, "first_conversion_event_name_timestamp_earliest_value_68ddae0a": null, "first_deal_created_date": null, "founded_year": null, "googleplus_page": null, "hs_additional_domains": null, "hs_all_accessible_team_ids": null, "hs_all_assigned_business_unit_ids": null, "hs_all_owner_ids": null, "hs_all_team_ids": null, "hs_analytics_first_timestamp": null, "hs_analytics_first_timestamp_timestamp_earliest_value_11e3a63a": null, "hs_analytics_first_touch_converting_campaign": null, "hs_analytics_first_touch_converting_campaign_timestamp_earliest_value_4757fe10": null, "hs_analytics_first_visit_timestamp": null, "hs_analytics_first_visit_timestamp_timestamp_earliest_value_accc17ae": null, "hs_analytics_last_timestamp": null, "hs_analytics_last_timestamp_timestamp_latest_value_4e16365a": null, "hs_analytics_last_touch_converting_campaign": null, "hs_analytics_last_touch_converting_campaign_timestamp_latest_value_81a64e30": null, "hs_analytics_last_visit_timestamp": null, "hs_analytics_last_visit_timestamp_timestamp_latest_value_999a0fce": null, 
"hs_analytics_num_page_views": null, "hs_analytics_num_page_views_cardinality_sum_e46e85b0": null, "hs_analytics_num_visits": null, "hs_analytics_num_visits_cardinality_sum_53d952a6": null, "hs_analytics_source": null, "hs_analytics_source_data_1": null, "hs_analytics_source_data_1_timestamp_earliest_value_9b2f1fa1": null, "hs_analytics_source_data_2": null, "hs_analytics_source_data_2_timestamp_earliest_value_9b2f9400": null, "hs_analytics_source_timestamp_earliest_value_25a3a52c": null, "hs_avatar_filemanager_key": null, "hs_created_by_user_id": null, "hs_createdate": null, "hs_date_entered_customer": null, "hs_date_entered_evangelist": null, "hs_date_entered_lead": null, "hs_date_entered_marketingqualifiedlead": null, "hs_date_entered_opportunity": null, "hs_date_entered_other": null, "hs_date_entered_salesqualifiedlead": null, "hs_date_entered_subscriber": null, "hs_date_exited_customer": null, "hs_date_exited_evangelist": null, "hs_date_exited_lead": null, "hs_date_exited_marketingqualifiedlead": null, "hs_date_exited_opportunity": null, "hs_date_exited_other": null, "hs_date_exited_salesqualifiedlead": null, "hs_date_exited_subscriber": null, "hs_ideal_customer_profile": null, "hs_is_target_account": null, "hs_last_booked_meeting_date": null, "hs_last_logged_call_date": null, "hs_last_open_task_date": null, "hs_last_sales_activity_date": null, "hs_last_sales_activity_timestamp": null, "hs_lastmodifieddate": "2021-10-12T12:57:44.093000+00:00", "hs_latest_createdate_of_active_subscriptions": null, "hs_latest_meeting_activity": null, "hs_lead_status": null, "hs_merged_object_ids": null, "hs_num_blockers": 0.0, "hs_num_child_companies": 0.0, "hs_num_contacts_with_buying_roles": 0.0, "hs_num_decision_makers": 0.0, "hs_num_open_deals": 0.0, "hs_object_id": 7097820024, "hs_parent_company_id": null, "hs_pipeline": null, "hs_predictivecontactscore_v2": null, "hs_predictivecontactscore_v2_next_max_max_d4e58c1e": null, "hs_sales_email_last_replied": null, 
"hs_target_account": null, "hs_target_account_probability": 0.4076234698295593, "hs_target_account_recommendation_snooze_time": null, "hs_target_account_recommendation_state": null, "hs_time_in_customer": null, "hs_time_in_evangelist": null, "hs_time_in_lead": null, "hs_time_in_marketingqualifiedlead": null, "hs_time_in_opportunity": null, "hs_time_in_other": null, "hs_time_in_salesqualifiedlead": null, "hs_time_in_subscriber": null, "hs_total_deal_value": null, "hs_unique_creation_key": null, "hs_updated_by_user_id": null, "hs_user_ids_of_all_notification_followers": null, "hs_user_ids_of_all_notification_unfollowers": null, "hs_user_ids_of_all_owners": null, "hubspot_owner_assigneddate": null, "hubspot_owner_id": null, "hubspot_team_id": null, "hubspotscore": null, "industry": "Airbyte Test", "is_public": true, "lifecyclestage": null, "linkedin_company_page": "https://www.linkedin.com/company/bd-domain", "linkedinbio": null, "name": "test company 32", "notes_last_contacted": null, "notes_last_updated": null, "notes_next_activity_date": null, "num_associated_contacts": 0.0, "num_associated_deals": null, "num_contacted_notes": null, "num_conversion_events": null, "num_conversion_events_cardinality_sum_d095f14b": null, "num_notes": null, "numberofemployees": null, "phone": "(877) 929-0687-00291", "recent_conversion_date": null, "recent_conversion_date_timestamp_latest_value_72856da1": null, "recent_conversion_event_name": null, "recent_conversion_event_name_timestamp_latest_value_66c820bf": null, "recent_deal_amount": null, "recent_deal_close_date": null, "state": "Massachusetts", "timezone": "Asia/Dhaka", "total_money_raised": null, "total_revenue": null, "twitterbio": null, "twitterfollowers": null, "twitterhandle": null, "type": null, "web_technologies": "apache;nginx;facebook_social_plugins;facebook_connect;google_analytics;facebook_like_button;centos", "website": "test.domain.net", "zip": "1216"}, "createdAt": "2021-10-12T12:57:23.192Z", "updatedAt": 
"2021-10-12T12:57:44.093Z", "archived": false}, "emitted_at": 1655280717316} +{"stream": "companies", "data": {"id": "7097820027", "properties": {"about_us": null, "address": null, "address2": null, "annualrevenue": null, "city": "Test City 39", "closedate": null, "closedate_timestamp_earliest_value_a2a17e6e": null, "country": "Bangladesh", "createdate": "2021-10-12T12:57:25.722000+00:00", "days_to_close": null, "description": "This is a placeholder page installed by the Ubuntu release of the Lighttpd server package.", "domain": "test.domain.net", "engagements_last_meeting_booked": null, "engagements_last_meeting_booked_campaign": null, "engagements_last_meeting_booked_medium": null, "engagements_last_meeting_booked_source": null, "facebook_company_page": null, "facebookfans": null, "first_contact_createdate": null, "first_contact_createdate_timestamp_earliest_value_78b50eea": null, "first_conversion_date": null, "first_conversion_date_timestamp_earliest_value_61f58f2c": null, "first_conversion_event_name": null, "first_conversion_event_name_timestamp_earliest_value_68ddae0a": null, "first_deal_created_date": null, "founded_year": null, "googleplus_page": null, "hs_additional_domains": null, "hs_all_accessible_team_ids": null, "hs_all_assigned_business_unit_ids": null, "hs_all_owner_ids": null, "hs_all_team_ids": null, "hs_analytics_first_timestamp": null, "hs_analytics_first_timestamp_timestamp_earliest_value_11e3a63a": null, "hs_analytics_first_touch_converting_campaign": null, "hs_analytics_first_touch_converting_campaign_timestamp_earliest_value_4757fe10": null, "hs_analytics_first_visit_timestamp": null, "hs_analytics_first_visit_timestamp_timestamp_earliest_value_accc17ae": null, "hs_analytics_last_timestamp": null, "hs_analytics_last_timestamp_timestamp_latest_value_4e16365a": null, "hs_analytics_last_touch_converting_campaign": null, "hs_analytics_last_touch_converting_campaign_timestamp_latest_value_81a64e30": null, "hs_analytics_last_visit_timestamp": 
null, "hs_analytics_last_visit_timestamp_timestamp_latest_value_999a0fce": null, "hs_analytics_num_page_views": null, "hs_analytics_num_page_views_cardinality_sum_e46e85b0": null, "hs_analytics_num_visits": null, "hs_analytics_num_visits_cardinality_sum_53d952a6": null, "hs_analytics_source": null, "hs_analytics_source_data_1": null, "hs_analytics_source_data_1_timestamp_earliest_value_9b2f1fa1": null, "hs_analytics_source_data_2": null, "hs_analytics_source_data_2_timestamp_earliest_value_9b2f9400": null, "hs_analytics_source_timestamp_earliest_value_25a3a52c": null, "hs_avatar_filemanager_key": null, "hs_created_by_user_id": null, "hs_createdate": null, "hs_date_entered_customer": null, "hs_date_entered_evangelist": null, "hs_date_entered_lead": null, "hs_date_entered_marketingqualifiedlead": null, "hs_date_entered_opportunity": null, "hs_date_entered_other": null, "hs_date_entered_salesqualifiedlead": null, "hs_date_entered_subscriber": null, "hs_date_exited_customer": null, "hs_date_exited_evangelist": null, "hs_date_exited_lead": null, "hs_date_exited_marketingqualifiedlead": null, "hs_date_exited_opportunity": null, "hs_date_exited_other": null, "hs_date_exited_salesqualifiedlead": null, "hs_date_exited_subscriber": null, "hs_ideal_customer_profile": null, "hs_is_target_account": null, "hs_last_booked_meeting_date": null, "hs_last_logged_call_date": null, "hs_last_open_task_date": null, "hs_last_sales_activity_date": null, "hs_last_sales_activity_timestamp": null, "hs_lastmodifieddate": "2021-10-12T12:57:31.975000+00:00", "hs_latest_createdate_of_active_subscriptions": null, "hs_latest_meeting_activity": null, "hs_lead_status": null, "hs_merged_object_ids": null, "hs_num_blockers": 0.0, "hs_num_child_companies": 0.0, "hs_num_contacts_with_buying_roles": 0.0, "hs_num_decision_makers": 0.0, "hs_num_open_deals": 0.0, "hs_object_id": 7097820027, "hs_parent_company_id": null, "hs_pipeline": null, "hs_predictivecontactscore_v2": null, 
"hs_predictivecontactscore_v2_next_max_max_d4e58c1e": null, "hs_sales_email_last_replied": null, "hs_target_account": null, "hs_target_account_probability": 0.4076234698295593, "hs_target_account_recommendation_snooze_time": null, "hs_target_account_recommendation_state": null, "hs_time_in_customer": null, "hs_time_in_evangelist": null, "hs_time_in_lead": null, "hs_time_in_marketingqualifiedlead": null, "hs_time_in_opportunity": null, "hs_time_in_other": null, "hs_time_in_salesqualifiedlead": null, "hs_time_in_subscriber": null, "hs_total_deal_value": null, "hs_unique_creation_key": null, "hs_updated_by_user_id": null, "hs_user_ids_of_all_notification_followers": null, "hs_user_ids_of_all_notification_unfollowers": null, "hs_user_ids_of_all_owners": null, "hubspot_owner_assigneddate": null, "hubspot_owner_id": null, "hubspot_team_id": null, "hubspotscore": null, "industry": "Airbyte Test", "is_public": true, "lifecyclestage": null, "linkedin_company_page": "https://www.linkedin.com/company/bd-domain", "linkedinbio": null, "name": "test company 39", "notes_last_contacted": null, "notes_last_updated": null, "notes_next_activity_date": null, "num_associated_contacts": 0.0, "num_associated_deals": null, "num_contacted_notes": null, "num_conversion_events": null, "num_conversion_events_cardinality_sum_d095f14b": null, "num_notes": null, "numberofemployees": null, "phone": "(877) 929-0687-00291", "recent_conversion_date": null, "recent_conversion_date_timestamp_latest_value_72856da1": null, "recent_conversion_event_name": null, "recent_conversion_event_name_timestamp_latest_value_66c820bf": null, "recent_deal_amount": null, "recent_deal_close_date": null, "state": "Massachusetts", "timezone": "Asia/Dhaka", "total_money_raised": null, "total_revenue": null, "twitterbio": null, "twitterfollowers": null, "twitterhandle": null, "type": null, "web_technologies": "apache;nginx;facebook_social_plugins;facebook_connect;google_analytics;facebook_like_button;centos", "website": 
"test.domain.net", "zip": "1216"}, "createdAt": "2021-10-12T12:57:25.722Z", "updatedAt": "2021-10-12T12:57:31.975Z", "archived": false}, "emitted_at": 1655280717317} +{"stream": "companies", "data": {"id": "7097820029", "properties": {"about_us": null, "address": null, "address2": null, "annualrevenue": null, "city": "Test City 42", "closedate": null, "closedate_timestamp_earliest_value_a2a17e6e": null, "country": "Bangladesh", "createdate": "2021-10-12T12:57:26.601000+00:00", "days_to_close": null, "description": "This is a placeholder page installed by the Ubuntu release of the Lighttpd server package.", "domain": "test.domain.net", "engagements_last_meeting_booked": null, "engagements_last_meeting_booked_campaign": null, "engagements_last_meeting_booked_medium": null, "engagements_last_meeting_booked_source": null, "facebook_company_page": null, "facebookfans": null, "first_contact_createdate": null, "first_contact_createdate_timestamp_earliest_value_78b50eea": null, "first_conversion_date": null, "first_conversion_date_timestamp_earliest_value_61f58f2c": null, "first_conversion_event_name": null, "first_conversion_event_name_timestamp_earliest_value_68ddae0a": null, "first_deal_created_date": null, "founded_year": null, "googleplus_page": null, "hs_additional_domains": null, "hs_all_accessible_team_ids": null, "hs_all_assigned_business_unit_ids": null, "hs_all_owner_ids": null, "hs_all_team_ids": null, "hs_analytics_first_timestamp": null, "hs_analytics_first_timestamp_timestamp_earliest_value_11e3a63a": null, "hs_analytics_first_touch_converting_campaign": null, "hs_analytics_first_touch_converting_campaign_timestamp_earliest_value_4757fe10": null, "hs_analytics_first_visit_timestamp": null, "hs_analytics_first_visit_timestamp_timestamp_earliest_value_accc17ae": null, "hs_analytics_last_timestamp": null, "hs_analytics_last_timestamp_timestamp_latest_value_4e16365a": null, "hs_analytics_last_touch_converting_campaign": null, 
"hs_analytics_last_touch_converting_campaign_timestamp_latest_value_81a64e30": null, "hs_analytics_last_visit_timestamp": null, "hs_analytics_last_visit_timestamp_timestamp_latest_value_999a0fce": null, "hs_analytics_num_page_views": null, "hs_analytics_num_page_views_cardinality_sum_e46e85b0": null, "hs_analytics_num_visits": null, "hs_analytics_num_visits_cardinality_sum_53d952a6": null, "hs_analytics_source": null, "hs_analytics_source_data_1": null, "hs_analytics_source_data_1_timestamp_earliest_value_9b2f1fa1": null, "hs_analytics_source_data_2": null, "hs_analytics_source_data_2_timestamp_earliest_value_9b2f9400": null, "hs_analytics_source_timestamp_earliest_value_25a3a52c": null, "hs_avatar_filemanager_key": null, "hs_created_by_user_id": null, "hs_createdate": null, "hs_date_entered_customer": null, "hs_date_entered_evangelist": null, "hs_date_entered_lead": null, "hs_date_entered_marketingqualifiedlead": null, "hs_date_entered_opportunity": null, "hs_date_entered_other": null, "hs_date_entered_salesqualifiedlead": null, "hs_date_entered_subscriber": null, "hs_date_exited_customer": null, "hs_date_exited_evangelist": null, "hs_date_exited_lead": null, "hs_date_exited_marketingqualifiedlead": null, "hs_date_exited_opportunity": null, "hs_date_exited_other": null, "hs_date_exited_salesqualifiedlead": null, "hs_date_exited_subscriber": null, "hs_ideal_customer_profile": null, "hs_is_target_account": null, "hs_last_booked_meeting_date": null, "hs_last_logged_call_date": null, "hs_last_open_task_date": null, "hs_last_sales_activity_date": null, "hs_last_sales_activity_timestamp": null, "hs_lastmodifieddate": "2021-10-12T12:57:42.589000+00:00", "hs_latest_createdate_of_active_subscriptions": null, "hs_latest_meeting_activity": null, "hs_lead_status": null, "hs_merged_object_ids": null, "hs_num_blockers": 0.0, "hs_num_child_companies": 0.0, "hs_num_contacts_with_buying_roles": 0.0, "hs_num_decision_makers": 0.0, "hs_num_open_deals": 0.0, "hs_object_id": 
7097820029, "hs_parent_company_id": null, "hs_pipeline": null, "hs_predictivecontactscore_v2": null, "hs_predictivecontactscore_v2_next_max_max_d4e58c1e": null, "hs_sales_email_last_replied": null, "hs_target_account": null, "hs_target_account_probability": 0.4076234698295593, "hs_target_account_recommendation_snooze_time": null, "hs_target_account_recommendation_state": null, "hs_time_in_customer": null, "hs_time_in_evangelist": null, "hs_time_in_lead": null, "hs_time_in_marketingqualifiedlead": null, "hs_time_in_opportunity": null, "hs_time_in_other": null, "hs_time_in_salesqualifiedlead": null, "hs_time_in_subscriber": null, "hs_total_deal_value": null, "hs_unique_creation_key": null, "hs_updated_by_user_id": null, "hs_user_ids_of_all_notification_followers": null, "hs_user_ids_of_all_notification_unfollowers": null, "hs_user_ids_of_all_owners": null, "hubspot_owner_assigneddate": null, "hubspot_owner_id": null, "hubspot_team_id": null, "hubspotscore": null, "industry": "Airbyte Test", "is_public": true, "lifecyclestage": null, "linkedin_company_page": "https://www.linkedin.com/company/bd-domain", "linkedinbio": null, "name": "test company 42", "notes_last_contacted": null, "notes_last_updated": null, "notes_next_activity_date": null, "num_associated_contacts": 0.0, "num_associated_deals": null, "num_contacted_notes": null, "num_conversion_events": null, "num_conversion_events_cardinality_sum_d095f14b": null, "num_notes": null, "numberofemployees": null, "phone": "(877) 929-0687-00291", "recent_conversion_date": null, "recent_conversion_date_timestamp_latest_value_72856da1": null, "recent_conversion_event_name": null, "recent_conversion_event_name_timestamp_latest_value_66c820bf": null, "recent_deal_amount": null, "recent_deal_close_date": null, "state": "Massachusetts", "timezone": "Asia/Dhaka", "total_money_raised": null, "total_revenue": null, "twitterbio": null, "twitterfollowers": null, "twitterhandle": null, "type": null, "web_technologies": 
"apache;nginx;facebook_social_plugins;facebook_connect;google_analytics;facebook_like_button;centos", "website": "test.domain.net", "zip": "1216"}, "createdAt": "2021-10-12T12:57:26.601Z", "updatedAt": "2021-10-12T12:57:42.589Z", "archived": false}, "emitted_at": 1655280717317} +{"stream": "companies", "data": {"id": "7097820039", "properties": {"about_us": null, "address": null, "address2": null, "annualrevenue": null, "city": "Test City 82", "closedate": null, "closedate_timestamp_earliest_value_a2a17e6e": null, "country": "Bangladesh", "createdate": "2021-10-12T12:57:40.191000+00:00", "days_to_close": null, "description": "This is a placeholder page installed by the Ubuntu release of the Lighttpd server package.", "domain": "test.domain.net", "engagements_last_meeting_booked": null, "engagements_last_meeting_booked_campaign": null, "engagements_last_meeting_booked_medium": null, "engagements_last_meeting_booked_source": null, "facebook_company_page": null, "facebookfans": null, "first_contact_createdate": null, "first_contact_createdate_timestamp_earliest_value_78b50eea": null, "first_conversion_date": null, "first_conversion_date_timestamp_earliest_value_61f58f2c": null, "first_conversion_event_name": null, "first_conversion_event_name_timestamp_earliest_value_68ddae0a": null, "first_deal_created_date": null, "founded_year": null, "googleplus_page": null, "hs_additional_domains": null, "hs_all_accessible_team_ids": null, "hs_all_assigned_business_unit_ids": null, "hs_all_owner_ids": null, "hs_all_team_ids": null, "hs_analytics_first_timestamp": null, "hs_analytics_first_timestamp_timestamp_earliest_value_11e3a63a": null, "hs_analytics_first_touch_converting_campaign": null, "hs_analytics_first_touch_converting_campaign_timestamp_earliest_value_4757fe10": null, "hs_analytics_first_visit_timestamp": null, "hs_analytics_first_visit_timestamp_timestamp_earliest_value_accc17ae": null, "hs_analytics_last_timestamp": null, 
"hs_analytics_last_timestamp_timestamp_latest_value_4e16365a": null, "hs_analytics_last_touch_converting_campaign": null, "hs_analytics_last_touch_converting_campaign_timestamp_latest_value_81a64e30": null, "hs_analytics_last_visit_timestamp": null, "hs_analytics_last_visit_timestamp_timestamp_latest_value_999a0fce": null, "hs_analytics_num_page_views": null, "hs_analytics_num_page_views_cardinality_sum_e46e85b0": null, "hs_analytics_num_visits": null, "hs_analytics_num_visits_cardinality_sum_53d952a6": null, "hs_analytics_source": null, "hs_analytics_source_data_1": null, "hs_analytics_source_data_1_timestamp_earliest_value_9b2f1fa1": null, "hs_analytics_source_data_2": null, "hs_analytics_source_data_2_timestamp_earliest_value_9b2f9400": null, "hs_analytics_source_timestamp_earliest_value_25a3a52c": null, "hs_avatar_filemanager_key": null, "hs_created_by_user_id": null, "hs_createdate": null, "hs_date_entered_customer": null, "hs_date_entered_evangelist": null, "hs_date_entered_lead": null, "hs_date_entered_marketingqualifiedlead": null, "hs_date_entered_opportunity": null, "hs_date_entered_other": null, "hs_date_entered_salesqualifiedlead": null, "hs_date_entered_subscriber": null, "hs_date_exited_customer": null, "hs_date_exited_evangelist": null, "hs_date_exited_lead": null, "hs_date_exited_marketingqualifiedlead": null, "hs_date_exited_opportunity": null, "hs_date_exited_other": null, "hs_date_exited_salesqualifiedlead": null, "hs_date_exited_subscriber": null, "hs_ideal_customer_profile": null, "hs_is_target_account": null, "hs_last_booked_meeting_date": null, "hs_last_logged_call_date": null, "hs_last_open_task_date": null, "hs_last_sales_activity_date": null, "hs_last_sales_activity_timestamp": null, "hs_lastmodifieddate": "2021-10-12T12:57:54.771000+00:00", "hs_latest_createdate_of_active_subscriptions": null, "hs_latest_meeting_activity": null, "hs_lead_status": null, "hs_merged_object_ids": null, "hs_num_blockers": 0.0, "hs_num_child_companies": 0.0, 
"hs_num_contacts_with_buying_roles": 0.0, "hs_num_decision_makers": 0.0, "hs_num_open_deals": 0.0, "hs_object_id": 7097820039, "hs_parent_company_id": null, "hs_pipeline": null, "hs_predictivecontactscore_v2": null, "hs_predictivecontactscore_v2_next_max_max_d4e58c1e": null, "hs_sales_email_last_replied": null, "hs_target_account": null, "hs_target_account_probability": 0.4076234698295593, "hs_target_account_recommendation_snooze_time": null, "hs_target_account_recommendation_state": null, "hs_time_in_customer": null, "hs_time_in_evangelist": null, "hs_time_in_lead": null, "hs_time_in_marketingqualifiedlead": null, "hs_time_in_opportunity": null, "hs_time_in_other": null, "hs_time_in_salesqualifiedlead": null, "hs_time_in_subscriber": null, "hs_total_deal_value": null, "hs_unique_creation_key": null, "hs_updated_by_user_id": null, "hs_user_ids_of_all_notification_followers": null, "hs_user_ids_of_all_notification_unfollowers": null, "hs_user_ids_of_all_owners": null, "hubspot_owner_assigneddate": null, "hubspot_owner_id": null, "hubspot_team_id": null, "hubspotscore": null, "industry": "Airbyte Test", "is_public": true, "lifecyclestage": null, "linkedin_company_page": "https://www.linkedin.com/company/bd-domain", "linkedinbio": null, "name": "test company 82", "notes_last_contacted": null, "notes_last_updated": null, "notes_next_activity_date": null, "num_associated_contacts": 0.0, "num_associated_deals": null, "num_contacted_notes": null, "num_conversion_events": null, "num_conversion_events_cardinality_sum_d095f14b": null, "num_notes": null, "numberofemployees": null, "phone": "(877) 929-0687-00291", "recent_conversion_date": null, "recent_conversion_date_timestamp_latest_value_72856da1": null, "recent_conversion_event_name": null, "recent_conversion_event_name_timestamp_latest_value_66c820bf": null, "recent_deal_amount": null, "recent_deal_close_date": null, "state": "Massachusetts", "timezone": "Asia/Dhaka", "total_money_raised": null, "total_revenue": null, 
"twitterbio": null, "twitterfollowers": null, "twitterhandle": null, "type": null, "web_technologies": "apache;nginx;facebook_social_plugins;facebook_connect;google_analytics;facebook_like_button;centos", "website": "test.domain.net", "zip": "1216"}, "createdAt": "2021-10-12T12:57:40.191Z", "updatedAt": "2021-10-12T12:57:54.771Z", "archived": false}, "emitted_at": 1655280717318} +{"stream": "companies", "data": {"id": "7097820041", "properties": {"about_us": null, "address": null, "address2": null, "annualrevenue": null, "city": "Test City 88", "closedate": null, "closedate_timestamp_earliest_value_a2a17e6e": null, "country": "Bangladesh", "createdate": "2021-10-12T12:57:42.374000+00:00", "days_to_close": null, "description": "This is a placeholder page installed by the Ubuntu release of the Lighttpd server package.", "domain": "test.domain.net", "engagements_last_meeting_booked": null, "engagements_last_meeting_booked_campaign": null, "engagements_last_meeting_booked_medium": null, "engagements_last_meeting_booked_source": null, "facebook_company_page": null, "facebookfans": null, "first_contact_createdate": null, "first_contact_createdate_timestamp_earliest_value_78b50eea": null, "first_conversion_date": null, "first_conversion_date_timestamp_earliest_value_61f58f2c": null, "first_conversion_event_name": null, "first_conversion_event_name_timestamp_earliest_value_68ddae0a": null, "first_deal_created_date": null, "founded_year": null, "googleplus_page": null, "hs_additional_domains": null, "hs_all_accessible_team_ids": null, "hs_all_assigned_business_unit_ids": null, "hs_all_owner_ids": null, "hs_all_team_ids": null, "hs_analytics_first_timestamp": null, "hs_analytics_first_timestamp_timestamp_earliest_value_11e3a63a": null, "hs_analytics_first_touch_converting_campaign": null, "hs_analytics_first_touch_converting_campaign_timestamp_earliest_value_4757fe10": null, "hs_analytics_first_visit_timestamp": null, 
"hs_analytics_first_visit_timestamp_timestamp_earliest_value_accc17ae": null, "hs_analytics_last_timestamp": null, "hs_analytics_last_timestamp_timestamp_latest_value_4e16365a": null, "hs_analytics_last_touch_converting_campaign": null, "hs_analytics_last_touch_converting_campaign_timestamp_latest_value_81a64e30": null, "hs_analytics_last_visit_timestamp": null, "hs_analytics_last_visit_timestamp_timestamp_latest_value_999a0fce": null, "hs_analytics_num_page_views": null, "hs_analytics_num_page_views_cardinality_sum_e46e85b0": null, "hs_analytics_num_visits": null, "hs_analytics_num_visits_cardinality_sum_53d952a6": null, "hs_analytics_source": null, "hs_analytics_source_data_1": null, "hs_analytics_source_data_1_timestamp_earliest_value_9b2f1fa1": null, "hs_analytics_source_data_2": null, "hs_analytics_source_data_2_timestamp_earliest_value_9b2f9400": null, "hs_analytics_source_timestamp_earliest_value_25a3a52c": null, "hs_avatar_filemanager_key": null, "hs_created_by_user_id": null, "hs_createdate": null, "hs_date_entered_customer": null, "hs_date_entered_evangelist": null, "hs_date_entered_lead": null, "hs_date_entered_marketingqualifiedlead": null, "hs_date_entered_opportunity": null, "hs_date_entered_other": null, "hs_date_entered_salesqualifiedlead": null, "hs_date_entered_subscriber": null, "hs_date_exited_customer": null, "hs_date_exited_evangelist": null, "hs_date_exited_lead": null, "hs_date_exited_marketingqualifiedlead": null, "hs_date_exited_opportunity": null, "hs_date_exited_other": null, "hs_date_exited_salesqualifiedlead": null, "hs_date_exited_subscriber": null, "hs_ideal_customer_profile": null, "hs_is_target_account": null, "hs_last_booked_meeting_date": null, "hs_last_logged_call_date": null, "hs_last_open_task_date": null, "hs_last_sales_activity_date": null, "hs_last_sales_activity_timestamp": null, "hs_lastmodifieddate": "2021-10-12T12:57:52.142000+00:00", "hs_latest_createdate_of_active_subscriptions": null, "hs_latest_meeting_activity": 
null, "hs_lead_status": null, "hs_merged_object_ids": null, "hs_num_blockers": 0.0, "hs_num_child_companies": 0.0, "hs_num_contacts_with_buying_roles": 0.0, "hs_num_decision_makers": 0.0, "hs_num_open_deals": 0.0, "hs_object_id": 7097820041, "hs_parent_company_id": null, "hs_pipeline": null, "hs_predictivecontactscore_v2": null, "hs_predictivecontactscore_v2_next_max_max_d4e58c1e": null, "hs_sales_email_last_replied": null, "hs_target_account": null, "hs_target_account_probability": 0.4076234698295593, "hs_target_account_recommendation_snooze_time": null, "hs_target_account_recommendation_state": null, "hs_time_in_customer": null, "hs_time_in_evangelist": null, "hs_time_in_lead": null, "hs_time_in_marketingqualifiedlead": null, "hs_time_in_opportunity": null, "hs_time_in_other": null, "hs_time_in_salesqualifiedlead": null, "hs_time_in_subscriber": null, "hs_total_deal_value": null, "hs_unique_creation_key": null, "hs_updated_by_user_id": null, "hs_user_ids_of_all_notification_followers": null, "hs_user_ids_of_all_notification_unfollowers": null, "hs_user_ids_of_all_owners": null, "hubspot_owner_assigneddate": null, "hubspot_owner_id": null, "hubspot_team_id": null, "hubspotscore": null, "industry": "Airbyte Test", "is_public": true, "lifecyclestage": null, "linkedin_company_page": "https://www.linkedin.com/company/bd-domain", "linkedinbio": null, "name": "test company 88", "notes_last_contacted": null, "notes_last_updated": null, "notes_next_activity_date": null, "num_associated_contacts": 0.0, "num_associated_deals": null, "num_contacted_notes": null, "num_conversion_events": null, "num_conversion_events_cardinality_sum_d095f14b": null, "num_notes": null, "numberofemployees": null, "phone": "(877) 929-0687-00291", "recent_conversion_date": null, "recent_conversion_date_timestamp_latest_value_72856da1": null, "recent_conversion_event_name": null, "recent_conversion_event_name_timestamp_latest_value_66c820bf": null, "recent_deal_amount": null, 
"recent_deal_close_date": null, "state": "Massachusetts", "timezone": "Asia/Dhaka", "total_money_raised": null, "total_revenue": null, "twitterbio": null, "twitterfollowers": null, "twitterhandle": null, "type": null, "web_technologies": "apache;nginx;facebook_social_plugins;facebook_connect;google_analytics;facebook_like_button;centos", "website": "test.domain.net", "zip": "1216"}, "createdAt": "2021-10-12T12:57:42.374Z", "updatedAt": "2021-10-12T12:57:52.142Z", "archived": false}, "emitted_at": 1655280717319} +{"stream": "companies", "data": {"id": "7097820042", "properties": {"about_us": null, "address": null, "address2": null, "annualrevenue": null, "city": "Test City 91", "closedate": null, "closedate_timestamp_earliest_value_a2a17e6e": null, "country": "Bangladesh", "createdate": "2021-10-12T12:57:43.706000+00:00", "days_to_close": null, "description": "This is a placeholder page installed by the Ubuntu release of the Lighttpd server package.", "domain": "test.domain.net", "engagements_last_meeting_booked": null, "engagements_last_meeting_booked_campaign": null, "engagements_last_meeting_booked_medium": null, "engagements_last_meeting_booked_source": null, "facebook_company_page": null, "facebookfans": null, "first_contact_createdate": null, "first_contact_createdate_timestamp_earliest_value_78b50eea": null, "first_conversion_date": null, "first_conversion_date_timestamp_earliest_value_61f58f2c": null, "first_conversion_event_name": null, "first_conversion_event_name_timestamp_earliest_value_68ddae0a": null, "first_deal_created_date": null, "founded_year": null, "googleplus_page": null, "hs_additional_domains": null, "hs_all_accessible_team_ids": null, "hs_all_assigned_business_unit_ids": null, "hs_all_owner_ids": null, "hs_all_team_ids": null, "hs_analytics_first_timestamp": null, "hs_analytics_first_timestamp_timestamp_earliest_value_11e3a63a": null, "hs_analytics_first_touch_converting_campaign": null, 
"hs_analytics_first_touch_converting_campaign_timestamp_earliest_value_4757fe10": null, "hs_analytics_first_visit_timestamp": null, "hs_analytics_first_visit_timestamp_timestamp_earliest_value_accc17ae": null, "hs_analytics_last_timestamp": null, "hs_analytics_last_timestamp_timestamp_latest_value_4e16365a": null, "hs_analytics_last_touch_converting_campaign": null, "hs_analytics_last_touch_converting_campaign_timestamp_latest_value_81a64e30": null, "hs_analytics_last_visit_timestamp": null, "hs_analytics_last_visit_timestamp_timestamp_latest_value_999a0fce": null, "hs_analytics_num_page_views": null, "hs_analytics_num_page_views_cardinality_sum_e46e85b0": null, "hs_analytics_num_visits": null, "hs_analytics_num_visits_cardinality_sum_53d952a6": null, "hs_analytics_source": null, "hs_analytics_source_data_1": null, "hs_analytics_source_data_1_timestamp_earliest_value_9b2f1fa1": null, "hs_analytics_source_data_2": null, "hs_analytics_source_data_2_timestamp_earliest_value_9b2f9400": null, "hs_analytics_source_timestamp_earliest_value_25a3a52c": null, "hs_avatar_filemanager_key": null, "hs_created_by_user_id": null, "hs_createdate": null, "hs_date_entered_customer": null, "hs_date_entered_evangelist": null, "hs_date_entered_lead": null, "hs_date_entered_marketingqualifiedlead": null, "hs_date_entered_opportunity": null, "hs_date_entered_other": null, "hs_date_entered_salesqualifiedlead": null, "hs_date_entered_subscriber": null, "hs_date_exited_customer": null, "hs_date_exited_evangelist": null, "hs_date_exited_lead": null, "hs_date_exited_marketingqualifiedlead": null, "hs_date_exited_opportunity": null, "hs_date_exited_other": null, "hs_date_exited_salesqualifiedlead": null, "hs_date_exited_subscriber": null, "hs_ideal_customer_profile": null, "hs_is_target_account": null, "hs_last_booked_meeting_date": null, "hs_last_logged_call_date": null, "hs_last_open_task_date": null, "hs_last_sales_activity_date": null, "hs_last_sales_activity_timestamp": null, 
"hs_lastmodifieddate": "2021-10-12T12:57:52.619000+00:00", "hs_latest_createdate_of_active_subscriptions": null, "hs_latest_meeting_activity": null, "hs_lead_status": null, "hs_merged_object_ids": null, "hs_num_blockers": 0.0, "hs_num_child_companies": 0.0, "hs_num_contacts_with_buying_roles": 0.0, "hs_num_decision_makers": 0.0, "hs_num_open_deals": 0.0, "hs_object_id": 7097820042, "hs_parent_company_id": null, "hs_pipeline": null, "hs_predictivecontactscore_v2": null, "hs_predictivecontactscore_v2_next_max_max_d4e58c1e": null, "hs_sales_email_last_replied": null, "hs_target_account": null, "hs_target_account_probability": 0.4076234698295593, "hs_target_account_recommendation_snooze_time": null, "hs_target_account_recommendation_state": null, "hs_time_in_customer": null, "hs_time_in_evangelist": null, "hs_time_in_lead": null, "hs_time_in_marketingqualifiedlead": null, "hs_time_in_opportunity": null, "hs_time_in_other": null, "hs_time_in_salesqualifiedlead": null, "hs_time_in_subscriber": null, "hs_total_deal_value": null, "hs_unique_creation_key": null, "hs_updated_by_user_id": null, "hs_user_ids_of_all_notification_followers": null, "hs_user_ids_of_all_notification_unfollowers": null, "hs_user_ids_of_all_owners": null, "hubspot_owner_assigneddate": null, "hubspot_owner_id": null, "hubspot_team_id": null, "hubspotscore": null, "industry": "Airbyte Test", "is_public": true, "lifecyclestage": null, "linkedin_company_page": "https://www.linkedin.com/company/bd-domain", "linkedinbio": null, "name": "test company 91", "notes_last_contacted": null, "notes_last_updated": null, "notes_next_activity_date": null, "num_associated_contacts": 0.0, "num_associated_deals": null, "num_contacted_notes": null, "num_conversion_events": null, "num_conversion_events_cardinality_sum_d095f14b": null, "num_notes": null, "numberofemployees": null, "phone": "(877) 929-0687-00291", "recent_conversion_date": null, "recent_conversion_date_timestamp_latest_value_72856da1": null, 
"recent_conversion_event_name": null, "recent_conversion_event_name_timestamp_latest_value_66c820bf": null, "recent_deal_amount": null, "recent_deal_close_date": null, "state": "Massachusetts", "timezone": "Asia/Dhaka", "total_money_raised": null, "total_revenue": null, "twitterbio": null, "twitterfollowers": null, "twitterhandle": null, "type": null, "web_technologies": "apache;nginx;facebook_social_plugins;facebook_connect;google_analytics;facebook_like_button;centos", "website": "test.domain.net", "zip": "1216"}, "createdAt": "2021-10-12T12:57:43.706Z", "updatedAt": "2021-10-12T12:57:52.619Z", "archived": false}, "emitted_at": 1655280717320} +{"stream": "companies", "data": {"id": "7097823561", "properties": {"about_us": null, "address": null, "address2": null, "annualrevenue": null, "city": "Test City 22", "closedate": null, "closedate_timestamp_earliest_value_a2a17e6e": null, "country": "Bangladesh", "createdate": "2021-10-12T12:57:19.934000+00:00", "days_to_close": null, "description": "This is a placeholder page installed by the Ubuntu release of the Lighttpd server package.", "domain": "test.domain.net", "engagements_last_meeting_booked": null, "engagements_last_meeting_booked_campaign": null, "engagements_last_meeting_booked_medium": null, "engagements_last_meeting_booked_source": null, "facebook_company_page": null, "facebookfans": null, "first_contact_createdate": null, "first_contact_createdate_timestamp_earliest_value_78b50eea": null, "first_conversion_date": null, "first_conversion_date_timestamp_earliest_value_61f58f2c": null, "first_conversion_event_name": null, "first_conversion_event_name_timestamp_earliest_value_68ddae0a": null, "first_deal_created_date": null, "founded_year": null, "googleplus_page": null, "hs_additional_domains": null, "hs_all_accessible_team_ids": null, "hs_all_assigned_business_unit_ids": null, "hs_all_owner_ids": null, "hs_all_team_ids": null, "hs_analytics_first_timestamp": null, 
"hs_analytics_first_timestamp_timestamp_earliest_value_11e3a63a": null, "hs_analytics_first_touch_converting_campaign": null, "hs_analytics_first_touch_converting_campaign_timestamp_earliest_value_4757fe10": null, "hs_analytics_first_visit_timestamp": null, "hs_analytics_first_visit_timestamp_timestamp_earliest_value_accc17ae": null, "hs_analytics_last_timestamp": null, "hs_analytics_last_timestamp_timestamp_latest_value_4e16365a": null, "hs_analytics_last_touch_converting_campaign": null, "hs_analytics_last_touch_converting_campaign_timestamp_latest_value_81a64e30": null, "hs_analytics_last_visit_timestamp": null, "hs_analytics_last_visit_timestamp_timestamp_latest_value_999a0fce": null, "hs_analytics_num_page_views": null, "hs_analytics_num_page_views_cardinality_sum_e46e85b0": null, "hs_analytics_num_visits": null, "hs_analytics_num_visits_cardinality_sum_53d952a6": null, "hs_analytics_source": null, "hs_analytics_source_data_1": null, "hs_analytics_source_data_1_timestamp_earliest_value_9b2f1fa1": null, "hs_analytics_source_data_2": null, "hs_analytics_source_data_2_timestamp_earliest_value_9b2f9400": null, "hs_analytics_source_timestamp_earliest_value_25a3a52c": null, "hs_avatar_filemanager_key": null, "hs_created_by_user_id": null, "hs_createdate": null, "hs_date_entered_customer": null, "hs_date_entered_evangelist": null, "hs_date_entered_lead": null, "hs_date_entered_marketingqualifiedlead": null, "hs_date_entered_opportunity": null, "hs_date_entered_other": null, "hs_date_entered_salesqualifiedlead": null, "hs_date_entered_subscriber": null, "hs_date_exited_customer": null, "hs_date_exited_evangelist": null, "hs_date_exited_lead": null, "hs_date_exited_marketingqualifiedlead": null, "hs_date_exited_opportunity": null, "hs_date_exited_other": null, "hs_date_exited_salesqualifiedlead": null, "hs_date_exited_subscriber": null, "hs_ideal_customer_profile": null, "hs_is_target_account": null, "hs_last_booked_meeting_date": null, "hs_last_logged_call_date": 
null, "hs_last_open_task_date": null, "hs_last_sales_activity_date": null, "hs_last_sales_activity_timestamp": null, "hs_lastmodifieddate": "2021-10-12T12:57:24.944000+00:00", "hs_latest_createdate_of_active_subscriptions": null, "hs_latest_meeting_activity": null, "hs_lead_status": null, "hs_merged_object_ids": null, "hs_num_blockers": 0.0, "hs_num_child_companies": 0.0, "hs_num_contacts_with_buying_roles": 0.0, "hs_num_decision_makers": 0.0, "hs_num_open_deals": 0.0, "hs_object_id": 7097823561, "hs_parent_company_id": null, "hs_pipeline": null, "hs_predictivecontactscore_v2": null, "hs_predictivecontactscore_v2_next_max_max_d4e58c1e": null, "hs_sales_email_last_replied": null, "hs_target_account": null, "hs_target_account_probability": 0.4076234698295593, "hs_target_account_recommendation_snooze_time": null, "hs_target_account_recommendation_state": null, "hs_time_in_customer": null, "hs_time_in_evangelist": null, "hs_time_in_lead": null, "hs_time_in_marketingqualifiedlead": null, "hs_time_in_opportunity": null, "hs_time_in_other": null, "hs_time_in_salesqualifiedlead": null, "hs_time_in_subscriber": null, "hs_total_deal_value": null, "hs_unique_creation_key": null, "hs_updated_by_user_id": null, "hs_user_ids_of_all_notification_followers": null, "hs_user_ids_of_all_notification_unfollowers": null, "hs_user_ids_of_all_owners": null, "hubspot_owner_assigneddate": null, "hubspot_owner_id": null, "hubspot_team_id": null, "hubspotscore": null, "industry": "Airbyte Test", "is_public": true, "lifecyclestage": null, "linkedin_company_page": "https://www.linkedin.com/company/bd-domain", "linkedinbio": null, "name": "test company 22", "notes_last_contacted": null, "notes_last_updated": null, "notes_next_activity_date": null, "num_associated_contacts": 0.0, "num_associated_deals": null, "num_contacted_notes": null, "num_conversion_events": null, "num_conversion_events_cardinality_sum_d095f14b": null, "num_notes": null, "numberofemployees": null, "phone": "(877) 
929-0687-00291", "recent_conversion_date": null, "recent_conversion_date_timestamp_latest_value_72856da1": null, "recent_conversion_event_name": null, "recent_conversion_event_name_timestamp_latest_value_66c820bf": null, "recent_deal_amount": null, "recent_deal_close_date": null, "state": "Massachusetts", "timezone": "Asia/Dhaka", "total_money_raised": null, "total_revenue": null, "twitterbio": null, "twitterfollowers": null, "twitterhandle": null, "type": null, "web_technologies": "apache;nginx;facebook_social_plugins;facebook_connect;google_analytics;facebook_like_button;centos", "website": "test.domain.net", "zip": "1216"}, "createdAt": "2021-10-12T12:57:19.934Z", "updatedAt": "2021-10-12T12:57:24.944Z", "archived": false}, "emitted_at": 1655280717321} +{"stream": "companies", "data": {"id": "7097823562", "properties": {"about_us": null, "address": null, "address2": null, "annualrevenue": null, "city": "Test City 24", "closedate": null, "closedate_timestamp_earliest_value_a2a17e6e": null, "country": "Bangladesh", "createdate": "2021-10-12T12:57:20.553000+00:00", "days_to_close": null, "description": "This is a placeholder page installed by the Ubuntu release of the Lighttpd server package.", "domain": "test.domain.net", "engagements_last_meeting_booked": null, "engagements_last_meeting_booked_campaign": null, "engagements_last_meeting_booked_medium": null, "engagements_last_meeting_booked_source": null, "facebook_company_page": null, "facebookfans": null, "first_contact_createdate": null, "first_contact_createdate_timestamp_earliest_value_78b50eea": null, "first_conversion_date": null, "first_conversion_date_timestamp_earliest_value_61f58f2c": null, "first_conversion_event_name": null, "first_conversion_event_name_timestamp_earliest_value_68ddae0a": null, "first_deal_created_date": null, "founded_year": null, "googleplus_page": null, "hs_additional_domains": null, "hs_all_accessible_team_ids": null, "hs_all_assigned_business_unit_ids": null, "hs_all_owner_ids": 
null, "hs_all_team_ids": null, "hs_analytics_first_timestamp": null, "hs_analytics_first_timestamp_timestamp_earliest_value_11e3a63a": null, "hs_analytics_first_touch_converting_campaign": null, "hs_analytics_first_touch_converting_campaign_timestamp_earliest_value_4757fe10": null, "hs_analytics_first_visit_timestamp": null, "hs_analytics_first_visit_timestamp_timestamp_earliest_value_accc17ae": null, "hs_analytics_last_timestamp": null, "hs_analytics_last_timestamp_timestamp_latest_value_4e16365a": null, "hs_analytics_last_touch_converting_campaign": null, "hs_analytics_last_touch_converting_campaign_timestamp_latest_value_81a64e30": null, "hs_analytics_last_visit_timestamp": null, "hs_analytics_last_visit_timestamp_timestamp_latest_value_999a0fce": null, "hs_analytics_num_page_views": null, "hs_analytics_num_page_views_cardinality_sum_e46e85b0": null, "hs_analytics_num_visits": null, "hs_analytics_num_visits_cardinality_sum_53d952a6": null, "hs_analytics_source": null, "hs_analytics_source_data_1": null, "hs_analytics_source_data_1_timestamp_earliest_value_9b2f1fa1": null, "hs_analytics_source_data_2": null, "hs_analytics_source_data_2_timestamp_earliest_value_9b2f9400": null, "hs_analytics_source_timestamp_earliest_value_25a3a52c": null, "hs_avatar_filemanager_key": null, "hs_created_by_user_id": null, "hs_createdate": null, "hs_date_entered_customer": null, "hs_date_entered_evangelist": null, "hs_date_entered_lead": null, "hs_date_entered_marketingqualifiedlead": null, "hs_date_entered_opportunity": null, "hs_date_entered_other": null, "hs_date_entered_salesqualifiedlead": null, "hs_date_entered_subscriber": null, "hs_date_exited_customer": null, "hs_date_exited_evangelist": null, "hs_date_exited_lead": null, "hs_date_exited_marketingqualifiedlead": null, "hs_date_exited_opportunity": null, "hs_date_exited_other": null, "hs_date_exited_salesqualifiedlead": null, "hs_date_exited_subscriber": null, "hs_ideal_customer_profile": null, "hs_is_target_account": null, 
"hs_last_booked_meeting_date": null, "hs_last_logged_call_date": null, "hs_last_open_task_date": null, "hs_last_sales_activity_date": null, "hs_last_sales_activity_timestamp": null, "hs_lastmodifieddate": "2021-10-12T12:58:26.529000+00:00", "hs_latest_createdate_of_active_subscriptions": null, "hs_latest_meeting_activity": null, "hs_lead_status": null, "hs_merged_object_ids": null, "hs_num_blockers": 0.0, "hs_num_child_companies": 0.0, "hs_num_contacts_with_buying_roles": 0.0, "hs_num_decision_makers": 0.0, "hs_num_open_deals": 0.0, "hs_object_id": 7097823562, "hs_parent_company_id": null, "hs_pipeline": null, "hs_predictivecontactscore_v2": null, "hs_predictivecontactscore_v2_next_max_max_d4e58c1e": null, "hs_sales_email_last_replied": null, "hs_target_account": null, "hs_target_account_probability": 0.4076234698295593, "hs_target_account_recommendation_snooze_time": null, "hs_target_account_recommendation_state": null, "hs_time_in_customer": null, "hs_time_in_evangelist": null, "hs_time_in_lead": null, "hs_time_in_marketingqualifiedlead": null, "hs_time_in_opportunity": null, "hs_time_in_other": null, "hs_time_in_salesqualifiedlead": null, "hs_time_in_subscriber": null, "hs_total_deal_value": null, "hs_unique_creation_key": null, "hs_updated_by_user_id": null, "hs_user_ids_of_all_notification_followers": null, "hs_user_ids_of_all_notification_unfollowers": null, "hs_user_ids_of_all_owners": null, "hubspot_owner_assigneddate": null, "hubspot_owner_id": null, "hubspot_team_id": null, "hubspotscore": null, "industry": "Airbyte Test", "is_public": true, "lifecyclestage": null, "linkedin_company_page": "https://www.linkedin.com/company/bd-domain", "linkedinbio": null, "name": "test company 24", "notes_last_contacted": null, "notes_last_updated": null, "notes_next_activity_date": null, "num_associated_contacts": 0.0, "num_associated_deals": null, "num_contacted_notes": null, "num_conversion_events": null, "num_conversion_events_cardinality_sum_d095f14b": null, 
"num_notes": null, "numberofemployees": null, "phone": "(877) 929-0687-00291", "recent_conversion_date": null, "recent_conversion_date_timestamp_latest_value_72856da1": null, "recent_conversion_event_name": null, "recent_conversion_event_name_timestamp_latest_value_66c820bf": null, "recent_deal_amount": null, "recent_deal_close_date": null, "state": "Massachusetts", "timezone": "Asia/Dhaka", "total_money_raised": null, "total_revenue": null, "twitterbio": null, "twitterfollowers": null, "twitterhandle": null, "type": null, "web_technologies": "apache;nginx;facebook_social_plugins;facebook_connect;google_analytics;facebook_like_button;centos", "website": "test.domain.net", "zip": "1216"}, "createdAt": "2021-10-12T12:57:20.553Z", "updatedAt": "2021-10-12T12:58:26.529Z", "archived": false}, "emitted_at": 1655280717322} +{"stream": "contact_lists", "data": {"portalId": 8727216, "listId": 1, "internalListId": 1, "createdAt": 1610634707244, "updatedAt": 1610634707244, "name": "tweeters", "listType": "DYNAMIC", "filters": [[{"withinTimeMode": "PAST", "operator": "EQ", "filterFamily": "PropertyValue", "type": "string", "property": "twitterhandle", "value": "@hubspot"}]], "metaData": {"size": 0, "lastSizeChangeAt": 1610634723407, "processing": "DONE", "lastProcessingStateChangeAt": 1610634723368, "error": "", "listReferencesCount": null, "parentFolderId": null}, "archived": false, "teamIds": [], "dynamic": true}, "emitted_at": 1655280717904} +{"stream": "contact_lists", "data": {"portalId": 8727216, "listId": 2, "internalListId": 2, "createdAt": 1610634770297, "updatedAt": 1610634770297, "name": "tweeters 1", "listType": "DYNAMIC", "filters": [[{"withinTimeMode": "PAST", "operator": "EQ", "filterFamily": "PropertyValue", "type": "string", "property": "twitterhandle", "value": "@hubspot"}]], "metaData": {"size": 0, "lastSizeChangeAt": 1610634784331, "processing": "DONE", "lastProcessingStateChangeAt": 1610634784289, "error": "", "listReferencesCount": null, "parentFolderId": 
null}, "archived": false, "teamIds": [], "dynamic": true}, "emitted_at": 1655280717905} +{"stream": "contact_lists", "data": {"portalId": 8727216, "listId": 3, "internalListId": 3, "createdAt": 1610634774245, "updatedAt": 1610634774245, "name": "tweeters 2", "listType": "DYNAMIC", "filters": [[{"withinTimeMode": "PAST", "operator": "EQ", "filterFamily": "PropertyValue", "type": "string", "property": "twitterhandle", "value": "@hubspot"}]], "metaData": {"size": 0, "lastSizeChangeAt": 1610634788303, "processing": "DONE", "lastProcessingStateChangeAt": 1610634788148, "error": "", "listReferencesCount": null, "parentFolderId": null}, "archived": false, "teamIds": [], "dynamic": true}, "emitted_at": 1655280717906} +{"stream": "contact_lists", "data": {"portalId": 8727216, "listId": 4, "internalListId": 4, "createdAt": 1610634776021, "updatedAt": 1610634776021, "name": "tweeters 3", "listType": "DYNAMIC", "filters": [[{"withinTimeMode": "PAST", "operator": "EQ", "filterFamily": "PropertyValue", "type": "string", "property": "twitterhandle", "value": "@hubspot"}]], "metaData": {"size": 0, "lastSizeChangeAt": 1610634788848, "processing": "DONE", "lastProcessingStateChangeAt": 1610634788812, "error": "", "listReferencesCount": null, "parentFolderId": null}, "archived": false, "teamIds": [], "dynamic": true}, "emitted_at": 1655280717906} +{"stream": "contact_lists", "data": {"portalId": 8727216, "listId": 5, "internalListId": 5, "createdAt": 1610634777202, "updatedAt": 1610634777202, "name": "tweeters 4", "listType": "DYNAMIC", "filters": [[{"withinTimeMode": "PAST", "operator": "EQ", "filterFamily": "PropertyValue", "type": "string", "property": "twitterhandle", "value": "@hubspot"}]], "metaData": {"size": 0, "lastSizeChangeAt": 1610634800213, "processing": "DONE", "lastProcessingStateChangeAt": 1610634800132, "error": "", "listReferencesCount": null, "parentFolderId": null}, "archived": false, "teamIds": [], "dynamic": true}, "emitted_at": 1655280717907} +{"stream": 
"contact_lists", "data": {"portalId": 8727216, "listId": 6, "internalListId": 6, "createdAt": 1610634778434, "updatedAt": 1610634778434, "name": "tweeters 5", "listType": "DYNAMIC", "filters": [[{"withinTimeMode": "PAST", "operator": "EQ", "filterFamily": "PropertyValue", "type": "string", "property": "twitterhandle", "value": "@hubspot"}]], "metaData": {"size": 0, "lastSizeChangeAt": 1610634800258, "processing": "DONE", "lastProcessingStateChangeAt": 1610634800215, "error": "", "listReferencesCount": null, "parentFolderId": null}, "archived": false, "teamIds": [], "dynamic": true}, "emitted_at": 1655280717907} +{"stream": "contact_lists", "data": {"portalId": 8727216, "listId": 7, "internalListId": 7, "createdAt": 1610634780202, "updatedAt": 1610634780202, "name": "tweeters 6", "listType": "DYNAMIC", "filters": [[{"withinTimeMode": "PAST", "operator": "EQ", "filterFamily": "PropertyValue", "type": "string", "property": "twitterhandle", "value": "@hubspot"}]], "metaData": {"size": 0, "lastSizeChangeAt": 1610634800935, "processing": "DONE", "lastProcessingStateChangeAt": 1610634800869, "error": "", "listReferencesCount": null, "parentFolderId": null}, "archived": false, "teamIds": [], "dynamic": true}, "emitted_at": 1655280717908} +{"stream": "contact_lists", "data": {"portalId": 8727216, "listId": 8, "internalListId": 8, "createdAt": 1610634780722, "updatedAt": 1610634780722, "name": "tweeters 7", "listType": "DYNAMIC", "filters": [[{"withinTimeMode": "PAST", "operator": "EQ", "filterFamily": "PropertyValue", "type": "string", "property": "twitterhandle", "value": "@hubspot"}]], "metaData": {"size": 0, "lastSizeChangeAt": 1610634804993, "processing": "DONE", "lastProcessingStateChangeAt": 1610634804926, "error": "", "listReferencesCount": null, "parentFolderId": null}, "archived": false, "teamIds": [], "dynamic": true}, "emitted_at": 1655280717908} +{"stream": "contact_lists", "data": {"portalId": 8727216, "listId": 9, "internalListId": 9, "createdAt": 
1610634781321, "updatedAt": 1610634781321, "name": "tweeters 8", "listType": "DYNAMIC", "filters": [[{"withinTimeMode": "PAST", "operator": "EQ", "filterFamily": "PropertyValue", "type": "string", "property": "twitterhandle", "value": "@hubspot"}]], "metaData": {"size": 0, "lastSizeChangeAt": 1610634813311, "processing": "DONE", "lastProcessingStateChangeAt": 1610634813246, "error": "", "listReferencesCount": null, "parentFolderId": null}, "archived": false, "teamIds": [], "dynamic": true}, "emitted_at": 1655280717909} +{"stream": "contact_lists", "data": {"portalId": 8727216, "listId": 10, "internalListId": 10, "createdAt": 1610634781843, "updatedAt": 1610634781843, "name": "tweeters 9", "listType": "DYNAMIC", "filters": [[{"withinTimeMode": "PAST", "operator": "EQ", "filterFamily": "PropertyValue", "type": "string", "property": "twitterhandle", "value": "@hubspot"}]], "metaData": {"size": 0, "lastSizeChangeAt": 1610634810051, "processing": "DONE", "lastProcessingStateChangeAt": 1610634809945, "error": "", "listReferencesCount": null, "parentFolderId": null}, "archived": false, "teamIds": [], "dynamic": true}, "emitted_at": 1655280717909} +{"stream": "contact_lists", "data": {"portalId": 8727216, "listId": 11, "internalListId": 11, "createdAt": 1610634782448, "updatedAt": 1610634782448, "name": "tweeters 10", "listType": "DYNAMIC", "filters": [[{"withinTimeMode": "PAST", "operator": "EQ", "filterFamily": "PropertyValue", "type": "string", "property": "twitterhandle", "value": "@hubspot"}]], "metaData": {"size": 0, "lastSizeChangeAt": 1610634898403, "processing": "DONE", "lastProcessingStateChangeAt": 1610634898345, "error": "", "listReferencesCount": null, "parentFolderId": null}, "archived": false, "teamIds": [], "dynamic": true}, "emitted_at": 1655280717910} +{"stream": "contact_lists", "data": {"portalId": 8727216, "listId": 60, "internalListId": 60, "createdAt": 1614111029010, "updatedAt": 1614111029010, "name": "HubSpot Sample Imports - Contacts", "listType": 
"STATIC", "authorId": 12282590, "filters": [], "metaData": {"size": 3, "lastSizeChangeAt": 1614111042053, "processing": "DONE", "lastProcessingStateChangeAt": 1614111042074, "error": "", "listReferencesCount": null, "parentFolderId": null}, "archived": false, "teamIds": [], "dynamic": false}, "emitted_at": 1655280717910} +{"stream": "contact_lists", "data": {"portalId": 8727216, "listId": 66, "internalListId": 66, "createdAt": 1634044326742, "updatedAt": 1634044326742, "name": "test contact_list 0", "listType": "DYNAMIC", "filters": [[{"withinTimeMode": "PAST", "operator": "EQ", "filterFamily": "PropertyValue", "type": "string", "property": "twitterhandle", "value": "@hubspot"}]], "metaData": {"size": 0, "lastSizeChangeAt": 1634044357572, "processing": "DONE", "lastProcessingStateChangeAt": 1634044357491, "error": "", "listReferencesCount": null, "parentFolderId": null}, "archived": false, "teamIds": [], "dynamic": true}, "emitted_at": 1655280717910} +{"stream": "contact_lists", "data": {"portalId": 8727216, "listId": 67, "internalListId": 67, "createdAt": 1634044352450, "updatedAt": 1634044352450, "name": "test contact_list 1", "listType": "DYNAMIC", "filters": [[{"withinTimeMode": "PAST", "operator": "EQ", "filterFamily": "PropertyValue", "type": "string", "property": "twitterhandle", "value": "@hubspot"}]], "metaData": {"size": 0, "lastSizeChangeAt": 1634044373475, "processing": "DONE", "lastProcessingStateChangeAt": 1634044373400, "error": "", "listReferencesCount": null, "parentFolderId": null}, "archived": false, "teamIds": [], "dynamic": true}, "emitted_at": 1655280717911} +{"stream": "contact_lists", "data": {"portalId": 8727216, "listId": 68, "internalListId": 68, "createdAt": 1634044352995, "updatedAt": 1634044352995, "name": "test contact_list 2", "listType": "DYNAMIC", "filters": [[{"withinTimeMode": "PAST", "operator": "EQ", "filterFamily": "PropertyValue", "type": "string", "property": "twitterhandle", "value": "@hubspot"}]], "metaData": {"size": 0, 
"lastSizeChangeAt": 1634044375566, "processing": "DONE", "lastProcessingStateChangeAt": 1634044375429, "error": "", "listReferencesCount": null, "parentFolderId": null}, "archived": false, "teamIds": [], "dynamic": true}, "emitted_at": 1655280717911} +{"stream": "contact_lists", "data": {"portalId": 8727216, "listId": 69, "internalListId": 69, "createdAt": 1634044353507, "updatedAt": 1634044353507, "name": "test contact_list 3", "listType": "DYNAMIC", "filters": [[{"withinTimeMode": "PAST", "operator": "EQ", "filterFamily": "PropertyValue", "type": "string", "property": "twitterhandle", "value": "@hubspot"}]], "metaData": {"size": 0, "lastSizeChangeAt": 1634044376975, "processing": "DONE", "lastProcessingStateChangeAt": 1634044376818, "error": "", "listReferencesCount": null, "parentFolderId": null}, "archived": false, "teamIds": [], "dynamic": true}, "emitted_at": 1655280717912} +{"stream": "contact_lists", "data": {"portalId": 8727216, "listId": 70, "internalListId": 70, "createdAt": 1634044354337, "updatedAt": 1634044354337, "name": "test contact_list 4", "listType": "DYNAMIC", "filters": [[{"withinTimeMode": "PAST", "operator": "EQ", "filterFamily": "PropertyValue", "type": "string", "property": "twitterhandle", "value": "@hubspot"}]], "metaData": {"size": 0, "lastSizeChangeAt": 1634044379862, "processing": "DONE", "lastProcessingStateChangeAt": 1634044379733, "error": "", "listReferencesCount": null, "parentFolderId": null}, "archived": false, "teamIds": [], "dynamic": true}, "emitted_at": 1655280717912} +{"stream": "contact_lists", "data": {"portalId": 8727216, "listId": 71, "internalListId": 71, "createdAt": 1634044354856, "updatedAt": 1634044354856, "name": "test contact_list 5", "listType": "DYNAMIC", "filters": [[{"withinTimeMode": "PAST", "operator": "EQ", "filterFamily": "PropertyValue", "type": "string", "property": "twitterhandle", "value": "@hubspot"}]], "metaData": {"size": 0, "lastSizeChangeAt": 1634044373397, "processing": "DONE", 
"lastProcessingStateChangeAt": 1634044373307, "error": "", "listReferencesCount": null, "parentFolderId": null}, "archived": false, "teamIds": [], "dynamic": true}, "emitted_at": 1655280717913} +{"stream": "contact_lists", "data": {"portalId": 8727216, "listId": 72, "internalListId": 72, "createdAt": 1634044355477, "updatedAt": 1634044355477, "name": "test contact_list 6", "listType": "DYNAMIC", "filters": [[{"withinTimeMode": "PAST", "operator": "EQ", "filterFamily": "PropertyValue", "type": "string", "property": "twitterhandle", "value": "@hubspot"}]], "metaData": {"size": 0, "lastSizeChangeAt": 1634044374951, "processing": "DONE", "lastProcessingStateChangeAt": 1634044374646, "error": "", "listReferencesCount": null, "parentFolderId": null}, "archived": false, "teamIds": [], "dynamic": true}, "emitted_at": 1655280717913} +{"stream": "contact_lists", "data": {"portalId": 8727216, "listId": 73, "internalListId": 73, "createdAt": 1634044356317, "updatedAt": 1634044356317, "name": "test contact_list 7", "listType": "DYNAMIC", "filters": [[{"withinTimeMode": "PAST", "operator": "EQ", "filterFamily": "PropertyValue", "type": "string", "property": "twitterhandle", "value": "@hubspot"}]], "metaData": {"size": 0, "lastSizeChangeAt": 1634044374836, "processing": "DONE", "lastProcessingStateChangeAt": 1634044374726, "error": "", "listReferencesCount": null, "parentFolderId": null}, "archived": false, "teamIds": [], "dynamic": true}, "emitted_at": 1655280717913} +{"stream": "contact_lists", "data": {"portalId": 8727216, "listId": 74, "internalListId": 74, "createdAt": 1634044356821, "updatedAt": 1634044356821, "name": "test contact_list 8", "listType": "DYNAMIC", "filters": [[{"withinTimeMode": "PAST", "operator": "EQ", "filterFamily": "PropertyValue", "type": "string", "property": "twitterhandle", "value": "@hubspot"}]], "metaData": {"size": 0, "lastSizeChangeAt": 1634044380784, "processing": "DONE", "lastProcessingStateChangeAt": 1634044380669, "error": "", 
"listReferencesCount": null, "parentFolderId": null}, "archived": false, "teamIds": [], "dynamic": true}, "emitted_at": 1655280717914} +{"stream": "contact_lists", "data": {"portalId": 8727216, "listId": 75, "internalListId": 75, "createdAt": 1634044357656, "updatedAt": 1634044357656, "name": "test contact_list 9", "listType": "DYNAMIC", "filters": [[{"withinTimeMode": "PAST", "operator": "EQ", "filterFamily": "PropertyValue", "type": "string", "property": "twitterhandle", "value": "@hubspot"}]], "metaData": {"size": 0, "lastSizeChangeAt": 1634044374297, "processing": "DONE", "lastProcessingStateChangeAt": 1634044374203, "error": "", "listReferencesCount": null, "parentFolderId": null}, "archived": false, "teamIds": [], "dynamic": true}, "emitted_at": 1655280717914} +{"stream": "contact_lists", "data": {"portalId": 8727216, "listId": 76, "internalListId": 76, "createdAt": 1634044358328, "updatedAt": 1634044358328, "name": "test contact_list 10", "listType": "DYNAMIC", "filters": [[{"withinTimeMode": "PAST", "operator": "EQ", "filterFamily": "PropertyValue", "type": "string", "property": "twitterhandle", "value": "@hubspot"}]], "metaData": {"size": 0, "lastSizeChangeAt": 1634044379941, "processing": "DONE", "lastProcessingStateChangeAt": 1634044379722, "error": "", "listReferencesCount": null, "parentFolderId": null}, "archived": false, "teamIds": [], "dynamic": true}, "emitted_at": 1655280717915} +{"stream": "contact_lists", "data": {"portalId": 8727216, "listId": 77, "internalListId": 77, "createdAt": 1634044358876, "updatedAt": 1634044358876, "name": "test contact_list 11", "listType": "DYNAMIC", "filters": [[{"withinTimeMode": "PAST", "operator": "EQ", "filterFamily": "PropertyValue", "type": "string", "property": "twitterhandle", "value": "@hubspot"}]], "metaData": {"size": 0, "lastSizeChangeAt": 1634044379452, "processing": "DONE", "lastProcessingStateChangeAt": 1634044379368, "error": "", "listReferencesCount": null, "parentFolderId": null}, "archived": 
false, "teamIds": [], "dynamic": true}, "emitted_at": 1655280717915} +{"stream": "contact_lists", "data": {"portalId": 8727216, "listId": 78, "internalListId": 78, "createdAt": 1634044360239, "updatedAt": 1634044360239, "name": "test contact_list 12", "listType": "DYNAMIC", "filters": [[{"withinTimeMode": "PAST", "operator": "EQ", "filterFamily": "PropertyValue", "type": "string", "property": "twitterhandle", "value": "@hubspot"}]], "metaData": {"size": 0, "lastSizeChangeAt": 1634044379730, "processing": "DONE", "lastProcessingStateChangeAt": 1634044379662, "error": "", "listReferencesCount": null, "parentFolderId": null}, "archived": false, "teamIds": [], "dynamic": true}, "emitted_at": 1655280717916} +{"stream": "contact_lists", "data": {"portalId": 8727216, "listId": 79, "internalListId": 79, "createdAt": 1634044360723, "updatedAt": 1634044360723, "name": "test contact_list 13", "listType": "DYNAMIC", "filters": [[{"withinTimeMode": "PAST", "operator": "EQ", "filterFamily": "PropertyValue", "type": "string", "property": "twitterhandle", "value": "@hubspot"}]], "metaData": {"size": 0, "lastSizeChangeAt": 1634044380024, "processing": "DONE", "lastProcessingStateChangeAt": 1634044379931, "error": "", "listReferencesCount": null, "parentFolderId": null}, "archived": false, "teamIds": [], "dynamic": true}, "emitted_at": 1655280717916} +{"stream": "contact_lists", "data": {"portalId": 8727216, "listId": 80, "internalListId": 80, "createdAt": 1634044361203, "updatedAt": 1634044361203, "name": "test contact_list 14", "listType": "DYNAMIC", "filters": [[{"withinTimeMode": "PAST", "operator": "EQ", "filterFamily": "PropertyValue", "type": "string", "property": "twitterhandle", "value": "@hubspot"}]], "metaData": {"size": 0, "lastSizeChangeAt": 1634044376402, "processing": "DONE", "lastProcessingStateChangeAt": 1634044376301, "error": "", "listReferencesCount": null, "parentFolderId": null}, "archived": false, "teamIds": [], "dynamic": true}, "emitted_at": 1655280717917} 
+{"stream": "contact_lists", "data": {"portalId": 8727216, "listId": 81, "internalListId": 81, "createdAt": 1634044362132, "updatedAt": 1634044362132, "name": "test contact_list 15", "listType": "DYNAMIC", "filters": [[{"withinTimeMode": "PAST", "operator": "EQ", "filterFamily": "PropertyValue", "type": "string", "property": "twitterhandle", "value": "@hubspot"}]], "metaData": {"size": 0, "lastSizeChangeAt": 1634044384827, "processing": "DONE", "lastProcessingStateChangeAt": 1634044384724, "error": "", "listReferencesCount": null, "parentFolderId": null}, "archived": false, "teamIds": [], "dynamic": true}, "emitted_at": 1655280717917} +{"stream": "contact_lists", "data": {"portalId": 8727216, "listId": 82, "internalListId": 82, "createdAt": 1634044362732, "updatedAt": 1634044362732, "name": "test contact_list 16", "listType": "DYNAMIC", "filters": [[{"withinTimeMode": "PAST", "operator": "EQ", "filterFamily": "PropertyValue", "type": "string", "property": "twitterhandle", "value": "@hubspot"}]], "metaData": {"size": 0, "lastSizeChangeAt": 1634044383093, "processing": "DONE", "lastProcessingStateChangeAt": 1634044382989, "error": "", "listReferencesCount": null, "parentFolderId": null}, "archived": false, "teamIds": [], "dynamic": true}, "emitted_at": 1655280717917} +{"stream": "contact_lists", "data": {"portalId": 8727216, "listId": 83, "internalListId": 83, "createdAt": 1634044363323, "updatedAt": 1634044363323, "name": "test contact_list 17", "listType": "DYNAMIC", "filters": [[{"withinTimeMode": "PAST", "operator": "EQ", "filterFamily": "PropertyValue", "type": "string", "property": "twitterhandle", "value": "@hubspot"}]], "metaData": {"size": 0, "lastSizeChangeAt": 1634044379604, "processing": "DONE", "lastProcessingStateChangeAt": 1634044379483, "error": "", "listReferencesCount": null, "parentFolderId": null}, "archived": false, "teamIds": [], "dynamic": true}, "emitted_at": 1655280717918} +{"stream": "contact_lists", "data": {"portalId": 8727216, "listId": 
84, "internalListId": 84, "createdAt": 1634044364197, "updatedAt": 1634044364197, "name": "test contact_list 18", "listType": "DYNAMIC", "filters": [[{"withinTimeMode": "PAST", "operator": "EQ", "filterFamily": "PropertyValue", "type": "string", "property": "twitterhandle", "value": "@hubspot"}]], "metaData": {"size": 0, "lastSizeChangeAt": 1634044385083, "processing": "DONE", "lastProcessingStateChangeAt": 1634044385029, "error": "", "listReferencesCount": null, "parentFolderId": null}, "archived": false, "teamIds": [], "dynamic": true}, "emitted_at": 1655280717918} +{"stream": "contact_lists", "data": {"portalId": 8727216, "listId": 85, "internalListId": 85, "createdAt": 1634044364700, "updatedAt": 1634044364700, "name": "test contact_list 19", "listType": "DYNAMIC", "filters": [[{"withinTimeMode": "PAST", "operator": "EQ", "filterFamily": "PropertyValue", "type": "string", "property": "twitterhandle", "value": "@hubspot"}]], "metaData": {"size": 0, "lastSizeChangeAt": 1634044382961, "processing": "DONE", "lastProcessingStateChangeAt": 1634044382644, "error": "", "listReferencesCount": null, "parentFolderId": null}, "archived": false, "teamIds": [], "dynamic": true}, "emitted_at": 1655280717919} +{"stream": "contact_lists", "data": {"portalId": 8727216, "listId": 86, "internalListId": 86, "createdAt": 1634044365223, "updatedAt": 1634044365223, "name": "test contact_list 20", "listType": "DYNAMIC", "filters": [[{"withinTimeMode": "PAST", "operator": "EQ", "filterFamily": "PropertyValue", "type": "string", "property": "twitterhandle", "value": "@hubspot"}]], "metaData": {"size": 0, "lastSizeChangeAt": 1634044380904, "processing": "DONE", "lastProcessingStateChangeAt": 1634044380675, "error": "", "listReferencesCount": null, "parentFolderId": null}, "archived": false, "teamIds": [], "dynamic": true}, "emitted_at": 1655280717919} +{"stream": "contact_lists", "data": {"portalId": 8727216, "listId": 87, "internalListId": 87, "createdAt": 1634044365715, "updatedAt": 
1634044365715, "name": "test contact_list 21", "listType": "DYNAMIC", "filters": [[{"withinTimeMode": "PAST", "operator": "EQ", "filterFamily": "PropertyValue", "type": "string", "property": "twitterhandle", "value": "@hubspot"}]], "metaData": {"size": 0, "lastSizeChangeAt": 1634044386709, "processing": "DONE", "lastProcessingStateChangeAt": 1634044386509, "error": "", "listReferencesCount": null, "parentFolderId": null}, "archived": false, "teamIds": [], "dynamic": true}, "emitted_at": 1655280717920} +{"stream": "contact_lists", "data": {"portalId": 8727216, "listId": 88, "internalListId": 88, "createdAt": 1634044366220, "updatedAt": 1634044366220, "name": "test contact_list 22", "listType": "DYNAMIC", "filters": [[{"withinTimeMode": "PAST", "operator": "EQ", "filterFamily": "PropertyValue", "type": "string", "property": "twitterhandle", "value": "@hubspot"}]], "metaData": {"size": 0, "lastSizeChangeAt": 1634044381593, "processing": "DONE", "lastProcessingStateChangeAt": 1634044381519, "error": "", "listReferencesCount": null, "parentFolderId": null}, "archived": false, "teamIds": [], "dynamic": true}, "emitted_at": 1655280717920} +{"stream": "contact_lists", "data": {"portalId": 8727216, "listId": 89, "internalListId": 89, "createdAt": 1634044366694, "updatedAt": 1634044366694, "name": "test contact_list 23", "listType": "DYNAMIC", "filters": [[{"withinTimeMode": "PAST", "operator": "EQ", "filterFamily": "PropertyValue", "type": "string", "property": "twitterhandle", "value": "@hubspot"}]], "metaData": {"size": 0, "lastSizeChangeAt": 1634044384324, "processing": "DONE", "lastProcessingStateChangeAt": 1634044384242, "error": "", "listReferencesCount": null, "parentFolderId": null}, "archived": false, "teamIds": [], "dynamic": true}, "emitted_at": 1655280717920} +{"stream": "contact_lists", "data": {"portalId": 8727216, "listId": 90, "internalListId": 90, "createdAt": 1634044367238, "updatedAt": 1634044367238, "name": "test contact_list 24", "listType": "DYNAMIC", 
"filters": [[{"withinTimeMode": "PAST", "operator": "EQ", "filterFamily": "PropertyValue", "type": "string", "property": "twitterhandle", "value": "@hubspot"}]], "metaData": {"size": 0, "lastSizeChangeAt": 1634044390445, "processing": "DONE", "lastProcessingStateChangeAt": 1634044390330, "error": "", "listReferencesCount": null, "parentFolderId": null}, "archived": false, "teamIds": [], "dynamic": true}, "emitted_at": 1655280717921} +{"stream": "contact_lists", "data": {"portalId": 8727216, "listId": 91, "internalListId": 91, "createdAt": 1634044367729, "updatedAt": 1634044367729, "name": "test contact_list 25", "listType": "DYNAMIC", "filters": [[{"withinTimeMode": "PAST", "operator": "EQ", "filterFamily": "PropertyValue", "type": "string", "property": "twitterhandle", "value": "@hubspot"}]], "metaData": {"size": 0, "lastSizeChangeAt": 1634044382353, "processing": "DONE", "lastProcessingStateChangeAt": 1634044382175, "error": "", "listReferencesCount": null, "parentFolderId": null}, "archived": false, "teamIds": [], "dynamic": true}, "emitted_at": 1655280717921} +{"stream": "contact_lists", "data": {"portalId": 8727216, "listId": 92, "internalListId": 92, "createdAt": 1634044368258, "updatedAt": 1634044368258, "name": "test contact_list 26", "listType": "DYNAMIC", "filters": [[{"withinTimeMode": "PAST", "operator": "EQ", "filterFamily": "PropertyValue", "type": "string", "property": "twitterhandle", "value": "@hubspot"}]], "metaData": {"size": 0, "lastSizeChangeAt": 1634044387006, "processing": "DONE", "lastProcessingStateChangeAt": 1634044386891, "error": "", "listReferencesCount": null, "parentFolderId": null}, "archived": false, "teamIds": [], "dynamic": true}, "emitted_at": 1655280717922} +{"stream": "contact_lists", "data": {"portalId": 8727216, "listId": 93, "internalListId": 93, "createdAt": 1634044368786, "updatedAt": 1634044368786, "name": "test contact_list 27", "listType": "DYNAMIC", "filters": [[{"withinTimeMode": "PAST", "operator": "EQ", 
"filterFamily": "PropertyValue", "type": "string", "property": "twitterhandle", "value": "@hubspot"}]], "metaData": {"size": 0, "lastSizeChangeAt": 1634044390556, "processing": "DONE", "lastProcessingStateChangeAt": 1634044390400, "error": "", "listReferencesCount": null, "parentFolderId": null}, "archived": false, "teamIds": [], "dynamic": true}, "emitted_at": 1655280717922} +{"stream": "contact_lists", "data": {"portalId": 8727216, "listId": 94, "internalListId": 94, "createdAt": 1634044369329, "updatedAt": 1634044369329, "name": "test contact_list 28", "listType": "DYNAMIC", "filters": [[{"withinTimeMode": "PAST", "operator": "EQ", "filterFamily": "PropertyValue", "type": "string", "property": "twitterhandle", "value": "@hubspot"}]], "metaData": {"size": 0, "lastSizeChangeAt": 1634044390828, "processing": "DONE", "lastProcessingStateChangeAt": 1634044390743, "error": "", "listReferencesCount": null, "parentFolderId": null}, "archived": false, "teamIds": [], "dynamic": true}, "emitted_at": 1655280717923} +{"stream": "contact_lists", "data": {"portalId": 8727216, "listId": 95, "internalListId": 95, "createdAt": 1634044369819, "updatedAt": 1634044369819, "name": "test contact_list 29", "listType": "DYNAMIC", "filters": [[{"withinTimeMode": "PAST", "operator": "EQ", "filterFamily": "PropertyValue", "type": "string", "property": "twitterhandle", "value": "@hubspot"}]], "metaData": {"size": 0, "lastSizeChangeAt": 1634044388825, "processing": "DONE", "lastProcessingStateChangeAt": 1634044388651, "error": "", "listReferencesCount": null, "parentFolderId": null}, "archived": false, "teamIds": [], "dynamic": true}, "emitted_at": 1655280717923} +{"stream": "contact_lists", "data": {"portalId": 8727216, "listId": 96, "internalListId": 96, "createdAt": 1634044370367, "updatedAt": 1634044370367, "name": "test contact_list 30", "listType": "DYNAMIC", "filters": [[{"withinTimeMode": "PAST", "operator": "EQ", "filterFamily": "PropertyValue", "type": "string", "property": 
"twitterhandle", "value": "@hubspot"}]], "metaData": {"size": 0, "lastSizeChangeAt": 1634044386887, "processing": "DONE", "lastProcessingStateChangeAt": 1634044386808, "error": "", "listReferencesCount": null, "parentFolderId": null}, "archived": false, "teamIds": [], "dynamic": true}, "emitted_at": 1655280717923} +{"stream": "contact_lists", "data": {"portalId": 8727216, "listId": 97, "internalListId": 97, "createdAt": 1634044371229, "updatedAt": 1634044371229, "name": "test contact_list 31", "listType": "DYNAMIC", "filters": [[{"withinTimeMode": "PAST", "operator": "EQ", "filterFamily": "PropertyValue", "type": "string", "property": "twitterhandle", "value": "@hubspot"}]], "metaData": {"size": 0, "lastSizeChangeAt": 1634044388201, "processing": "DONE", "lastProcessingStateChangeAt": 1634044388124, "error": "", "listReferencesCount": null, "parentFolderId": null}, "archived": false, "teamIds": [], "dynamic": true}, "emitted_at": 1655280717924} +{"stream": "contact_lists", "data": {"portalId": 8727216, "listId": 98, "internalListId": 98, "createdAt": 1634044371752, "updatedAt": 1634044371752, "name": "test contact_list 32", "listType": "DYNAMIC", "filters": [[{"withinTimeMode": "PAST", "operator": "EQ", "filterFamily": "PropertyValue", "type": "string", "property": "twitterhandle", "value": "@hubspot"}]], "metaData": {"size": 0, "lastSizeChangeAt": 1634044397922, "processing": "DONE", "lastProcessingStateChangeAt": 1634044397837, "error": "", "listReferencesCount": null, "parentFolderId": null}, "archived": false, "teamIds": [], "dynamic": true}, "emitted_at": 1655280717924} +{"stream": "contact_lists", "data": {"portalId": 8727216, "listId": 99, "internalListId": 99, "createdAt": 1634044372264, "updatedAt": 1634044372264, "name": "test contact_list 33", "listType": "DYNAMIC", "filters": [[{"withinTimeMode": "PAST", "operator": "EQ", "filterFamily": "PropertyValue", "type": "string", "property": "twitterhandle", "value": "@hubspot"}]], "metaData": {"size": 0, 
"lastSizeChangeAt": 1634044392721, "processing": "DONE", "lastProcessingStateChangeAt": 1634044392635, "error": "", "listReferencesCount": null, "parentFolderId": null}, "archived": false, "teamIds": [], "dynamic": true}, "emitted_at": 1655280717925} +{"stream": "contact_lists", "data": {"portalId": 8727216, "listId": 100, "internalListId": 100, "createdAt": 1634044372765, "updatedAt": 1634044372765, "name": "test contact_list 34", "listType": "DYNAMIC", "filters": [[{"withinTimeMode": "PAST", "operator": "EQ", "filterFamily": "PropertyValue", "type": "string", "property": "twitterhandle", "value": "@hubspot"}]], "metaData": {"size": 0, "lastSizeChangeAt": 1634044396116, "processing": "DONE", "lastProcessingStateChangeAt": 1634044396048, "error": "", "listReferencesCount": null, "parentFolderId": null}, "archived": false, "teamIds": [], "dynamic": true}, "emitted_at": 1655280717925} +{"stream": "contact_lists", "data": {"portalId": 8727216, "listId": 101, "internalListId": 101, "createdAt": 1634044373208, "updatedAt": 1634044373208, "name": "test contact_list 35", "listType": "DYNAMIC", "filters": [[{"withinTimeMode": "PAST", "operator": "EQ", "filterFamily": "PropertyValue", "type": "string", "property": "twitterhandle", "value": "@hubspot"}]], "metaData": {"size": 0, "lastSizeChangeAt": 1634044397644, "processing": "DONE", "lastProcessingStateChangeAt": 1634044397551, "error": "", "listReferencesCount": null, "parentFolderId": null}, "archived": false, "teamIds": [], "dynamic": true}, "emitted_at": 1655280717926} +{"stream": "contact_lists", "data": {"portalId": 8727216, "listId": 102, "internalListId": 102, "createdAt": 1634044373685, "updatedAt": 1634044373685, "name": "test contact_list 36", "listType": "DYNAMIC", "filters": [[{"withinTimeMode": "PAST", "operator": "EQ", "filterFamily": "PropertyValue", "type": "string", "property": "twitterhandle", "value": "@hubspot"}]], "metaData": {"size": 0, "lastSizeChangeAt": 1634044395923, "processing": "DONE", 
"lastProcessingStateChangeAt": 1634044395743, "error": "", "listReferencesCount": null, "parentFolderId": null}, "archived": false, "teamIds": [], "dynamic": true}, "emitted_at": 1655280717926} +{"stream": "contact_lists", "data": {"portalId": 8727216, "listId": 103, "internalListId": 103, "createdAt": 1634044374187, "updatedAt": 1634044374187, "name": "test contact_list 37", "listType": "DYNAMIC", "filters": [[{"withinTimeMode": "PAST", "operator": "EQ", "filterFamily": "PropertyValue", "type": "string", "property": "twitterhandle", "value": "@hubspot"}]], "metaData": {"size": 0, "lastSizeChangeAt": 1634044396021, "processing": "DONE", "lastProcessingStateChangeAt": 1634044395943, "error": "", "listReferencesCount": null, "parentFolderId": null}, "archived": false, "teamIds": [], "dynamic": true}, "emitted_at": 1655280717926} +{"stream": "contact_lists", "data": {"portalId": 8727216, "listId": 104, "internalListId": 104, "createdAt": 1634044374694, "updatedAt": 1634044374694, "name": "test contact_list 38", "listType": "DYNAMIC", "filters": [[{"withinTimeMode": "PAST", "operator": "EQ", "filterFamily": "PropertyValue", "type": "string", "property": "twitterhandle", "value": "@hubspot"}]], "metaData": {"size": 0, "lastSizeChangeAt": 1634044395741, "processing": "DONE", "lastProcessingStateChangeAt": 1634044395634, "error": "", "listReferencesCount": null, "parentFolderId": null}, "archived": false, "teamIds": [], "dynamic": true}, "emitted_at": 1655280717927} +{"stream": "contact_lists", "data": {"portalId": 8727216, "listId": 105, "internalListId": 105, "createdAt": 1634044375168, "updatedAt": 1634044375168, "name": "test contact_list 39", "listType": "DYNAMIC", "filters": [[{"withinTimeMode": "PAST", "operator": "EQ", "filterFamily": "PropertyValue", "type": "string", "property": "twitterhandle", "value": "@hubspot"}]], "metaData": {"size": 0, "lastSizeChangeAt": 1634044398168, "processing": "DONE", "lastProcessingStateChangeAt": 1634044398071, "error": "", 
"listReferencesCount": null, "parentFolderId": null}, "archived": false, "teamIds": [], "dynamic": true}, "emitted_at": 1655280717927} +{"stream": "contact_lists", "data": {"portalId": 8727216, "listId": 106, "internalListId": 106, "createdAt": 1634044375645, "updatedAt": 1634044375645, "name": "test contact_list 40", "listType": "DYNAMIC", "filters": [[{"withinTimeMode": "PAST", "operator": "EQ", "filterFamily": "PropertyValue", "type": "string", "property": "twitterhandle", "value": "@hubspot"}]], "metaData": {"size": 0, "lastSizeChangeAt": 1634044393436, "processing": "DONE", "lastProcessingStateChangeAt": 1634044393342, "error": "", "listReferencesCount": null, "parentFolderId": null}, "archived": false, "teamIds": [], "dynamic": true}, "emitted_at": 1655280717928} +{"stream": "contact_lists", "data": {"portalId": 8727216, "listId": 107, "internalListId": 107, "createdAt": 1634044376113, "updatedAt": 1634044376113, "name": "test contact_list 41", "listType": "DYNAMIC", "filters": [[{"withinTimeMode": "PAST", "operator": "EQ", "filterFamily": "PropertyValue", "type": "string", "property": "twitterhandle", "value": "@hubspot"}]], "metaData": {"size": 0, "lastSizeChangeAt": 1634044396221, "processing": "DONE", "lastProcessingStateChangeAt": 1634044396164, "error": "", "listReferencesCount": null, "parentFolderId": null}, "archived": false, "teamIds": [], "dynamic": true}, "emitted_at": 1655280717928} +{"stream": "contact_lists", "data": {"portalId": 8727216, "listId": 108, "internalListId": 108, "createdAt": 1634044376557, "updatedAt": 1634044376557, "name": "test contact_list 42", "listType": "DYNAMIC", "filters": [[{"withinTimeMode": "PAST", "operator": "EQ", "filterFamily": "PropertyValue", "type": "string", "property": "twitterhandle", "value": "@hubspot"}]], "metaData": {"size": 0, "lastSizeChangeAt": 1634044396161, "processing": "DONE", "lastProcessingStateChangeAt": 1634044396091, "error": "", "listReferencesCount": null, "parentFolderId": null}, 
"archived": false, "teamIds": [], "dynamic": true}, "emitted_at": 1655280717929} +{"stream": "contact_lists", "data": {"portalId": 8727216, "listId": 109, "internalListId": 109, "createdAt": 1634044376999, "updatedAt": 1634044376999, "name": "test contact_list 43", "listType": "DYNAMIC", "filters": [[{"withinTimeMode": "PAST", "operator": "EQ", "filterFamily": "PropertyValue", "type": "string", "property": "twitterhandle", "value": "@hubspot"}]], "metaData": {"size": 0, "lastSizeChangeAt": 1634044399465, "processing": "DONE", "lastProcessingStateChangeAt": 1634044399313, "error": "", "listReferencesCount": null, "parentFolderId": null}, "archived": false, "teamIds": [], "dynamic": true}, "emitted_at": 1655280717929} +{"stream": "contact_lists", "data": {"portalId": 8727216, "listId": 110, "internalListId": 110, "createdAt": 1634044377466, "updatedAt": 1634044377466, "name": "test contact_list 44", "listType": "DYNAMIC", "filters": [[{"withinTimeMode": "PAST", "operator": "EQ", "filterFamily": "PropertyValue", "type": "string", "property": "twitterhandle", "value": "@hubspot"}]], "metaData": {"size": 0, "lastSizeChangeAt": 1634044399985, "processing": "DONE", "lastProcessingStateChangeAt": 1634044399900, "error": "", "listReferencesCount": null, "parentFolderId": null}, "archived": false, "teamIds": [], "dynamic": true}, "emitted_at": 1655280717929} +{"stream": "contact_lists", "data": {"portalId": 8727216, "listId": 111, "internalListId": 111, "createdAt": 1634044377928, "updatedAt": 1634044377928, "name": "test contact_list 45", "listType": "DYNAMIC", "filters": [[{"withinTimeMode": "PAST", "operator": "EQ", "filterFamily": "PropertyValue", "type": "string", "property": "twitterhandle", "value": "@hubspot"}]], "metaData": {"size": 0, "lastSizeChangeAt": 1634044399310, "processing": "DONE", "lastProcessingStateChangeAt": 1634044399204, "error": "", "listReferencesCount": null, "parentFolderId": null}, "archived": false, "teamIds": [], "dynamic": true}, 
"emitted_at": 1655280717930} +{"stream": "contact_lists", "data": {"portalId": 8727216, "listId": 112, "internalListId": 112, "createdAt": 1634044378441, "updatedAt": 1634044378441, "name": "test contact_list 46", "listType": "DYNAMIC", "filters": [[{"withinTimeMode": "PAST", "operator": "EQ", "filterFamily": "PropertyValue", "type": "string", "property": "twitterhandle", "value": "@hubspot"}]], "metaData": {"size": 0, "lastSizeChangeAt": 1634044398939, "processing": "DONE", "lastProcessingStateChangeAt": 1634044398866, "error": "", "listReferencesCount": null, "parentFolderId": null}, "archived": false, "teamIds": [], "dynamic": true}, "emitted_at": 1655280717930} +{"stream": "contact_lists", "data": {"portalId": 8727216, "listId": 113, "internalListId": 113, "createdAt": 1634044378936, "updatedAt": 1634044378936, "name": "test contact_list 47", "listType": "DYNAMIC", "filters": [[{"withinTimeMode": "PAST", "operator": "EQ", "filterFamily": "PropertyValue", "type": "string", "property": "twitterhandle", "value": "@hubspot"}]], "metaData": {"size": 0, "lastSizeChangeAt": 1634044402393, "processing": "DONE", "lastProcessingStateChangeAt": 1634044402268, "error": "", "listReferencesCount": null, "parentFolderId": null}, "archived": false, "teamIds": [], "dynamic": true}, "emitted_at": 1655280717931} +{"stream": "contact_lists", "data": {"portalId": 8727216, "listId": 114, "internalListId": 114, "createdAt": 1634044379418, "updatedAt": 1634044379418, "name": "test contact_list 48", "listType": "DYNAMIC", "filters": [[{"withinTimeMode": "PAST", "operator": "EQ", "filterFamily": "PropertyValue", "type": "string", "property": "twitterhandle", "value": "@hubspot"}]], "metaData": {"size": 0, "lastSizeChangeAt": 1634044405259, "processing": "DONE", "lastProcessingStateChangeAt": 1634044405146, "error": "", "listReferencesCount": null, "parentFolderId": null}, "archived": false, "teamIds": [], "dynamic": true}, "emitted_at": 1655280717931} +{"stream": "contact_lists", 
"data": {"portalId": 8727216, "listId": 115, "internalListId": 115, "createdAt": 1634044379888, "updatedAt": 1634044379888, "name": "test contact_list 49", "listType": "DYNAMIC", "filters": [[{"withinTimeMode": "PAST", "operator": "EQ", "filterFamily": "PropertyValue", "type": "string", "property": "twitterhandle", "value": "@hubspot"}]], "metaData": {"size": 0, "lastSizeChangeAt": 1634044399505, "processing": "DONE", "lastProcessingStateChangeAt": 1634044399424, "error": "", "listReferencesCount": null, "parentFolderId": null}, "archived": false, "teamIds": [], "dynamic": true}, "emitted_at": 1655280717932} +{"stream": "contact_lists", "data": {"portalId": 8727216, "listId": 116, "internalListId": 116, "createdAt": 1634044380419, "updatedAt": 1634044380419, "name": "test contact_list 50", "listType": "DYNAMIC", "filters": [[{"withinTimeMode": "PAST", "operator": "EQ", "filterFamily": "PropertyValue", "type": "string", "property": "twitterhandle", "value": "@hubspot"}]], "metaData": {"size": 0, "lastSizeChangeAt": 1634044405024, "processing": "DONE", "lastProcessingStateChangeAt": 1634044404914, "error": "", "listReferencesCount": null, "parentFolderId": null}, "archived": false, "teamIds": [], "dynamic": true}, "emitted_at": 1655280717932} +{"stream": "contact_lists", "data": {"portalId": 8727216, "listId": 117, "internalListId": 117, "createdAt": 1634044380982, "updatedAt": 1634044380982, "name": "test contact_list 51", "listType": "DYNAMIC", "filters": [[{"withinTimeMode": "PAST", "operator": "EQ", "filterFamily": "PropertyValue", "type": "string", "property": "twitterhandle", "value": "@hubspot"}]], "metaData": {"size": 0, "lastSizeChangeAt": 1634044403055, "processing": "DONE", "lastProcessingStateChangeAt": 1634044402978, "error": "", "listReferencesCount": null, "parentFolderId": null}, "archived": false, "teamIds": [], "dynamic": true}, "emitted_at": 1655280717933} +{"stream": "contact_lists", "data": {"portalId": 8727216, "listId": 118, "internalListId": 
118, "createdAt": 1634044381460, "updatedAt": 1634044381460, "name": "test contact_list 52", "listType": "DYNAMIC", "filters": [[{"withinTimeMode": "PAST", "operator": "EQ", "filterFamily": "PropertyValue", "type": "string", "property": "twitterhandle", "value": "@hubspot"}]], "metaData": {"size": 0, "lastSizeChangeAt": 1634044401342, "processing": "DONE", "lastProcessingStateChangeAt": 1634044401255, "error": "", "listReferencesCount": null, "parentFolderId": null}, "archived": false, "teamIds": [], "dynamic": true}, "emitted_at": 1655280717933} +{"stream": "contact_lists", "data": {"portalId": 8727216, "listId": 119, "internalListId": 119, "createdAt": 1634044382349, "updatedAt": 1634044382349, "name": "test contact_list 53", "listType": "DYNAMIC", "filters": [[{"withinTimeMode": "PAST", "operator": "EQ", "filterFamily": "PropertyValue", "type": "string", "property": "twitterhandle", "value": "@hubspot"}]], "metaData": {"size": 0, "lastSizeChangeAt": 1634044399539, "processing": "DONE", "lastProcessingStateChangeAt": 1634044399418, "error": "", "listReferencesCount": null, "parentFolderId": null}, "archived": false, "teamIds": [], "dynamic": true}, "emitted_at": 1655280717934} +{"stream": "contact_lists", "data": {"portalId": 8727216, "listId": 120, "internalListId": 120, "createdAt": 1634044382780, "updatedAt": 1634044382780, "name": "test contact_list 54", "listType": "DYNAMIC", "filters": [[{"withinTimeMode": "PAST", "operator": "EQ", "filterFamily": "PropertyValue", "type": "string", "property": "twitterhandle", "value": "@hubspot"}]], "metaData": {"size": 0, "lastSizeChangeAt": 1634044399021, "processing": "DONE", "lastProcessingStateChangeAt": 1634044398941, "error": "", "listReferencesCount": null, "parentFolderId": null}, "archived": false, "teamIds": [], "dynamic": true}, "emitted_at": 1655280717934} +{"stream": "contact_lists", "data": {"portalId": 8727216, "listId": 121, "internalListId": 121, "createdAt": 1634044383322, "updatedAt": 1634044383322, 
"name": "test contact_list 55", "listType": "DYNAMIC", "filters": [[{"withinTimeMode": "PAST", "operator": "EQ", "filterFamily": "PropertyValue", "type": "string", "property": "twitterhandle", "value": "@hubspot"}]], "metaData": {"size": 0, "lastSizeChangeAt": 1634044402471, "processing": "DONE", "lastProcessingStateChangeAt": 1634044402376, "error": "", "listReferencesCount": null, "parentFolderId": null}, "archived": false, "teamIds": [], "dynamic": true}, "emitted_at": 1655280717934} +{"stream": "contact_lists", "data": {"portalId": 8727216, "listId": 122, "internalListId": 122, "createdAt": 1634044383900, "updatedAt": 1634044383900, "name": "test contact_list 56", "listType": "DYNAMIC", "filters": [[{"withinTimeMode": "PAST", "operator": "EQ", "filterFamily": "PropertyValue", "type": "string", "property": "twitterhandle", "value": "@hubspot"}]], "metaData": {"size": 0, "lastSizeChangeAt": 1634044402214, "processing": "DONE", "lastProcessingStateChangeAt": 1634044402124, "error": "", "listReferencesCount": null, "parentFolderId": null}, "archived": false, "teamIds": [], "dynamic": true}, "emitted_at": 1655280717935} +{"stream": "contact_lists", "data": {"portalId": 8727216, "listId": 123, "internalListId": 123, "createdAt": 1634044384408, "updatedAt": 1634044384408, "name": "test contact_list 57", "listType": "DYNAMIC", "filters": [[{"withinTimeMode": "PAST", "operator": "EQ", "filterFamily": "PropertyValue", "type": "string", "property": "twitterhandle", "value": "@hubspot"}]], "metaData": {"size": 0, "lastSizeChangeAt": 1634044404911, "processing": "DONE", "lastProcessingStateChangeAt": 1634044404821, "error": "", "listReferencesCount": null, "parentFolderId": null}, "archived": false, "teamIds": [], "dynamic": true}, "emitted_at": 1655280717935} +{"stream": "contact_lists", "data": {"portalId": 8727216, "listId": 124, "internalListId": 124, "createdAt": 1634044384980, "updatedAt": 1634044384980, "name": "test contact_list 58", "listType": "DYNAMIC", 
"filters": [[{"withinTimeMode": "PAST", "operator": "EQ", "filterFamily": "PropertyValue", "type": "string", "property": "twitterhandle", "value": "@hubspot"}]], "metaData": {"size": 0, "lastSizeChangeAt": 1634044402623, "processing": "DONE", "lastProcessingStateChangeAt": 1634044402557, "error": "", "listReferencesCount": null, "parentFolderId": null}, "archived": false, "teamIds": [], "dynamic": true}, "emitted_at": 1655280717936} +{"stream": "contact_lists", "data": {"portalId": 8727216, "listId": 125, "internalListId": 125, "createdAt": 1634044385503, "updatedAt": 1634044385503, "name": "test contact_list 59", "listType": "DYNAMIC", "filters": [[{"withinTimeMode": "PAST", "operator": "EQ", "filterFamily": "PropertyValue", "type": "string", "property": "twitterhandle", "value": "@hubspot"}]], "metaData": {"size": 0, "lastSizeChangeAt": 1634044407589, "processing": "DONE", "lastProcessingStateChangeAt": 1634044407530, "error": "", "listReferencesCount": null, "parentFolderId": null}, "archived": false, "teamIds": [], "dynamic": true}, "emitted_at": 1655280717936} +{"stream": "contact_lists", "data": {"portalId": 8727216, "listId": 126, "internalListId": 126, "createdAt": 1634044385972, "updatedAt": 1634044385972, "name": "test contact_list 60", "listType": "DYNAMIC", "filters": [[{"withinTimeMode": "PAST", "operator": "EQ", "filterFamily": "PropertyValue", "type": "string", "property": "twitterhandle", "value": "@hubspot"}]], "metaData": {"size": 0, "lastSizeChangeAt": 1634044402865, "processing": "DONE", "lastProcessingStateChangeAt": 1634044402674, "error": "", "listReferencesCount": null, "parentFolderId": null}, "archived": false, "teamIds": [], "dynamic": true}, "emitted_at": 1655280717936} +{"stream": "contact_lists", "data": {"portalId": 8727216, "listId": 127, "internalListId": 127, "createdAt": 1634044386476, "updatedAt": 1634044386476, "name": "test contact_list 61", "listType": "DYNAMIC", "filters": [[{"withinTimeMode": "PAST", "operator": "EQ", 
"filterFamily": "PropertyValue", "type": "string", "property": "twitterhandle", "value": "@hubspot"}]], "metaData": {"size": 0, "lastSizeChangeAt": 1634044403092, "processing": "DONE", "lastProcessingStateChangeAt": 1634044403024, "error": "", "listReferencesCount": null, "parentFolderId": null}, "archived": false, "teamIds": [], "dynamic": true}, "emitted_at": 1655280717937} +{"stream": "contact_lists", "data": {"portalId": 8727216, "listId": 128, "internalListId": 128, "createdAt": 1634044387303, "updatedAt": 1634044387303, "name": "test contact_list 62", "listType": "DYNAMIC", "filters": [[{"withinTimeMode": "PAST", "operator": "EQ", "filterFamily": "PropertyValue", "type": "string", "property": "twitterhandle", "value": "@hubspot"}]], "metaData": {"size": 0, "lastSizeChangeAt": 1634044404565, "processing": "DONE", "lastProcessingStateChangeAt": 1634044404479, "error": "", "listReferencesCount": null, "parentFolderId": null}, "archived": false, "teamIds": [], "dynamic": true}, "emitted_at": 1655280717937} +{"stream": "contact_lists", "data": {"portalId": 8727216, "listId": 129, "internalListId": 129, "createdAt": 1634044387777, "updatedAt": 1634044387777, "name": "test contact_list 63", "listType": "DYNAMIC", "filters": [[{"withinTimeMode": "PAST", "operator": "EQ", "filterFamily": "PropertyValue", "type": "string", "property": "twitterhandle", "value": "@hubspot"}]], "metaData": {"size": 0, "lastSizeChangeAt": 1634044411031, "processing": "DONE", "lastProcessingStateChangeAt": 1634044410779, "error": "", "listReferencesCount": null, "parentFolderId": null}, "archived": false, "teamIds": [], "dynamic": true}, "emitted_at": 1655280717938} +{"stream": "contact_lists", "data": {"portalId": 8727216, "listId": 130, "internalListId": 130, "createdAt": 1634044388278, "updatedAt": 1634044388278, "name": "test contact_list 64", "listType": "DYNAMIC", "filters": [[{"withinTimeMode": "PAST", "operator": "EQ", "filterFamily": "PropertyValue", "type": "string", "property": 
"twitterhandle", "value": "@hubspot"}]], "metaData": {"size": 0, "lastSizeChangeAt": 1634044407946, "processing": "DONE", "lastProcessingStateChangeAt": 1634044407872, "error": "", "listReferencesCount": null, "parentFolderId": null}, "archived": false, "teamIds": [], "dynamic": true}, "emitted_at": 1655280717938} +{"stream": "contact_lists", "data": {"portalId": 8727216, "listId": 131, "internalListId": 131, "createdAt": 1634044388851, "updatedAt": 1634044388851, "name": "test contact_list 65", "listType": "DYNAMIC", "filters": [[{"withinTimeMode": "PAST", "operator": "EQ", "filterFamily": "PropertyValue", "type": "string", "property": "twitterhandle", "value": "@hubspot"}]], "metaData": {"size": 0, "lastSizeChangeAt": 1634044411194, "processing": "DONE", "lastProcessingStateChangeAt": 1634044411028, "error": "", "listReferencesCount": null, "parentFolderId": null}, "archived": false, "teamIds": [], "dynamic": true}, "emitted_at": 1655280717938} +{"stream": "contact_lists", "data": {"portalId": 8727216, "listId": 132, "internalListId": 132, "createdAt": 1634044389379, "updatedAt": 1634044389379, "name": "test contact_list 66", "listType": "DYNAMIC", "filters": [[{"withinTimeMode": "PAST", "operator": "EQ", "filterFamily": "PropertyValue", "type": "string", "property": "twitterhandle", "value": "@hubspot"}]], "metaData": {"size": 0, "lastSizeChangeAt": 1634044410235, "processing": "DONE", "lastProcessingStateChangeAt": 1634044410096, "error": "", "listReferencesCount": null, "parentFolderId": null}, "archived": false, "teamIds": [], "dynamic": true}, "emitted_at": 1655280717939} +{"stream": "contact_lists", "data": {"portalId": 8727216, "listId": 133, "internalListId": 133, "createdAt": 1634044389957, "updatedAt": 1634044389957, "name": "test contact_list 67", "listType": "DYNAMIC", "filters": [[{"withinTimeMode": "PAST", "operator": "EQ", "filterFamily": "PropertyValue", "type": "string", "property": "twitterhandle", "value": "@hubspot"}]], "metaData": {"size": 0, 
"lastSizeChangeAt": 1634044407497, "processing": "DONE", "lastProcessingStateChangeAt": 1634044407431, "error": "", "listReferencesCount": null, "parentFolderId": null}, "archived": false, "teamIds": [], "dynamic": true}, "emitted_at": 1655280717939} +{"stream": "contact_lists", "data": {"portalId": 8727216, "listId": 134, "internalListId": 134, "createdAt": 1634044390494, "updatedAt": 1634044390494, "name": "test contact_list 68", "listType": "DYNAMIC", "filters": [[{"withinTimeMode": "PAST", "operator": "EQ", "filterFamily": "PropertyValue", "type": "string", "property": "twitterhandle", "value": "@hubspot"}]], "metaData": {"size": 0, "lastSizeChangeAt": 1634044413746, "processing": "DONE", "lastProcessingStateChangeAt": 1634044413683, "error": "", "listReferencesCount": null, "parentFolderId": null}, "archived": false, "teamIds": [], "dynamic": true}, "emitted_at": 1655280717940} +{"stream": "contact_lists", "data": {"portalId": 8727216, "listId": 135, "internalListId": 135, "createdAt": 1634044390992, "updatedAt": 1634044390992, "name": "test contact_list 69", "listType": "DYNAMIC", "filters": [[{"withinTimeMode": "PAST", "operator": "EQ", "filterFamily": "PropertyValue", "type": "string", "property": "twitterhandle", "value": "@hubspot"}]], "metaData": {"size": 0, "lastSizeChangeAt": 1634044410835, "processing": "DONE", "lastProcessingStateChangeAt": 1634044410757, "error": "", "listReferencesCount": null, "parentFolderId": null}, "archived": false, "teamIds": [], "dynamic": true}, "emitted_at": 1655280717940} +{"stream": "contact_lists", "data": {"portalId": 8727216, "listId": 136, "internalListId": 136, "createdAt": 1634044391492, "updatedAt": 1634044391492, "name": "test contact_list 70", "listType": "DYNAMIC", "filters": [[{"withinTimeMode": "PAST", "operator": "EQ", "filterFamily": "PropertyValue", "type": "string", "property": "twitterhandle", "value": "@hubspot"}]], "metaData": {"size": 0, "lastSizeChangeAt": 1634044410771, "processing": "DONE", 
"lastProcessingStateChangeAt": 1634044410577, "error": "", "listReferencesCount": null, "parentFolderId": null}, "archived": false, "teamIds": [], "dynamic": true}, "emitted_at": 1655280717940} +{"stream": "contact_lists", "data": {"portalId": 8727216, "listId": 137, "internalListId": 137, "createdAt": 1634044392043, "updatedAt": 1634044392043, "name": "test contact_list 71", "listType": "DYNAMIC", "filters": [[{"withinTimeMode": "PAST", "operator": "EQ", "filterFamily": "PropertyValue", "type": "string", "property": "twitterhandle", "value": "@hubspot"}]], "metaData": {"size": 0, "lastSizeChangeAt": 1634044416501, "processing": "DONE", "lastProcessingStateChangeAt": 1634044416446, "error": "", "listReferencesCount": null, "parentFolderId": null}, "archived": false, "teamIds": [], "dynamic": true}, "emitted_at": 1655280717941} +{"stream": "contact_lists", "data": {"portalId": 8727216, "listId": 138, "internalListId": 138, "createdAt": 1634044392544, "updatedAt": 1634044392544, "name": "test contact_list 72", "listType": "DYNAMIC", "filters": [[{"withinTimeMode": "PAST", "operator": "EQ", "filterFamily": "PropertyValue", "type": "string", "property": "twitterhandle", "value": "@hubspot"}]], "metaData": {"size": 0, "lastSizeChangeAt": 1634044408890, "processing": "DONE", "lastProcessingStateChangeAt": 1634044408806, "error": "", "listReferencesCount": null, "parentFolderId": null}, "archived": false, "teamIds": [], "dynamic": true}, "emitted_at": 1655280717941} +{"stream": "contact_lists", "data": {"portalId": 8727216, "listId": 139, "internalListId": 139, "createdAt": 1634044393035, "updatedAt": 1634044393035, "name": "test contact_list 73", "listType": "DYNAMIC", "filters": [[{"withinTimeMode": "PAST", "operator": "EQ", "filterFamily": "PropertyValue", "type": "string", "property": "twitterhandle", "value": "@hubspot"}]], "metaData": {"size": 0, "lastSizeChangeAt": 1634044415569, "processing": "DONE", "lastProcessingStateChangeAt": 1634044415417, "error": "", 
"listReferencesCount": null, "parentFolderId": null}, "archived": false, "teamIds": [], "dynamic": true}, "emitted_at": 1655280717942} +{"stream": "contact_lists", "data": {"portalId": 8727216, "listId": 140, "internalListId": 140, "createdAt": 1634044393476, "updatedAt": 1634044393476, "name": "test contact_list 74", "listType": "DYNAMIC", "filters": [[{"withinTimeMode": "PAST", "operator": "EQ", "filterFamily": "PropertyValue", "type": "string", "property": "twitterhandle", "value": "@hubspot"}]], "metaData": {"size": 0, "lastSizeChangeAt": 1634044416444, "processing": "DONE", "lastProcessingStateChangeAt": 1634044416382, "error": "", "listReferencesCount": null, "parentFolderId": null}, "archived": false, "teamIds": [], "dynamic": true}, "emitted_at": 1655280717942} +{"stream": "contact_lists", "data": {"portalId": 8727216, "listId": 141, "internalListId": 141, "createdAt": 1634044394294, "updatedAt": 1634044394294, "name": "test contact_list 75", "listType": "DYNAMIC", "filters": [[{"withinTimeMode": "PAST", "operator": "EQ", "filterFamily": "PropertyValue", "type": "string", "property": "twitterhandle", "value": "@hubspot"}]], "metaData": {"size": 0, "lastSizeChangeAt": 1634044414311, "processing": "DONE", "lastProcessingStateChangeAt": 1634044414213, "error": "", "listReferencesCount": null, "parentFolderId": null}, "archived": false, "teamIds": [], "dynamic": true}, "emitted_at": 1655280717942} +{"stream": "contact_lists", "data": {"portalId": 8727216, "listId": 142, "internalListId": 142, "createdAt": 1634044394834, "updatedAt": 1634044394834, "name": "test contact_list 76", "listType": "DYNAMIC", "filters": [[{"withinTimeMode": "PAST", "operator": "EQ", "filterFamily": "PropertyValue", "type": "string", "property": "twitterhandle", "value": "@hubspot"}]], "metaData": {"size": 0, "lastSizeChangeAt": 1634044413679, "processing": "DONE", "lastProcessingStateChangeAt": 1634044413585, "error": "", "listReferencesCount": null, "parentFolderId": null}, 
"archived": false, "teamIds": [], "dynamic": true}, "emitted_at": 1655280717943} +{"stream": "contact_lists", "data": {"portalId": 8727216, "listId": 143, "internalListId": 143, "createdAt": 1634044395403, "updatedAt": 1634044395403, "name": "test contact_list 77", "listType": "DYNAMIC", "filters": [[{"withinTimeMode": "PAST", "operator": "EQ", "filterFamily": "PropertyValue", "type": "string", "property": "twitterhandle", "value": "@hubspot"}]], "metaData": {"size": 0, "lastSizeChangeAt": 1634044420156, "processing": "DONE", "lastProcessingStateChangeAt": 1634044419996, "error": "", "listReferencesCount": null, "parentFolderId": null}, "archived": false, "teamIds": [], "dynamic": true}, "emitted_at": 1655280717943} +{"stream": "contact_lists", "data": {"portalId": 8727216, "listId": 144, "internalListId": 144, "createdAt": 1634044395863, "updatedAt": 1634044395863, "name": "test contact_list 78", "listType": "DYNAMIC", "filters": [[{"withinTimeMode": "PAST", "operator": "EQ", "filterFamily": "PropertyValue", "type": "string", "property": "twitterhandle", "value": "@hubspot"}]], "metaData": {"size": 0, "lastSizeChangeAt": 1634044410916, "processing": "DONE", "lastProcessingStateChangeAt": 1634044410837, "error": "", "listReferencesCount": null, "parentFolderId": null}, "archived": false, "teamIds": [], "dynamic": true}, "emitted_at": 1655280717944} +{"stream": "contact_lists", "data": {"portalId": 8727216, "listId": 145, "internalListId": 145, "createdAt": 1634044396322, "updatedAt": 1634044396322, "name": "test contact_list 79", "listType": "DYNAMIC", "filters": [[{"withinTimeMode": "PAST", "operator": "EQ", "filterFamily": "PropertyValue", "type": "string", "property": "twitterhandle", "value": "@hubspot"}]], "metaData": {"size": 0, "lastSizeChangeAt": 1634044414071, "processing": "DONE", "lastProcessingStateChangeAt": 1634044413997, "error": "", "listReferencesCount": null, "parentFolderId": null}, "archived": false, "teamIds": [], "dynamic": true}, 
"emitted_at": 1655280717944} +{"stream": "contact_lists", "data": {"portalId": 8727216, "listId": 146, "internalListId": 146, "createdAt": 1634044396797, "updatedAt": 1634044396797, "name": "test contact_list 80", "listType": "DYNAMIC", "filters": [[{"withinTimeMode": "PAST", "operator": "EQ", "filterFamily": "PropertyValue", "type": "string", "property": "twitterhandle", "value": "@hubspot"}]], "metaData": {"size": 0, "lastSizeChangeAt": 1634044418773, "processing": "DONE", "lastProcessingStateChangeAt": 1634044418703, "error": "", "listReferencesCount": null, "parentFolderId": null}, "archived": false, "teamIds": [], "dynamic": true}, "emitted_at": 1655280717944} +{"stream": "contact_lists", "data": {"portalId": 8727216, "listId": 147, "internalListId": 147, "createdAt": 1634044397262, "updatedAt": 1634044397262, "name": "test contact_list 81", "listType": "DYNAMIC", "filters": [[{"withinTimeMode": "PAST", "operator": "EQ", "filterFamily": "PropertyValue", "type": "string", "property": "twitterhandle", "value": "@hubspot"}]], "metaData": {"size": 0, "lastSizeChangeAt": 1634044415518, "processing": "DONE", "lastProcessingStateChangeAt": 1634044415462, "error": "", "listReferencesCount": null, "parentFolderId": null}, "archived": false, "teamIds": [], "dynamic": true}, "emitted_at": 1655280717945} +{"stream": "contact_lists", "data": {"portalId": 8727216, "listId": 148, "internalListId": 148, "createdAt": 1634044398079, "updatedAt": 1634044398079, "name": "test contact_list 82", "listType": "DYNAMIC", "filters": [[{"withinTimeMode": "PAST", "operator": "EQ", "filterFamily": "PropertyValue", "type": "string", "property": "twitterhandle", "value": "@hubspot"}]], "metaData": {"size": 0, "lastSizeChangeAt": 1634044410718, "processing": "DONE", "lastProcessingStateChangeAt": 1634044410597, "error": "", "listReferencesCount": null, "parentFolderId": null}, "archived": false, "teamIds": [], "dynamic": true}, "emitted_at": 1655280717945} +{"stream": "contact_lists", 
"data": {"portalId": 8727216, "listId": 149, "internalListId": 149, "createdAt": 1634044398932, "updatedAt": 1634044398932, "name": "test contact_list 83", "listType": "DYNAMIC", "filters": [[{"withinTimeMode": "PAST", "operator": "EQ", "filterFamily": "PropertyValue", "type": "string", "property": "twitterhandle", "value": "@hubspot"}]], "metaData": {"size": 0, "lastSizeChangeAt": 1634044412807, "processing": "DONE", "lastProcessingStateChangeAt": 1634044412503, "error": "", "listReferencesCount": null, "parentFolderId": null}, "archived": false, "teamIds": [], "dynamic": true}, "emitted_at": 1655280717946} +{"stream": "contact_lists", "data": {"portalId": 8727216, "listId": 150, "internalListId": 150, "createdAt": 1634044399758, "updatedAt": 1634044399758, "name": "test contact_list 84", "listType": "DYNAMIC", "filters": [[{"withinTimeMode": "PAST", "operator": "EQ", "filterFamily": "PropertyValue", "type": "string", "property": "twitterhandle", "value": "@hubspot"}]], "metaData": {"size": 0, "lastSizeChangeAt": 1634044425734, "processing": "DONE", "lastProcessingStateChangeAt": 1634044425670, "error": "", "listReferencesCount": null, "parentFolderId": null}, "archived": false, "teamIds": [], "dynamic": true}, "emitted_at": 1655280717946} +{"stream": "contact_lists", "data": {"portalId": 8727216, "listId": 151, "internalListId": 151, "createdAt": 1634044400187, "updatedAt": 1634044400187, "name": "test contact_list 85", "listType": "DYNAMIC", "filters": [[{"withinTimeMode": "PAST", "operator": "EQ", "filterFamily": "PropertyValue", "type": "string", "property": "twitterhandle", "value": "@hubspot"}]], "metaData": {"size": 0, "lastSizeChangeAt": 1634044416350, "processing": "DONE", "lastProcessingStateChangeAt": 1634044416292, "error": "", "listReferencesCount": null, "parentFolderId": null}, "archived": false, "teamIds": [], "dynamic": true}, "emitted_at": 1655280717946} +{"stream": "contact_lists", "data": {"portalId": 8727216, "listId": 152, "internalListId": 
152, "createdAt": 1634044400669, "updatedAt": 1634044400669, "name": "test contact_list 86", "listType": "DYNAMIC", "filters": [[{"withinTimeMode": "PAST", "operator": "EQ", "filterFamily": "PropertyValue", "type": "string", "property": "twitterhandle", "value": "@hubspot"}]], "metaData": {"size": 0, "lastSizeChangeAt": 1634044416577, "processing": "DONE", "lastProcessingStateChangeAt": 1634044416522, "error": "", "listReferencesCount": null, "parentFolderId": null}, "archived": false, "teamIds": [], "dynamic": true}, "emitted_at": 1655280717947} +{"stream": "contact_lists", "data": {"portalId": 8727216, "listId": 153, "internalListId": 153, "createdAt": 1634044401532, "updatedAt": 1634044401532, "name": "test contact_list 87", "listType": "DYNAMIC", "filters": [[{"withinTimeMode": "PAST", "operator": "EQ", "filterFamily": "PropertyValue", "type": "string", "property": "twitterhandle", "value": "@hubspot"}]], "metaData": {"size": 0, "lastSizeChangeAt": 1634044419863, "processing": "DONE", "lastProcessingStateChangeAt": 1634044419551, "error": "", "listReferencesCount": null, "parentFolderId": null}, "archived": false, "teamIds": [], "dynamic": true}, "emitted_at": 1655280717947} +{"stream": "contact_lists", "data": {"portalId": 8727216, "listId": 154, "internalListId": 154, "createdAt": 1634044401969, "updatedAt": 1634044401969, "name": "test contact_list 88", "listType": "DYNAMIC", "filters": [[{"withinTimeMode": "PAST", "operator": "EQ", "filterFamily": "PropertyValue", "type": "string", "property": "twitterhandle", "value": "@hubspot"}]], "metaData": {"size": 0, "lastSizeChangeAt": 1634044417409, "processing": "DONE", "lastProcessingStateChangeAt": 1634044417345, "error": "", "listReferencesCount": null, "parentFolderId": null}, "archived": false, "teamIds": [], "dynamic": true}, "emitted_at": 1655280717948} +{"stream": "contact_lists", "data": {"portalId": 8727216, "listId": 155, "internalListId": 155, "createdAt": 1634044402501, "updatedAt": 1634044402501, 
"name": "test contact_list 89", "listType": "DYNAMIC", "filters": [[{"withinTimeMode": "PAST", "operator": "EQ", "filterFamily": "PropertyValue", "type": "string", "property": "twitterhandle", "value": "@hubspot"}]], "metaData": {"size": 0, "lastSizeChangeAt": 1634044420588, "processing": "DONE", "lastProcessingStateChangeAt": 1634044420530, "error": "", "listReferencesCount": null, "parentFolderId": null}, "archived": false, "teamIds": [], "dynamic": true}, "emitted_at": 1655280717948} +{"stream": "contact_lists", "data": {"portalId": 8727216, "listId": 156, "internalListId": 156, "createdAt": 1634044402938, "updatedAt": 1634044402938, "name": "test contact_list 90", "listType": "DYNAMIC", "filters": [[{"withinTimeMode": "PAST", "operator": "EQ", "filterFamily": "PropertyValue", "type": "string", "property": "twitterhandle", "value": "@hubspot"}]], "metaData": {"size": 0, "lastSizeChangeAt": 1634044419566, "processing": "DONE", "lastProcessingStateChangeAt": 1634044419493, "error": "", "listReferencesCount": null, "parentFolderId": null}, "archived": false, "teamIds": [], "dynamic": true}, "emitted_at": 1655280717948} +{"stream": "contact_lists", "data": {"portalId": 8727216, "listId": 157, "internalListId": 157, "createdAt": 1634044403439, "updatedAt": 1634044403439, "name": "test contact_list 91", "listType": "DYNAMIC", "filters": [[{"withinTimeMode": "PAST", "operator": "EQ", "filterFamily": "PropertyValue", "type": "string", "property": "twitterhandle", "value": "@hubspot"}]], "metaData": {"size": 0, "lastSizeChangeAt": 1634044424005, "processing": "DONE", "lastProcessingStateChangeAt": 1634044423934, "error": "", "listReferencesCount": null, "parentFolderId": null}, "archived": false, "teamIds": [], "dynamic": true}, "emitted_at": 1655280717949} +{"stream": "contact_lists", "data": {"portalId": 8727216, "listId": 158, "internalListId": 158, "createdAt": 1634044403927, "updatedAt": 1634044403927, "name": "test contact_list 92", "listType": "DYNAMIC", 
"filters": [[{"withinTimeMode": "PAST", "operator": "EQ", "filterFamily": "PropertyValue", "type": "string", "property": "twitterhandle", "value": "@hubspot"}]], "metaData": {"size": 0, "lastSizeChangeAt": 1634044426885, "processing": "DONE", "lastProcessingStateChangeAt": 1634044426781, "error": "", "listReferencesCount": null, "parentFolderId": null}, "archived": false, "teamIds": [], "dynamic": true}, "emitted_at": 1655280717949} +{"stream": "contact_lists", "data": {"portalId": 8727216, "listId": 159, "internalListId": 159, "createdAt": 1634044404406, "updatedAt": 1634044404406, "name": "test contact_list 93", "listType": "DYNAMIC", "filters": [[{"withinTimeMode": "PAST", "operator": "EQ", "filterFamily": "PropertyValue", "type": "string", "property": "twitterhandle", "value": "@hubspot"}]], "metaData": {"size": 0, "lastSizeChangeAt": 1634044425161, "processing": "DONE", "lastProcessingStateChangeAt": 1634044425101, "error": "", "listReferencesCount": null, "parentFolderId": null}, "archived": false, "teamIds": [], "dynamic": true}, "emitted_at": 1655280717950} +{"stream": "contact_lists", "data": {"portalId": 8727216, "listId": 160, "internalListId": 160, "createdAt": 1634044404882, "updatedAt": 1634044404882, "name": "test contact_list 94", "listType": "DYNAMIC", "filters": [[{"withinTimeMode": "PAST", "operator": "EQ", "filterFamily": "PropertyValue", "type": "string", "property": "twitterhandle", "value": "@hubspot"}]], "metaData": {"size": 0, "lastSizeChangeAt": 1634044425099, "processing": "DONE", "lastProcessingStateChangeAt": 1634044425035, "error": "", "listReferencesCount": null, "parentFolderId": null}, "archived": false, "teamIds": [], "dynamic": true}, "emitted_at": 1655280717950} +{"stream": "contact_lists", "data": {"portalId": 8727216, "listId": 161, "internalListId": 161, "createdAt": 1634044405363, "updatedAt": 1634044405363, "name": "test contact_list 95", "listType": "DYNAMIC", "filters": [[{"withinTimeMode": "PAST", "operator": "EQ", 
"filterFamily": "PropertyValue", "type": "string", "property": "twitterhandle", "value": "@hubspot"}]], "metaData": {"size": 0, "lastSizeChangeAt": 1634044426994, "processing": "DONE", "lastProcessingStateChangeAt": 1634044426887, "error": "", "listReferencesCount": null, "parentFolderId": null}, "archived": false, "teamIds": [], "dynamic": true}, "emitted_at": 1655280717951} +{"stream": "contact_lists", "data": {"portalId": 8727216, "listId": 162, "internalListId": 162, "createdAt": 1634044405868, "updatedAt": 1634044405868, "name": "test contact_list 96", "listType": "DYNAMIC", "filters": [[{"withinTimeMode": "PAST", "operator": "EQ", "filterFamily": "PropertyValue", "type": "string", "property": "twitterhandle", "value": "@hubspot"}]], "metaData": {"size": 0, "lastSizeChangeAt": 1634044431273, "processing": "DONE", "lastProcessingStateChangeAt": 1634044431167, "error": "", "listReferencesCount": null, "parentFolderId": null}, "archived": false, "teamIds": [], "dynamic": true}, "emitted_at": 1655280717951} +{"stream": "contact_lists", "data": {"portalId": 8727216, "listId": 163, "internalListId": 163, "createdAt": 1634044406419, "updatedAt": 1634044406419, "name": "test contact_list 97", "listType": "DYNAMIC", "filters": [[{"withinTimeMode": "PAST", "operator": "EQ", "filterFamily": "PropertyValue", "type": "string", "property": "twitterhandle", "value": "@hubspot"}]], "metaData": {"size": 0, "lastSizeChangeAt": 1634044427870, "processing": "DONE", "lastProcessingStateChangeAt": 1634044427787, "error": "", "listReferencesCount": null, "parentFolderId": null}, "archived": false, "teamIds": [], "dynamic": true}, "emitted_at": 1655280717951} +{"stream": "contact_lists", "data": {"portalId": 8727216, "listId": 164, "internalListId": 164, "createdAt": 1634044406868, "updatedAt": 1634044406868, "name": "test contact_list 98", "listType": "DYNAMIC", "filters": [[{"withinTimeMode": "PAST", "operator": "EQ", "filterFamily": "PropertyValue", "type": "string", "property": 
"twitterhandle", "value": "@hubspot"}]], "metaData": {"size": 0, "lastSizeChangeAt": 1634044427083, "processing": "DONE", "lastProcessingStateChangeAt": 1634044426996, "error": "", "listReferencesCount": null, "parentFolderId": null}, "archived": false, "teamIds": [], "dynamic": true}, "emitted_at": 1655280717952} +{"stream": "contact_lists", "data": {"portalId": 8727216, "listId": 165, "internalListId": 165, "createdAt": 1634044407353, "updatedAt": 1634044407353, "name": "test contact_list 99", "listType": "DYNAMIC", "filters": [[{"withinTimeMode": "PAST", "operator": "EQ", "filterFamily": "PropertyValue", "type": "string", "property": "twitterhandle", "value": "@hubspot"}]], "metaData": {"size": 0, "lastSizeChangeAt": 1634044431563, "processing": "DONE", "lastProcessingStateChangeAt": 1634044431492, "error": "", "listReferencesCount": null, "parentFolderId": null}, "archived": false, "teamIds": [], "dynamic": true}, "emitted_at": 1655280717952} +{"stream": "deal_pipelines", "data": {"label": "New Business Pipeline", "displayOrder": 3, "active": true, "stages": [{"label": "Success! 
Closed Won", "displayOrder": 2, "metadata": {"isClosed": "true", "probability": "1.0"}, "stageId": "customclosedwonstage", "createdAt": 1610635973956, "updatedAt": null, "active": true}, {"label": "Initial Qualification", "displayOrder": 0, "metadata": {"isClosed": "false", "probability": "0.1"}, "stageId": "9567448", "createdAt": 1610635973956, "updatedAt": null, "active": true}, {"label": "Negotiation", "displayOrder": 1, "metadata": {"isClosed": "false", "probability": "0.5"}, "stageId": "9567449", "createdAt": 1610635973956, "updatedAt": null, "active": true}], "objectType": "DEAL", "objectTypeId": "0-3", "pipelineId": "b9152945-a594-4835-9676-a6f405fecd71", "createdAt": 1610635973956, "updatedAt": 1610635973956, "default": false}, "emitted_at": 1655280719107} +{"stream": "deal_pipelines", "data": {"label": "Sales Pipeline", "displayOrder": 0, "active": true, "stages": [{"label": "Presentation Scheduled", "displayOrder": 2, "metadata": {"isClosed": "false", "probability": "0.6"}, "stageId": "presentationscheduled", "createdAt": 0, "updatedAt": null, "active": true}, {"label": "Closed Won", "displayOrder": 5, "metadata": {"isClosed": "true", "probability": "1.0"}, "stageId": "closedwon", "createdAt": 0, "updatedAt": null, "active": true}, {"label": "Closed Lost", "displayOrder": 6, "metadata": {"isClosed": "true", "probability": "0.0"}, "stageId": "closedlost", "createdAt": 0, "updatedAt": null, "active": true}, {"label": "Appointment Scheduled", "displayOrder": 0, "metadata": {"isClosed": "false", "probability": "0.2"}, "stageId": "appointmentscheduled", "createdAt": 0, "updatedAt": null, "active": true}, {"label": "Contract Sent", "displayOrder": 4, "metadata": {"isClosed": "false", "probability": "0.9"}, "stageId": "contractsent", "createdAt": 0, "updatedAt": null, "active": true}, {"label": "Qualified To Buy", "displayOrder": 1, "metadata": {"isClosed": "false", "probability": "0.4"}, "stageId": "qualifiedtobuy", "createdAt": 0, "updatedAt": null, "active": 
true}, {"label": "Decision Maker Bought-In", "displayOrder": 3, "metadata": {"isClosed": "false", "probability": "0.8"}, "stageId": "decisionmakerboughtin", "createdAt": 0, "updatedAt": null, "active": true}], "objectType": "DEAL", "objectTypeId": "0-3", "pipelineId": "default", "createdAt": 0, "updatedAt": 0, "default": true}, "emitted_at": 1655280719111} +{"stream": "deals", "data": {"id": "5388213824", "properties": {"amount": 10.0, "amount_in_home_currency": 10.0, "closed_lost_reason": null, "closed_won_reason": null, "closedate": "2014-08-31T00:00:00+00:00", "createdate": "2021-06-02T14:11:49.985000+00:00", "days_to_close": 0.0, "dealname": "Tim's Newer Deal", "dealstage": null, "dealtype": "newbusiness", "description": null, "engagements_last_meeting_booked": null, "engagements_last_meeting_booked_campaign": null, "engagements_last_meeting_booked_medium": null, "engagements_last_meeting_booked_source": null, "hs_acv": 10.0, "hs_all_accessible_team_ids": null, "hs_all_assigned_business_unit_ids": null, "hs_all_collaborator_owner_ids": null, "hs_all_deal_split_owner_ids": null, "hs_all_owner_ids": "65568071", "hs_all_team_ids": null, "hs_analytics_source": "OFFLINE", "hs_analytics_source_data_1": "API", "hs_analytics_source_data_2": null, "hs_arr": 0.0, "hs_campaign": null, "hs_closed_amount": 0.0, "hs_closed_amount_in_home_currency": 0.0, "hs_created_by_user_id": null, "hs_createdate": "2021-06-02T14:11:49.985000+00:00", "hs_date_entered_9567448": null, "hs_date_entered_9567449": null, "hs_date_entered_appointmentscheduled": null, "hs_date_entered_closedlost": null, "hs_date_entered_closedwon": null, "hs_date_entered_contractsent": null, "hs_date_entered_customclosedwonstage": null, "hs_date_entered_decisionmakerboughtin": null, "hs_date_entered_presentationscheduled": null, "hs_date_entered_qualifiedtobuy": null, "hs_date_exited_9567448": null, "hs_date_exited_9567449": null, "hs_date_exited_appointmentscheduled": null, "hs_date_exited_closedlost": null, 
"hs_date_exited_closedwon": null, "hs_date_exited_contractsent": null, "hs_date_exited_customclosedwonstage": null, "hs_date_exited_decisionmakerboughtin": null, "hs_date_exited_presentationscheduled": null, "hs_date_exited_qualifiedtobuy": null, "hs_deal_amount_calculation_preference": null, "hs_deal_stage_probability": 0.0, "hs_deal_stage_probability_shadow": 0.0, "hs_forecast_amount": 10.0, "hs_forecast_probability": null, "hs_is_closed": null, "hs_is_closed_won": true, "hs_is_deal_split": true, "hs_lastmodifieddate": "2021-10-12T13:50:15.127000+00:00", "hs_latest_meeting_activity": null, "hs_likelihood_to_close": null, "hs_line_item_global_term_hs_discount_percentage": null, "hs_line_item_global_term_hs_discount_percentage_enabled": null, "hs_line_item_global_term_hs_recurring_billing_period": null, "hs_line_item_global_term_hs_recurring_billing_period_enabled": null, "hs_line_item_global_term_hs_recurring_billing_start_date": null, "hs_line_item_global_term_hs_recurring_billing_start_date_enabled": null, "hs_line_item_global_term_recurringbillingfrequency": null, "hs_line_item_global_term_recurringbillingfrequency_enabled": null, "hs_manual_forecast_category": null, "hs_merged_object_ids": null, "hs_mrr": 0.0, "hs_next_step": null, "hs_num_associated_deal_splits": 0.0, "hs_num_target_accounts": 0.0, "hs_object_id": 5388213824, "hs_predicted_amount": null, "hs_predicted_amount_in_home_currency": null, "hs_priority": null, "hs_projected_amount": 0.0, "hs_projected_amount_in_home_currency": 0.0, "hs_sales_email_last_replied": null, "hs_tcv": 10.0, "hs_time_in_9567448": null, "hs_time_in_9567449": null, "hs_time_in_appointmentscheduled": null, "hs_time_in_closedlost": null, "hs_time_in_closedwon": null, "hs_time_in_contractsent": null, "hs_time_in_customclosedwonstage": null, "hs_time_in_decisionmakerboughtin": null, "hs_time_in_presentationscheduled": null, "hs_time_in_qualifiedtobuy": null, "hs_unique_creation_key": null, "hs_updated_by_user_id": 12282590, 
"hs_user_ids_of_all_notification_followers": null, "hs_user_ids_of_all_notification_unfollowers": null, "hs_user_ids_of_all_owners": "23660227", "hubspot_owner_assigneddate": "2021-06-02T14:11:49.985000+00:00", "hubspot_owner_id": "65568071", "hubspot_team_id": null, "notes_last_contacted": null, "notes_last_updated": null, "notes_next_activity_date": null, "num_associated_contacts": 1.0, "num_contacted_notes": null, "num_notes": null, "pipeline": null}, "createdAt": "2021-06-02T14:11:49.985Z", "updatedAt": "2021-10-12T13:50:15.127Z", "archived": false, "companies": ["5000526215", "5000526215"], "line items": ["2089616136"], "contacts": ["551"]}, "emitted_at": 1655280720600} +{"stream": "deals", "data": {"id": "5388306989", "properties": {"amount": 60000.0, "amount_in_home_currency": 60000.0, "closed_lost_reason": null, "closed_won_reason": null, "closedate": "2014-08-31T00:00:00+00:00", "createdate": "2021-06-02T14:12:00.029000+00:00", "days_to_close": 0.0, "dealname": "Tim's Newer Deal 2", "dealstage": null, "dealtype": "newbusiness", "description": null, "engagements_last_meeting_booked": null, "engagements_last_meeting_booked_campaign": null, "engagements_last_meeting_booked_medium": null, "engagements_last_meeting_booked_source": null, "hs_acv": null, "hs_all_accessible_team_ids": null, "hs_all_assigned_business_unit_ids": null, "hs_all_collaborator_owner_ids": null, "hs_all_deal_split_owner_ids": null, "hs_all_owner_ids": "65568071", "hs_all_team_ids": null, "hs_analytics_source": "OFFLINE", "hs_analytics_source_data_1": "API", "hs_analytics_source_data_2": null, "hs_arr": null, "hs_campaign": null, "hs_closed_amount": 0.0, "hs_closed_amount_in_home_currency": 0.0, "hs_created_by_user_id": null, "hs_createdate": "2021-06-02T14:12:00.029000+00:00", "hs_date_entered_9567448": null, "hs_date_entered_9567449": null, "hs_date_entered_appointmentscheduled": null, "hs_date_entered_closedlost": null, "hs_date_entered_closedwon": null, "hs_date_entered_contractsent": 
null, "hs_date_entered_customclosedwonstage": null, "hs_date_entered_decisionmakerboughtin": null, "hs_date_entered_presentationscheduled": null, "hs_date_entered_qualifiedtobuy": null, "hs_date_exited_9567448": null, "hs_date_exited_9567449": null, "hs_date_exited_appointmentscheduled": null, "hs_date_exited_closedlost": null, "hs_date_exited_closedwon": null, "hs_date_exited_contractsent": null, "hs_date_exited_customclosedwonstage": null, "hs_date_exited_decisionmakerboughtin": null, "hs_date_exited_presentationscheduled": null, "hs_date_exited_qualifiedtobuy": null, "hs_deal_amount_calculation_preference": null, "hs_deal_stage_probability": 0.0, "hs_deal_stage_probability_shadow": 0.0, "hs_forecast_amount": 60000.0, "hs_forecast_probability": null, "hs_is_closed": null, "hs_is_closed_won": true, "hs_is_deal_split": true, "hs_lastmodifieddate": "2021-09-09T09:56:32.554000+00:00", "hs_latest_meeting_activity": null, "hs_likelihood_to_close": null, "hs_line_item_global_term_hs_discount_percentage": null, "hs_line_item_global_term_hs_discount_percentage_enabled": null, "hs_line_item_global_term_hs_recurring_billing_period": null, "hs_line_item_global_term_hs_recurring_billing_period_enabled": null, "hs_line_item_global_term_hs_recurring_billing_start_date": null, "hs_line_item_global_term_hs_recurring_billing_start_date_enabled": null, "hs_line_item_global_term_recurringbillingfrequency": null, "hs_line_item_global_term_recurringbillingfrequency_enabled": null, "hs_manual_forecast_category": null, "hs_merged_object_ids": null, "hs_mrr": null, "hs_next_step": null, "hs_num_associated_deal_splits": 0.0, "hs_num_target_accounts": 0.0, "hs_object_id": 5388306989, "hs_predicted_amount": null, "hs_predicted_amount_in_home_currency": null, "hs_priority": null, "hs_projected_amount": 0.0, "hs_projected_amount_in_home_currency": 0.0, "hs_sales_email_last_replied": null, "hs_tcv": null, "hs_time_in_9567448": null, "hs_time_in_9567449": null, 
"hs_time_in_appointmentscheduled": null, "hs_time_in_closedlost": null, "hs_time_in_closedwon": null, "hs_time_in_contractsent": null, "hs_time_in_customclosedwonstage": null, "hs_time_in_decisionmakerboughtin": null, "hs_time_in_presentationscheduled": null, "hs_time_in_qualifiedtobuy": null, "hs_unique_creation_key": null, "hs_updated_by_user_id": null, "hs_user_ids_of_all_notification_followers": null, "hs_user_ids_of_all_notification_unfollowers": null, "hs_user_ids_of_all_owners": "23660227", "hubspot_owner_assigneddate": "2021-06-02T14:12:00.029000+00:00", "hubspot_owner_id": "65568071", "hubspot_team_id": null, "notes_last_contacted": null, "notes_last_updated": null, "notes_next_activity_date": null, "num_associated_contacts": 1.0, "num_contacted_notes": null, "num_notes": null, "pipeline": null}, "createdAt": "2021-06-02T14:12:00.029Z", "updatedAt": "2021-09-09T09:56:32.554Z", "archived": false, "companies": ["5000526215", "5000526215"], "contacts": ["551"]}, "emitted_at": 1655280720601} +{"stream": "email_events", "data": {"appName": "BatchTest", "duration": 0, "deviceType": "COMPUTER", "browser": {"name": "Google Image Cache", "family": "Google Image Cache", "producer": "", "producerUrl": "", "type": "Proxy", "url": "", "version": []}, "userAgent": "Mozilla/5.0 (Windows NT 5.1; rv:11.0) Gecko Firefox/11.0 (via ggpht.com GoogleImageProxy)", "created": 1614191191202, "location": {"country": "Unknown", "state": "Unknown", "city": "Unknown", "zipcode": "Unknown"}, "id": "17d3fcc4-bc34-38b4-9103-69b5896bbdde", "smtpId": null, "sentBy": {"id": "dd239309-7866-4705-a3e9-c571dd349477", "created": 1614119023182}, "recipient": "integration-test@airbyte.io", "portalId": 8727216, "type": "OPEN", "filteredEvent": false, "appId": 20053, "emailCampaignId": 2}, "emitted_at": 1655280721339} +{"stream": "email_events", "data": {"appName": "BatchTest", "duration": 0, "deviceType": "COMPUTER", "browser": {"name": "Google Image Cache", "family": "Google Image Cache", 
"producer": "", "producerUrl": "", "type": "Proxy", "url": "", "version": []}, "userAgent": "Mozilla/5.0 (Windows NT 5.1; rv:11.0) Gecko Firefox/11.0 (via ggpht.com GoogleImageProxy)", "created": 1614122124339, "location": {"country": "Unknown", "state": "Unknown", "city": "Unknown", "zipcode": "Unknown"}, "id": "e5cbe134-db76-32cb-9e82-9dafcbaf8b64", "smtpId": null, "sentBy": {"id": "dd239309-7866-4705-a3e9-c571dd349477", "created": 1614119023182}, "recipient": "integration-test@airbyte.io", "portalId": 8727216, "type": "OPEN", "filteredEvent": false, "appId": 20053, "emailCampaignId": 2}, "emitted_at": 1655280721347} +{"stream": "email_events", "data": {"appName": "BatchTest", "duration": 1229, "deviceType": "COMPUTER", "browser": {"name": "Microsoft Edge 12.246", "family": "Microsoft Edge", "producer": "Microsoft Corporation.", "producerUrl": "https://www.microsoft.com/about/", "type": "Browser", "url": "https://en.wikipedia.org/wiki/Microsoft_Edge", "version": ["12.246"]}, "userAgent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/42.0.2311.135 Safari/537.36 Edge/12.246 Mozilla/5.0", "created": 1614119026757, "location": {"country": "UNITED STATES", "state": "california", "city": "mountain view", "latitude": 37.40599, "longitude": -122.078514, "zipcode": "94043"}, "id": "35b79cd1-3527-3ae7-b316-be0bbf872839", "smtpId": null, "sentBy": {"id": "dd239309-7866-4705-a3e9-c571dd349477", "created": 1614119023182}, "recipient": "integration-test@airbyte.io", "portalId": 8727216, "type": "OPEN", "filteredEvent": true, "appId": 20053, "emailCampaignId": 2}, "emitted_at": 1655280721348} +{"stream": "email_events", "data": {"appName": "BatchTest", "duration": 2303, "deviceType": "COMPUTER", "browser": {"name": "Microsoft Edge 12.246", "family": "Microsoft Edge", "producer": "Microsoft Corporation.", "producerUrl": "https://www.microsoft.com/about/", "type": "Browser", "url": "https://en.wikipedia.org/wiki/Microsoft_Edge", 
"version": ["12.246"]}, "userAgent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/42.0.2311.135 Safari/537.36 Edge/12.246 Mozilla/5.0", "created": 1614119025477, "location": {"country": "UNITED STATES", "state": "california", "city": "mountain view", "latitude": 37.40599, "longitude": -122.078514, "zipcode": "94043"}, "id": "73b6e0a9-b6a2-3fc6-9faf-d69ca3c254dd", "smtpId": null, "sentBy": {"id": "dd239309-7866-4705-a3e9-c571dd349477", "created": 1614119023182}, "recipient": "integration-test@airbyte.io", "portalId": 8727216, "type": "OPEN", "filteredEvent": true, "appId": 20053, "emailCampaignId": 2}, "emitted_at": 1655280721349} +{"stream": "email_events", "data": {"appName": "BatchTest", "response": "250 2.0.0 OK cn15si96389qvb.103 - gsmtp", "attempt": 1, "created": 1614119023933, "id": "cf24d1cb-0242-3728-ab16-c4f3892c0333", "smtpId": null, "sentBy": {"id": "dd239309-7866-4705-a3e9-c571dd349477", "created": 1614119023182}, "recipient": "integration-test@airbyte.io", "portalId": 8727216, "type": "DELIVERED", "appId": 20053, "emailCampaignId": 2}, "emitted_at": 1655280721350} +{"stream": "email_events", "data": {"appName": "BatchTest", "attempt": 1, "created": 1614119023933, "id": "ebe79ee1-8a9b-334e-b7ef-ddda766f3b3b", "smtpId": null, "sentBy": {"id": "dd239309-7866-4705-a3e9-c571dd349477", "created": 1614119023182}, "recipient": "integration-test@airbyte.io", "portalId": 8727216, "type": "PROCESSED", "appId": 20053, "emailCampaignId": 2}, "emitted_at": 1655280721351} +{"stream": "email_events", "data": {"appName": "BatchTest", "emailCampaignId": 2, "recipient": "integration-test@airbyte.io", "portalId": 8727216, "appId": 20053, "created": 1614119023182, "id": "dd239309-7866-4705-a3e9-c571dd349477", "subject": "Preview - Test subject", "from": "noreply@hubspot.com", "cc": [], "bcc": [], "replyTo": [], "smtpId": null, "sentBy": {"id": "dd239309-7866-4705-a3e9-c571dd349477", "created": 1614119023182}, "type": "SENT"}, 
"emitted_at": 1655280721352} +{"stream": "email_events", "data": {"appName": "BatchTest", "duration": 2498, "created": 1616173245743, "browser": {"name": "Microsoft Edge 12.246", "family": "Microsoft Edge", "producer": "Microsoft Corporation.", "producerUrl": "https://www.microsoft.com/about/", "type": "Browser", "url": "https://en.wikipedia.org/wiki/Microsoft_Edge", "version": ["12.246"]}, "deviceType": "COMPUTER", "userAgent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/42.0.2311.135 Safari/537.36 Edge/12.246 Mozilla/5.0", "location": {"country": "UNITED STATES", "state": "california", "city": "mountain view", "latitude": 37.4043, "longitude": -122.0748, "zipcode": "94043"}, "id": "9ff40077-4b32-3ec5-9aea-d69fd12683b9", "recipient": "kulak.eugene@gmail.com", "sentBy": {"id": "9e68f5c3-45ff-445a-8d76-9050474425f3", "created": 1616173243740}, "smtpId": null, "portalId": 8727216, "type": "OPEN", "filteredEvent": false, "appId": 20053, "emailCampaignId": 2}, "emitted_at": 1655280721644} +{"stream": "email_events", "data": {"appName": "BatchTest", "response": "250 2.0.0 OK n17si3081140qtv.136 - gsmtp", "created": 1616173244650, "attempt": 1, "id": "7dd43432-1fed-3959-bb42-38c7d193605c", "recipient": "kulak.eugene@gmail.com", "sentBy": {"id": "9e68f5c3-45ff-445a-8d76-9050474425f3", "created": 1616173243740}, "smtpId": null, "portalId": 8727216, "type": "DELIVERED", "appId": 20053, "emailCampaignId": 2}, "emitted_at": 1655280721645} +{"stream": "email_events", "data": {"appName": "BatchTest", "created": 1616173244650, "attempt": 1, "id": "c92658da-7bdf-35d9-91e8-6738ae284491", "recipient": "kulak.eugene@gmail.com", "sentBy": {"id": "9e68f5c3-45ff-445a-8d76-9050474425f3", "created": 1616173243740}, "smtpId": null, "portalId": 8727216, "type": "PROCESSED", "appId": 20053, "emailCampaignId": 2}, "emitted_at": 1655280721646} +{"stream": "email_events", "data": {"appName": "BatchTest", "created": 1616173243740, "emailCampaignId": 
2, "recipient": "kulak.eugene@gmail.com", "portalId": 8727216, "appId": 20053, "id": "9e68f5c3-45ff-445a-8d76-9050474425f3", "replyTo": [], "subject": "Preview - Test subj", "cc": [], "bcc": [], "from": "noreply@hubspot.com", "sentBy": {"id": "9e68f5c3-45ff-445a-8d76-9050474425f3", "created": 1616173243740}, "smtpId": null, "type": "SENT"}, "emitted_at": 1655280721647} +{"stream": "email_events", "data": {"created": 1616173134301, "recipient": "kulak.eugene@gmail.com", "sourceId": "SOURCE_HUBSPOT_CUSTOMER", "subscriptions": [], "portalSubscriptionStatus": "SUBSCRIBED", "source": "SOURCE_HUBSPOT_CUSTOMER", "id": "d70b78b9-a411-4d3e-808b-fe931be35b43", "portalId": 8727216, "type": "STATUSCHANGE", "appId": 0, "emailCampaignId": 0}, "emitted_at": 1655280721648} +{"stream": "email_events", "data": {"created": 1616173134301, "recipient": "kulak.eugene@gmail.com", "sourceId": "SOURCE_HUBSPOT_CUSTOMER", "subscriptions": [{"id": 10798197, "status": "SUBSCRIBED", "legalBasisChange": {"legalBasisType": "PERFORMANCE_OF_CONTRACT", "legalBasisExplanation": "erererer", "optState": "OPT_IN"}}], "source": "SOURCE_HUBSPOT_CUSTOMER", "id": "ff118718-786d-4a35-94f9-6bbd413654de", "portalId": 8727216, "type": "STATUSCHANGE", "appId": 0, "emailCampaignId": 0}, "emitted_at": 1655280721648} +{"stream": "email_events", "data": {"created": 1616173106737, "recipient": "kulak.eugene@gmail.com", "sourceId": "PropertyChangeHandler", "subscriptions": [], "portalSubscriptionStatus": "SUBSCRIBED", "source": "SOURCE_HUBSPOT_CUSTOMER", "id": "24539f1f-0b20-4296-a5bf-6ba3bb9dc1b8", "portalId": 8727216, "type": "STATUSCHANGE", "appId": 0, "emailCampaignId": 0}, "emitted_at": 1655280721649} +{"stream": "email_events", "data": {"created": 1616173054611, "recipient": "sherif@dataline.io", "sourceId": "SOURCE_HUBSPOT_CUSTOMER", "subscriptions": [], "portalSubscriptionStatus": "SUBSCRIBED", "source": "SOURCE_HUBSPOT_CUSTOMER", "id": "cb6107f5-ba69-403d-8de5-4d206c774948", "portalId": 8727216, "type": 
"STATUSCHANGE", "appId": 0, "emailCampaignId": 0}, "emitted_at": 1655280721650} +{"stream": "email_events", "data": {"created": 1616173054611, "recipient": "sherif@dataline.io", "sourceId": "SOURCE_HUBSPOT_CUSTOMER", "subscriptions": [{"id": 10798197, "status": "SUBSCRIBED", "legalBasisChange": {"legalBasisType": "LEGITIMATE_INTEREST_CLIENT", "legalBasisExplanation": "because", "optState": "OPT_IN"}}], "source": "SOURCE_HUBSPOT_CUSTOMER", "id": "e0c1eb45-612f-4beb-91a2-4352fbbbd100", "portalId": 8727216, "type": "STATUSCHANGE", "appId": 0, "emailCampaignId": 0}, "emitted_at": 1655280721650} +{"stream": "email_events", "data": {"appName": "Batch", "dropMessage": "", "dropReason": "MTA_IGNORE", "created": 1615506409286, "attempt": 5, "id": "709911d6-ba90-3c31-beb1-88f3359374ee", "recipient": "michael.scott@dundermifflin.com", "sentBy": {"id": "33a9449c-9c37-4dd9-b3f3-0f8fc9a9d2a2", "created": 1615504687764}, "smtpId": null, "portalId": 8727216, "type": "DROPPED", "appId": 113, "emailCampaignId": 115429485}, "emitted_at": 1655280721651} +{"stream": "email_events", "data": {"appName": "Batch", "attempt": 4, "response": "", "created": 1615506290222, "id": "d0efceb4-638c-3313-9a20-62a44e1c2b85", "recipient": "michael.scott@dundermifflin.com", "sentBy": {"id": "33a9449c-9c37-4dd9-b3f3-0f8fc9a9d2a2", "created": 1615504687764}, "smtpId": null, "portalId": 8727216, "type": "DEFERRED", "appId": 113, "emailCampaignId": 115429485}, "emitted_at": 1655280721652} +{"stream": "email_events", "data": {"appName": "Batch", "attempt": 3, "response": "", "created": 1615505015542, "id": "4f0583ba-b95e-35c1-b6fa-4326128b2861", "recipient": "michael.scott@dundermifflin.com", "sentBy": {"id": "33a9449c-9c37-4dd9-b3f3-0f8fc9a9d2a2", "created": 1615504687764}, "smtpId": null, "portalId": 8727216, "type": "DEFERRED", "appId": 113, "emailCampaignId": 115429485}, "emitted_at": 1655280721653} +{"stream": "email_events", "data": {"appName": "Batch", "attempt": 1, "response": "", "created": 
1615504759396, "id": "deb20a2f-9594-3551-9551-ff8322da7916", "recipient": "michael.scott@dundermifflin.com", "sentBy": {"id": "33a9449c-9c37-4dd9-b3f3-0f8fc9a9d2a2", "created": 1615504687764}, "smtpId": null, "portalId": 8727216, "type": "DEFERRED", "appId": 113, "emailCampaignId": 115429485}, "emitted_at": 1655280721653} +{"stream": "email_events", "data": {"appName": "Batch", "created": 1615504759396, "attempt": 1, "id": "f96d2144-fc2a-3da7-a11a-5a0a1e893371", "recipient": "michael.scott@dundermifflin.com", "sentBy": {"id": "33a9449c-9c37-4dd9-b3f3-0f8fc9a9d2a2", "created": 1615504687764}, "smtpId": null, "portalId": 8727216, "type": "PROCESSED", "appId": 113, "emailCampaignId": 115429485}, "emitted_at": 1655280721654} +{"stream": "email_events", "data": {"appName": "BatchTest", "duration": 0, "created": 1634229850314, "browser": {"name": "Google Image Cache", "family": "Google Image Cache", "producer": "", "producerUrl": "", "type": "Proxy", "url": "", "version": []}, "deviceType": "COMPUTER", "userAgent": "Mozilla/5.0 (Windows NT 5.1; rv:11.0) Gecko Firefox/11.0 (via ggpht.com GoogleImageProxy)", "location": {"country": "Unknown", "state": "Unknown", "city": "Unknown", "zipcode": "Unknown"}, "id": "4823cd9e-032d-3d68-99f9-b77bd33a9390", "sentBy": {"id": "e98ee9c2-9ea4-4f7c-84e3-caace1c0a68b", "created": 1634042982506}, "smtpId": null, "recipient": "integration-test@airbyte.io", "portalId": 8727216, "type": "OPEN", "filteredEvent": false, "appId": 20053, "emailCampaignId": 2}, "emitted_at": 1655280723832} +{"stream": "email_events", "data": {"appName": "BatchTest", "duration": 5263, "created": 1634119229332, "browser": {"name": "Outlook 2019", "family": "Outlook 2019", "producer": "Microsoft Corporation.", "producerUrl": "https://www.microsoft.com/about/", "type": "Email client", "url": "https://en.wikipedia.org/wiki/Microsoft_Outlook", "version": [""]}, "deviceType": "COMPUTER", "userAgent": "Microsoft Office/16.0 (Microsoft Outlook 16.0.14326; Pro), 
Mozilla/4.0 (compatible; ms-office; MSOffice rmj)", "location": {"country": "UKRAINE", "state": "kyiv", "city": "kiev", "latitude": 50.458, "longitude": 30.5303}, "id": "d78765aa-f8c9-354c-a4a6-075dcee37bd1", "sentBy": {"id": "e98ee9c2-9ea4-4f7c-84e3-caace1c0a68b", "created": 1634042982506}, "smtpId": null, "recipient": "integration-test@airbyte.io", "portalId": 8727216, "type": "OPEN", "filteredEvent": false, "appId": 20053, "emailCampaignId": 2}, "emitted_at": 1655280723833} +{"stream": "email_events", "data": {"appName": "BatchTest", "duration": 0, "created": 1634064087206, "browser": {"name": "Google Image Cache", "family": "Google Image Cache", "producer": "", "producerUrl": "", "type": "Proxy", "url": "", "version": []}, "deviceType": "COMPUTER", "userAgent": "Mozilla/5.0 (Windows NT 5.1; rv:11.0) Gecko Firefox/11.0 (via ggpht.com GoogleImageProxy)", "location": {"country": "Unknown", "state": "Unknown", "city": "Unknown", "zipcode": "Unknown"}, "id": "d2f5ba83-17e9-3a33-b1cc-1c0bd7aaf2da", "sentBy": {"id": "e98ee9c2-9ea4-4f7c-84e3-caace1c0a68b", "created": 1634042982506}, "smtpId": null, "recipient": "integration-test@airbyte.io", "portalId": 8727216, "type": "OPEN", "filteredEvent": false, "appId": 20053, "emailCampaignId": 2}, "emitted_at": 1655280723834} +{"stream": "email_events", "data": {"appName": "BatchTest", "duration": 0, "created": 1634052748873, "browser": {"name": "Google Image Cache", "family": "Google Image Cache", "producer": "", "producerUrl": "", "type": "Proxy", "url": "", "version": []}, "deviceType": "COMPUTER", "userAgent": "Mozilla/5.0 (Windows NT 5.1; rv:11.0) Gecko Firefox/11.0 (via ggpht.com GoogleImageProxy)", "location": {"country": "Unknown", "state": "Unknown", "city": "Unknown", "zipcode": "Unknown"}, "id": "a3ec758a-10c8-3b11-86b2-1e0daed7ef64", "sentBy": {"id": "e98ee9c2-9ea4-4f7c-84e3-caace1c0a68b", "created": 1634042982506}, "smtpId": null, "recipient": "integration-test@airbyte.io", "portalId": 8727216, "type": "OPEN", 
"filteredEvent": false, "appId": 20053, "emailCampaignId": 2}, "emitted_at": 1655280723834} +{"stream": "email_events", "data": {"created": 1634051145732, "sourceId": "SOURCE_HUBSPOT_CUSTOMER", "subscriptions": [], "portalSubscriptionStatus": "SUBSCRIBED", "recipient": "testingapicontact_19@hubspot.com", "source": "SOURCE_HUBSPOT_CUSTOMER", "id": "569ea4bb-eee0-4718-9853-644b676101c9", "portalId": 8727216, "type": "STATUSCHANGE", "appId": 0, "emailCampaignId": 0}, "emitted_at": 1655280723834} +{"stream": "email_events", "data": {"created": 1634051145732, "sourceId": "SOURCE_HUBSPOT_CUSTOMER", "subscriptions": [{"id": 23704464, "status": "UNSUBSCRIBED", "legalBasisChange": {"legalBasisType": "NON_GDPR", "legalBasisExplanation": "The contact was opted out by a HubSpot user", "optState": "OPT_OUT"}}], "recipient": "testingapicontact_19@hubspot.com", "source": "SOURCE_HUBSPOT_CUSTOMER", "id": "66677dbc-3d7b-4a4e-8ed8-023dad7223a1", "portalId": 8727216, "type": "STATUSCHANGE", "appId": 0, "emailCampaignId": 0}, "emitted_at": 1655280723835} +{"stream": "email_events", "data": {"created": 1634051105091, "sourceId": "SOURCE_HUBSPOT_CUSTOMER", "subscriptions": [], "portalSubscriptionStatus": "SUBSCRIBED", "recipient": "testingapicontact_19@hubspot.com", "source": "SOURCE_HUBSPOT_CUSTOMER", "id": "331da840-e177-42e7-9758-08f430a40428", "portalId": 8727216, "type": "STATUSCHANGE", "appId": 0, "emailCampaignId": 0}, "emitted_at": 1655280723835} +{"stream": "email_events", "data": {"created": 1634051105091, "sourceId": "SOURCE_HUBSPOT_CUSTOMER", "subscriptions": [{"id": 23704464, "status": "SUBSCRIBED", "legalBasisChange": {"legalBasisType": "LEGITIMATE_INTEREST_CLIENT", "legalBasisExplanation": "Test", "optState": "OPT_IN"}}], "recipient": "testingapicontact_19@hubspot.com", "source": "SOURCE_HUBSPOT_CUSTOMER", "id": "f27707ce-833e-4d47-b0fe-2cd9cbec786e", "portalId": 8727216, "type": "STATUSCHANGE", "appId": 0, "emailCampaignId": 0}, "emitted_at": 1655280723835} +{"stream": 
"email_events", "data": {"appName": "BatchTest", "duration": 35376, "created": 1634050837720, "browser": {"name": "unknown", "family": "unknown", "producer": "unknown", "producerUrl": "unknown", "type": "unknown", "url": "unknown", "version": [""]}, "deviceType": "COMPUTER", "userAgent": "Mozilla/5.0", "location": {"country": "UKRAINE", "state": "kyiv", "city": "kiev", "latitude": 50.458, "longitude": 30.5303}, "id": "ee27badd-cfe8-3278-ba4f-b2032b95ceb4", "sentBy": {"id": "e98ee9c2-9ea4-4f7c-84e3-caace1c0a68b", "created": 1634042982506}, "smtpId": null, "recipient": "integration-test@airbyte.io", "portalId": 8727216, "type": "OPEN", "filteredEvent": true, "appId": 20053, "emailCampaignId": 2}, "emitted_at": 1655280723835} +{"stream": "email_events", "data": {"appName": "BatchTest", "duration": 36763, "created": 1634049479939, "browser": {"name": "unknown", "family": "unknown", "producer": "unknown", "producerUrl": "unknown", "type": "unknown", "url": "unknown", "version": [""]}, "deviceType": "COMPUTER", "userAgent": "Mozilla/5.0", "location": {"country": "UKRAINE", "state": "kyiv", "city": "kiev", "latitude": 50.458, "longitude": 30.5303}, "id": "0ee6ffc9-160f-33bb-8e17-fc874429b206", "sentBy": {"id": "e98ee9c2-9ea4-4f7c-84e3-caace1c0a68b", "created": 1634042982506}, "smtpId": null, "recipient": "integration-test@airbyte.io", "portalId": 8727216, "type": "OPEN", "filteredEvent": true, "appId": 20053, "emailCampaignId": 2}, "emitted_at": 1655280723835} +{"stream": "email_events", "data": {"appName": "BatchTest", "duration": 0, "created": 1634046233479, "browser": {"name": "Google Image Cache", "family": "Google Image Cache", "producer": "", "producerUrl": "", "type": "Proxy", "url": "", "version": []}, "deviceType": "COMPUTER", "userAgent": "Mozilla/5.0 (Windows NT 5.1; rv:11.0) Gecko Firefox/11.0 (via ggpht.com GoogleImageProxy)", "location": {"country": "Unknown", "state": "Unknown", "city": "Unknown", "zipcode": "Unknown"}, "id": 
"a3c92fa4-20d1-345c-92fd-d5485ca0fb8b", "sentBy": {"id": "e98ee9c2-9ea4-4f7c-84e3-caace1c0a68b", "created": 1634042982506}, "smtpId": null, "recipient": "integration-test@airbyte.io", "portalId": 8727216, "type": "OPEN", "filteredEvent": false, "appId": 20053, "emailCampaignId": 2}, "emitted_at": 1655280723835} +{"stream": "email_events", "data": {"appName": "BatchTest", "duration": 0, "created": 1634046230712, "browser": {"name": "Google Image Cache", "family": "Google Image Cache", "producer": "", "producerUrl": "", "type": "Proxy", "url": "", "version": []}, "deviceType": "COMPUTER", "userAgent": "Mozilla/5.0 (Windows NT 5.1; rv:11.0) Gecko Firefox/11.0 (via ggpht.com GoogleImageProxy)", "location": {"country": "Unknown", "state": "Unknown", "city": "Unknown", "zipcode": "Unknown"}, "id": "ab1a6976-76b1-304d-9f4c-6d73e8bc52dc", "sentBy": {"id": "e98ee9c2-9ea4-4f7c-84e3-caace1c0a68b", "created": 1634042982506}, "smtpId": null, "recipient": "integration-test@airbyte.io", "portalId": 8727216, "type": "OPEN", "filteredEvent": false, "appId": 20053, "emailCampaignId": 2}, "emitted_at": 1655280723836} +{"stream": "email_events", "data": {"appName": "BatchTest", "duration": 0, "created": 1634042988255, "browser": {"name": "Microsoft Edge 12.246", "family": "Microsoft Edge", "producer": "Microsoft Corporation.", "producerUrl": "https://www.microsoft.com/about/", "type": "Browser", "url": "https://en.wikipedia.org/wiki/Microsoft_Edge", "version": ["12.246"]}, "deviceType": "COMPUTER", "userAgent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/42.0.2311.135 Safari/537.36 Edge/12.246 Mozilla/5.0", "location": {"country": "UNITED STATES", "state": "california", "city": "mountain view", "latitude": 37.40599, "longitude": -122.078514, "zipcode": "94043"}, "id": "b750d85b-fec1-38f4-80e1-47cb01672e72", "sentBy": {"id": "e98ee9c2-9ea4-4f7c-84e3-caace1c0a68b", "created": 1634042982506}, "smtpId": null, "recipient": 
"integration-test@airbyte.io", "portalId": 8727216, "type": "OPEN", "filteredEvent": true, "appId": 20053, "emailCampaignId": 2}, "emitted_at": 1655280723836} +{"stream": "email_events", "data": {"appName": "BatchTest", "duration": 0, "created": 1634042987754, "browser": {"name": "Microsoft Edge 12.246", "family": "Microsoft Edge", "producer": "Microsoft Corporation.", "producerUrl": "https://www.microsoft.com/about/", "type": "Browser", "url": "https://en.wikipedia.org/wiki/Microsoft_Edge", "version": ["12.246"]}, "deviceType": "COMPUTER", "userAgent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/42.0.2311.135 Safari/537.36 Edge/12.246 Mozilla/5.0", "location": {"country": "UNITED STATES", "state": "california", "city": "mountain view", "latitude": 37.40599, "longitude": -122.078514, "zipcode": "94043"}, "id": "61526204-2a0b-3b20-a0ba-70381fe3267d", "sentBy": {"id": "e98ee9c2-9ea4-4f7c-84e3-caace1c0a68b", "created": 1634042982506}, "smtpId": null, "recipient": "integration-test@airbyte.io", "portalId": 8727216, "type": "OPEN", "filteredEvent": true, "appId": 20053, "emailCampaignId": 2}, "emitted_at": 1655280723836} +{"stream": "email_events", "data": {"appName": "BatchTest", "duration": 0, "created": 1634042987282, "browser": {"name": "Microsoft Edge 12.246", "family": "Microsoft Edge", "producer": "Microsoft Corporation.", "producerUrl": "https://www.microsoft.com/about/", "type": "Browser", "url": "https://en.wikipedia.org/wiki/Microsoft_Edge", "version": ["12.246"]}, "deviceType": "COMPUTER", "userAgent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/42.0.2311.135 Safari/537.36 Edge/12.246 Mozilla/5.0", "location": {"country": "UNITED STATES", "state": "california", "city": "mountain view", "latitude": 37.40599, "longitude": -122.078514, "zipcode": "94043"}, "id": "41ab6d71-3aa7-30bc-95a6-9af9c719a69f", "sentBy": {"id": "e98ee9c2-9ea4-4f7c-84e3-caace1c0a68b", "created": 
1634042982506}, "smtpId": null, "recipient": "integration-test@airbyte.io", "portalId": 8727216, "type": "OPEN", "filteredEvent": true, "appId": 20053, "emailCampaignId": 2}, "emitted_at": 1655280723836} +{"stream": "email_events", "data": {"appName": "BatchTest", "duration": 0, "created": 1634042986743, "browser": {"name": "Microsoft Edge 12.246", "family": "Microsoft Edge", "producer": "Microsoft Corporation.", "producerUrl": "https://www.microsoft.com/about/", "type": "Browser", "url": "https://en.wikipedia.org/wiki/Microsoft_Edge", "version": ["12.246"]}, "deviceType": "COMPUTER", "userAgent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/42.0.2311.135 Safari/537.36 Edge/12.246 Mozilla/5.0", "location": {"country": "UNITED STATES", "state": "", "city": ""}, "id": "d0c2f47d-bebf-38f6-bf1c-f556795c903e", "sentBy": {"id": "e98ee9c2-9ea4-4f7c-84e3-caace1c0a68b", "created": 1634042982506}, "smtpId": null, "recipient": "integration-test@airbyte.io", "portalId": 8727216, "type": "OPEN", "filteredEvent": true, "appId": 20053, "emailCampaignId": 2}, "emitted_at": 1655280723836} +{"stream": "email_events", "data": {"appName": "BatchTest", "duration": 0, "created": 1634042986251, "browser": {"name": "Microsoft Edge 12.246", "family": "Microsoft Edge", "producer": "Microsoft Corporation.", "producerUrl": "https://www.microsoft.com/about/", "type": "Browser", "url": "https://en.wikipedia.org/wiki/Microsoft_Edge", "version": ["12.246"]}, "deviceType": "COMPUTER", "userAgent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/42.0.2311.135 Safari/537.36 Edge/12.246 Mozilla/5.0", "location": {"country": "UNITED STATES", "state": "", "city": ""}, "id": "e40954e4-dc04-3e67-a833-d3283fef7758", "sentBy": {"id": "e98ee9c2-9ea4-4f7c-84e3-caace1c0a68b", "created": 1634042982506}, "smtpId": null, "recipient": "integration-test@airbyte.io", "portalId": 8727216, "type": "OPEN", "filteredEvent": true, 
"appId": 20053, "emailCampaignId": 2}, "emitted_at": 1655280723837} +{"stream": "email_events", "data": {"appName": "BatchTest", "duration": 0, "created": 1634042985742, "browser": {"name": "Microsoft Edge 12.246", "family": "Microsoft Edge", "producer": "Microsoft Corporation.", "producerUrl": "https://www.microsoft.com/about/", "type": "Browser", "url": "https://en.wikipedia.org/wiki/Microsoft_Edge", "version": ["12.246"]}, "deviceType": "COMPUTER", "userAgent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/42.0.2311.135 Safari/537.36 Edge/12.246 Mozilla/5.0", "location": {"country": "UNITED STATES", "state": "", "city": ""}, "id": "67b212dd-61dd-3894-9443-50083096f51e", "sentBy": {"id": "e98ee9c2-9ea4-4f7c-84e3-caace1c0a68b", "created": 1634042982506}, "smtpId": null, "recipient": "integration-test@airbyte.io", "portalId": 8727216, "type": "OPEN", "filteredEvent": true, "appId": 20053, "emailCampaignId": 2}, "emitted_at": 1655280723837} +{"stream": "email_events", "data": {"appName": "BatchTest", "duration": 0, "created": 1634042985327, "browser": {"name": "Microsoft Edge 12.246", "family": "Microsoft Edge", "producer": "Microsoft Corporation.", "producerUrl": "https://www.microsoft.com/about/", "type": "Browser", "url": "https://en.wikipedia.org/wiki/Microsoft_Edge", "version": ["12.246"]}, "deviceType": "COMPUTER", "userAgent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/42.0.2311.135 Safari/537.36 Edge/12.246 Mozilla/5.0", "location": {"country": "UNITED STATES", "state": "", "city": ""}, "id": "e12b770c-1c58-3480-9f0a-38600dd625ba", "sentBy": {"id": "e98ee9c2-9ea4-4f7c-84e3-caace1c0a68b", "created": 1634042982506}, "smtpId": null, "recipient": "integration-test@airbyte.io", "portalId": 8727216, "type": "OPEN", "filteredEvent": true, "appId": 20053, "emailCampaignId": 2}, "emitted_at": 1655280723837} +{"stream": "email_events", "data": {"appName": "BatchTest", "response": 
"250 2.0.0 OK t8si8990139qta.221 - gsmtp", "created": 1634042983145, "attempt": 1, "id": "15347d6e-b728-38c2-9846-e02047aa90de", "sentBy": {"id": "e98ee9c2-9ea4-4f7c-84e3-caace1c0a68b", "created": 1634042982506}, "smtpId": null, "recipient": "integration-test@airbyte.io", "portalId": 8727216, "type": "DELIVERED", "appId": 20053, "emailCampaignId": 2}, "emitted_at": 1655280723837} +{"stream": "forms", "data": {"id": "984418a9-3f1b-43eb-bbdd-2ced85fb88e2", "name": "Test Form 8", "createdAt": "2021-01-14T14:46:10.680Z", "updatedAt": "2021-01-14T14:46:10.680Z", "archived": false, "fieldGroups": [{"groupType": "default_group", "richTextType": "text", "fields": [{"objectTypeId": "0-1", "name": "firstname", "label": "First Name 8", "required": false, "hidden": false, "fieldType": "single_line_text"}]}, {"groupType": "default_group", "richTextType": "text", "fields": [{"objectTypeId": "0-1", "name": "lastname", "label": "Last Name 8", "required": false, "hidden": false, "fieldType": "single_line_text"}]}, {"groupType": "default_group", "richTextType": "text", "fields": [{"objectTypeId": "0-1", "name": "adress_9", "label": "Adress 9", "required": false, "hidden": false, "fieldType": "single_line_text"}]}], "configuration": {"language": "en", "cloneable": true, "postSubmitAction": {"type": "thank_you", "value": ""}, "editable": true, "archivable": true, "recaptchaEnabled": false, "notifyContactOwner": false, "notifyRecipients": [], "createNewContactForNewEmail": false, "prePopulateKnownValues": true, "allowLinkToResetKnownValues": false}, "displayOptions": {"renderRawHtml": false, "theme": "default_style", "submitButtonText": "Submit", "style": {"fontFamily": "arial, helvetica, sans-serif", "backgroundWidth": "100%", "labelTextColor": "#33475b", "labelTextSize": "11px", "helpTextColor": "#7C98B6", "helpTextSize": "11px", "legalConsentTextColor": "#33475b", "legalConsentTextSize": "14px", "submitColor": "#ff7a59", "submitAlignment": "left", "submitFontColor": "#ffffff", 
"submitSize": "12px"}, "cssClass": null}, "legalConsentOptions": {"type": "none"}, "formType": "hubspot"}, "emitted_at": 1655280732854} +{"stream": "forms", "data": {"id": "5b15ebb8-8474-45c7-a4cd-e1d19e0f14aa", "name": "Test Form 11", "createdAt": "2021-10-12T14:24:16.318Z", "updatedAt": "2021-10-12T14:24:27.719Z", "archived": false, "fieldGroups": [{"groupType": "default_group", "richTextType": "text", "fields": [{"objectTypeId": "0-1", "name": "firstname", "label": "First Name 10", "required": false, "hidden": false, "fieldType": "single_line_text"}]}, {"groupType": "default_group", "richTextType": "text", "fields": [{"objectTypeId": "0-1", "name": "lastname", "label": "Last Name 10", "required": false, "hidden": false, "fieldType": "single_line_text"}]}, {"groupType": "default_group", "richTextType": "text", "fields": [{"objectTypeId": "0-1", "name": "adress_11", "label": "Adress 11", "required": false, "hidden": false, "fieldType": "single_line_text"}]}], "configuration": {"language": "en", "cloneable": true, "postSubmitAction": {"type": "thank_you", "value": "Thanks for submitting the form."}, "editable": true, "archivable": true, "recaptchaEnabled": false, "notifyContactOwner": false, "notifyRecipients": [], "createNewContactForNewEmail": false, "prePopulateKnownValues": true, "allowLinkToResetKnownValues": false}, "displayOptions": {"renderRawHtml": false, "theme": "default_style", "submitButtonText": "Submit", "style": {"fontFamily": "arial, helvetica, sans-serif", "backgroundWidth": "100%", "labelTextColor": "#33475b", "labelTextSize": "11px", "helpTextColor": "#7C98B6", "helpTextSize": "11px", "legalConsentTextColor": "#33475b", "legalConsentTextSize": "14px", "submitColor": "#ff7a59", "submitAlignment": "left", "submitFontColor": "#ffffff", "submitSize": "12px"}, "cssClass": null}, "legalConsentOptions": {"type": "none"}, "formType": "hubspot"}, "emitted_at": 1655280732856} +{"stream": "forms", "data": {"id": "e9cb252e-d624-4b4d-b1e0-43294c525231", 
"name": "Test Form 9", "createdAt": "2021-01-14T14:46:11.360Z", "updatedAt": "2021-01-14T14:46:11.360Z", "archived": false, "fieldGroups": [{"groupType": "default_group", "richTextType": "text", "fields": [{"objectTypeId": "0-1", "name": "firstname", "label": "First Name 9", "required": false, "hidden": false, "fieldType": "single_line_text"}]}, {"groupType": "default_group", "richTextType": "text", "fields": [{"objectTypeId": "0-1", "name": "lastname", "label": "Last Name 9", "required": false, "hidden": false, "fieldType": "single_line_text"}]}, {"groupType": "default_group", "richTextType": "text", "fields": [{"objectTypeId": "0-1", "name": "adress_10", "label": "Adress 10", "required": false, "hidden": false, "fieldType": "single_line_text"}]}], "configuration": {"language": "en", "cloneable": true, "postSubmitAction": {"type": "thank_you", "value": ""}, "editable": true, "archivable": true, "recaptchaEnabled": false, "notifyContactOwner": false, "notifyRecipients": [], "createNewContactForNewEmail": false, "prePopulateKnownValues": true, "allowLinkToResetKnownValues": false}, "displayOptions": {"renderRawHtml": false, "theme": "default_style", "submitButtonText": "Submit", "style": {"fontFamily": "arial, helvetica, sans-serif", "backgroundWidth": "100%", "labelTextColor": "#33475b", "labelTextSize": "11px", "helpTextColor": "#7C98B6", "helpTextSize": "11px", "legalConsentTextColor": "#33475b", "legalConsentTextSize": "14px", "submitColor": "#ff7a59", "submitAlignment": "left", "submitFontColor": "#ffffff", "submitSize": "12px"}, "cssClass": null}, "legalConsentOptions": {"type": "none"}, "formType": "hubspot"}, "emitted_at": 1655280732857} +{"stream": "forms", "data": {"id": "47465ffa-f8d6-444c-a1a3-3c690bf0cf33", "name": "Test Form 15", "createdAt": "2021-10-12T14:24:55.230Z", "updatedAt": "2021-10-12T14:24:55.230Z", "archived": false, "fieldGroups": [{"groupType": "default_group", "richTextType": "text", "fields": [{"objectTypeId": "0-1", "name": 
"firstname", "label": "First Name 10", "required": false, "hidden": false, "fieldType": "single_line_text"}]}, {"groupType": "default_group", "richTextType": "text", "fields": [{"objectTypeId": "0-1", "name": "lastname", "label": "Last Name 10", "required": false, "hidden": false, "fieldType": "single_line_text"}]}, {"groupType": "default_group", "richTextType": "text", "fields": [{"objectTypeId": "0-1", "name": "adress_11", "label": "Adress 11", "required": false, "hidden": false, "fieldType": "single_line_text"}]}], "configuration": {"language": "en", "cloneable": true, "postSubmitAction": {"type": "thank_you", "value": "Thanks for submitting the form."}, "editable": true, "archivable": true, "recaptchaEnabled": false, "notifyContactOwner": false, "notifyRecipients": [], "createNewContactForNewEmail": false, "prePopulateKnownValues": true, "allowLinkToResetKnownValues": false}, "displayOptions": {"renderRawHtml": false, "theme": "default_style", "submitButtonText": "Submit", "style": {"fontFamily": "arial, helvetica, sans-serif", "backgroundWidth": "100%", "labelTextColor": "#33475b", "labelTextSize": "11px", "helpTextColor": "#7C98B6", "helpTextSize": "11px", "legalConsentTextColor": "#33475b", "legalConsentTextSize": "14px", "submitColor": "#ff7a59", "submitAlignment": "left", "submitFontColor": "#ffffff", "submitSize": "12px"}, "cssClass": null}, "legalConsentOptions": {"type": "none"}, "formType": "hubspot"}, "emitted_at": 1655280732858} +{"stream": "forms", "data": {"id": "3b8bd96a-9972-440d-9543-cc0f14439042", "name": "Test Form 14", "createdAt": "2021-10-12T14:24:49.416Z", "updatedAt": "2021-10-12T14:24:49.416Z", "archived": false, "fieldGroups": [{"groupType": "default_group", "richTextType": "text", "fields": [{"objectTypeId": "0-1", "name": "firstname", "label": "First Name 10", "required": false, "hidden": false, "fieldType": "single_line_text"}]}, {"groupType": "default_group", "richTextType": "text", "fields": [{"objectTypeId": "0-1", "name": 
"lastname", "label": "Last Name 10", "required": false, "hidden": false, "fieldType": "single_line_text"}]}, {"groupType": "default_group", "richTextType": "text", "fields": [{"objectTypeId": "0-1", "name": "adress_11", "label": "Adress 11", "required": false, "hidden": false, "fieldType": "single_line_text"}]}], "configuration": {"language": "en", "cloneable": true, "postSubmitAction": {"type": "thank_you", "value": "Thanks for submitting the form."}, "editable": true, "archivable": true, "recaptchaEnabled": false, "notifyContactOwner": false, "notifyRecipients": [], "createNewContactForNewEmail": false, "prePopulateKnownValues": true, "allowLinkToResetKnownValues": false}, "displayOptions": {"renderRawHtml": false, "theme": "default_style", "submitButtonText": "Submit", "style": {"fontFamily": "arial, helvetica, sans-serif", "backgroundWidth": "100%", "labelTextColor": "#33475b", "labelTextSize": "11px", "helpTextColor": "#7C98B6", "helpTextSize": "11px", "legalConsentTextColor": "#33475b", "legalConsentTextSize": "14px", "submitColor": "#ff7a59", "submitAlignment": "left", "submitFontColor": "#ffffff", "submitSize": "12px"}, "cssClass": null}, "legalConsentOptions": {"type": "none"}, "formType": "hubspot"}, "emitted_at": 1655280732860} +{"stream": "forms", "data": {"id": "1b6319b8-82ef-4fd5-b2a7-3882ecd0ba92", "name": "Test Form 5", "createdAt": "2021-01-14T14:46:09.656Z", "updatedAt": "2021-01-14T14:46:09.656Z", "archived": false, "fieldGroups": [{"groupType": "default_group", "richTextType": "text", "fields": [{"objectTypeId": "0-1", "name": "firstname", "label": "First Name 5", "required": false, "hidden": false, "fieldType": "single_line_text"}]}, {"groupType": "default_group", "richTextType": "text", "fields": [{"objectTypeId": "0-1", "name": "lastname", "label": "Last Name 5", "required": false, "hidden": false, "fieldType": "single_line_text"}]}, {"groupType": "default_group", "richTextType": "text", "fields": [{"objectTypeId": "0-1", "name": "adress_6", 
"label": "Adress 6", "required": false, "hidden": false, "fieldType": "single_line_text"}]}], "configuration": {"language": "en", "cloneable": true, "postSubmitAction": {"type": "thank_you", "value": ""}, "editable": true, "archivable": true, "recaptchaEnabled": false, "notifyContactOwner": false, "notifyRecipients": [], "createNewContactForNewEmail": false, "prePopulateKnownValues": true, "allowLinkToResetKnownValues": false}, "displayOptions": {"renderRawHtml": false, "theme": "default_style", "submitButtonText": "Submit", "style": {"fontFamily": "arial, helvetica, sans-serif", "backgroundWidth": "100%", "labelTextColor": "#33475b", "labelTextSize": "11px", "helpTextColor": "#7C98B6", "helpTextSize": "11px", "legalConsentTextColor": "#33475b", "legalConsentTextSize": "14px", "submitColor": "#ff7a59", "submitAlignment": "left", "submitFontColor": "#ffffff", "submitSize": "12px"}, "cssClass": null}, "legalConsentOptions": {"type": "none"}, "formType": "hubspot"}, "emitted_at": 1655280732861} +{"stream": "forms", "data": {"id": "2244dd4d-fd8c-479b-a6ad-3328b2b8ae94", "name": "Test Form 17", "createdAt": "2021-10-12T14:25:08.964Z", "updatedAt": "2021-10-12T14:25:08.964Z", "archived": false, "fieldGroups": [{"groupType": "default_group", "richTextType": "text", "fields": [{"objectTypeId": "0-1", "name": "firstname", "label": "First Name 10", "required": false, "hidden": false, "fieldType": "single_line_text"}]}, {"groupType": "default_group", "richTextType": "text", "fields": [{"objectTypeId": "0-1", "name": "lastname", "label": "Last Name 10", "required": false, "hidden": false, "fieldType": "single_line_text"}]}, {"groupType": "default_group", "richTextType": "text", "fields": [{"objectTypeId": "0-1", "name": "adress_11", "label": "Adress 11", "required": false, "hidden": false, "fieldType": "single_line_text"}]}], "configuration": {"language": "en", "cloneable": true, "postSubmitAction": {"type": "thank_you", "value": "Thanks for submitting the form."}, "editable": 
true, "archivable": true, "recaptchaEnabled": false, "notifyContactOwner": false, "notifyRecipients": [], "createNewContactForNewEmail": false, "prePopulateKnownValues": true, "allowLinkToResetKnownValues": false}, "displayOptions": {"renderRawHtml": false, "theme": "default_style", "submitButtonText": "Submit", "style": {"fontFamily": "arial, helvetica, sans-serif", "backgroundWidth": "100%", "labelTextColor": "#33475b", "labelTextSize": "11px", "helpTextColor": "#7C98B6", "helpTextSize": "11px", "legalConsentTextColor": "#33475b", "legalConsentTextSize": "14px", "submitColor": "#ff7a59", "submitAlignment": "left", "submitFontColor": "#ffffff", "submitSize": "12px"}, "cssClass": null}, "legalConsentOptions": {"type": "none"}, "formType": "hubspot"}, "emitted_at": 1655280732862} +{"stream": "forms", "data": {"id": "7bf12799-c984-46f5-9112-c2d652483fce", "name": "Test Form 20", "createdAt": "2021-10-12T14:25:26.866Z", "updatedAt": "2021-10-12T14:25:26.866Z", "archived": false, "fieldGroups": [{"groupType": "default_group", "richTextType": "text", "fields": [{"objectTypeId": "0-1", "name": "firstname", "label": "First Name 10", "required": false, "hidden": false, "fieldType": "single_line_text"}]}, {"groupType": "default_group", "richTextType": "text", "fields": [{"objectTypeId": "0-1", "name": "lastname", "label": "Last Name 10", "required": false, "hidden": false, "fieldType": "single_line_text"}]}, {"groupType": "default_group", "richTextType": "text", "fields": [{"objectTypeId": "0-1", "name": "adress_11", "label": "Adress 11", "required": false, "hidden": false, "fieldType": "single_line_text"}]}], "configuration": {"language": "en", "cloneable": true, "postSubmitAction": {"type": "thank_you", "value": "Thanks for submitting the form."}, "editable": true, "archivable": true, "recaptchaEnabled": false, "notifyContactOwner": false, "notifyRecipients": [], "createNewContactForNewEmail": false, "prePopulateKnownValues": true, "allowLinkToResetKnownValues": false}, 
"displayOptions": {"renderRawHtml": false, "theme": "default_style", "submitButtonText": "Submit", "style": {"fontFamily": "arial, helvetica, sans-serif", "backgroundWidth": "100%", "labelTextColor": "#33475b", "labelTextSize": "11px", "helpTextColor": "#7C98B6", "helpTextSize": "11px", "legalConsentTextColor": "#33475b", "legalConsentTextSize": "14px", "submitColor": "#ff7a59", "submitAlignment": "left", "submitFontColor": "#ffffff", "submitSize": "12px"}, "cssClass": null}, "legalConsentOptions": {"type": "none"}, "formType": "hubspot"}, "emitted_at": 1655280732863} +{"stream": "forms", "data": {"id": "a69652ea-f6db-476f-87e4-a159e43bfcf1", "name": "Test Form 3", "createdAt": "2021-01-14T14:46:09.010Z", "updatedAt": "2021-01-14T14:46:09.010Z", "archived": false, "fieldGroups": [{"groupType": "default_group", "richTextType": "text", "fields": [{"objectTypeId": "0-1", "name": "firstname", "label": "First Name 3", "required": false, "hidden": false, "fieldType": "single_line_text"}]}, {"groupType": "default_group", "richTextType": "text", "fields": [{"objectTypeId": "0-1", "name": "lastname", "label": "Last Name 3", "required": false, "hidden": false, "fieldType": "single_line_text"}]}, {"groupType": "default_group", "richTextType": "text", "fields": [{"objectTypeId": "0-1", "name": "adress_4", "label": "Adress 4", "required": false, "hidden": false, "fieldType": "single_line_text"}]}], "configuration": {"language": "en", "cloneable": true, "postSubmitAction": {"type": "thank_you", "value": ""}, "editable": true, "archivable": true, "recaptchaEnabled": false, "notifyContactOwner": false, "notifyRecipients": [], "createNewContactForNewEmail": false, "prePopulateKnownValues": true, "allowLinkToResetKnownValues": false}, "displayOptions": {"renderRawHtml": false, "theme": "default_style", "submitButtonText": "Submit", "style": {"fontFamily": "arial, helvetica, sans-serif", "backgroundWidth": "100%", "labelTextColor": "#33475b", "labelTextSize": "11px", "helpTextColor": 
"#7C98B6", "helpTextSize": "11px", "legalConsentTextColor": "#33475b", "legalConsentTextSize": "14px", "submitColor": "#ff7a59", "submitAlignment": "left", "submitFontColor": "#ffffff", "submitSize": "12px"}, "cssClass": null}, "legalConsentOptions": {"type": "none"}, "formType": "hubspot"}, "emitted_at": 1655280732865} +{"stream": "forms", "data": {"id": "5acdcc7d-c22a-4ed3-baf8-309ee0a468f6", "name": "Test Form 2", "createdAt": "2021-01-14T14:46:08.708Z", "updatedAt": "2021-01-14T14:46:08.708Z", "archived": false, "fieldGroups": [{"groupType": "default_group", "richTextType": "text", "fields": [{"objectTypeId": "0-1", "name": "firstname", "label": "First Name 2", "required": false, "hidden": false, "fieldType": "single_line_text"}]}, {"groupType": "default_group", "richTextType": "text", "fields": [{"objectTypeId": "0-1", "name": "lastname", "label": "Last Name 2", "required": false, "hidden": false, "fieldType": "single_line_text"}]}, {"groupType": "default_group", "richTextType": "text", "fields": [{"objectTypeId": "0-1", "name": "adress_3", "label": "Adress 3", "required": false, "hidden": false, "fieldType": "single_line_text"}]}], "configuration": {"language": "en", "cloneable": true, "postSubmitAction": {"type": "thank_you", "value": ""}, "editable": true, "archivable": true, "recaptchaEnabled": false, "notifyContactOwner": false, "notifyRecipients": [], "createNewContactForNewEmail": false, "prePopulateKnownValues": true, "allowLinkToResetKnownValues": false}, "displayOptions": {"renderRawHtml": false, "theme": "default_style", "submitButtonText": "Submit", "style": {"fontFamily": "arial, helvetica, sans-serif", "backgroundWidth": "100%", "labelTextColor": "#33475b", "labelTextSize": "11px", "helpTextColor": "#7C98B6", "helpTextSize": "11px", "legalConsentTextColor": "#33475b", "legalConsentTextSize": "14px", "submitColor": "#ff7a59", "submitAlignment": "left", "submitFontColor": "#ffffff", "submitSize": "12px"}, "cssClass": null}, "legalConsentOptions": 
{"type": "none"}, "formType": "hubspot"}, "emitted_at": 1655280732866} +{"stream": "forms", "data": {"id": "9fc39345-42d7-4369-b753-b015de928399", "name": "New form (March 15, 2021 12:55:56 PM)", "createdAt": "2021-03-15T10:55:56.900Z", "updatedAt": "2021-10-30T15:33:20.320Z", "archived": false, "fieldGroups": [{"groupType": "default_group", "richTextType": "text", "fields": [{"objectTypeId": "0-1", "name": "email", "label": "Email", "required": true, "hidden": false, "fieldType": "email", "validation": {"blockedEmailDomains": [], "useDefaultBlockList": false}}]}], "configuration": {"language": "en", "cloneable": true, "postSubmitAction": {"type": "thank_you", "value": "Thanks for submitting the form."}, "editable": true, "archivable": true, "recaptchaEnabled": false, "notifyContactOwner": false, "notifyRecipients": ["12282590"], "createNewContactForNewEmail": false, "prePopulateKnownValues": true, "allowLinkToResetKnownValues": false}, "displayOptions": {"renderRawHtml": false, "theme": "default_style", "submitButtonText": "Submit", "style": {"fontFamily": "arial, helvetica, sans-serif", "backgroundWidth": "100%", "labelTextColor": "#33475b", "labelTextSize": "11px", "helpTextColor": "#7C98B6", "helpTextSize": "11px", "legalConsentTextColor": "#33475b", "legalConsentTextSize": "14px", "submitColor": "#ff7a59", "submitAlignment": "left", "submitFontColor": "#ffffff", "submitSize": "12px"}, "cssClass": "hs-form stacked"}, "legalConsentOptions": {"type": "none"}, "formType": "hubspot"}, "emitted_at": 1655280732867} +{"stream": "forms", "data": {"id": "28a293e1-0113-4d3c-a9e5-09043053b2a1", "name": "Test Form 13", "createdAt": "2021-10-12T14:24:43.336Z", "updatedAt": "2021-10-12T14:24:43.336Z", "archived": false, "fieldGroups": [{"groupType": "default_group", "richTextType": "text", "fields": [{"objectTypeId": "0-1", "name": "firstname", "label": "First Name 10", "required": false, "hidden": false, "fieldType": "single_line_text"}]}, {"groupType": "default_group", 
"richTextType": "text", "fields": [{"objectTypeId": "0-1", "name": "lastname", "label": "Last Name 10", "required": false, "hidden": false, "fieldType": "single_line_text"}]}, {"groupType": "default_group", "richTextType": "text", "fields": [{"objectTypeId": "0-1", "name": "adress_11", "label": "Adress 11", "required": false, "hidden": false, "fieldType": "single_line_text"}]}], "configuration": {"language": "en", "cloneable": true, "postSubmitAction": {"type": "thank_you", "value": "Thanks for submitting the form."}, "editable": true, "archivable": true, "recaptchaEnabled": false, "notifyContactOwner": false, "notifyRecipients": [], "createNewContactForNewEmail": false, "prePopulateKnownValues": true, "allowLinkToResetKnownValues": false}, "displayOptions": {"renderRawHtml": false, "theme": "default_style", "submitButtonText": "Submit", "style": {"fontFamily": "arial, helvetica, sans-serif", "backgroundWidth": "100%", "labelTextColor": "#33475b", "labelTextSize": "11px", "helpTextColor": "#7C98B6", "helpTextSize": "11px", "legalConsentTextColor": "#33475b", "legalConsentTextSize": "14px", "submitColor": "#ff7a59", "submitAlignment": "left", "submitFontColor": "#ffffff", "submitSize": "12px"}, "cssClass": null}, "legalConsentOptions": {"type": "none"}, "formType": "hubspot"}, "emitted_at": 1655280732868} +{"stream": "forms", "data": {"id": "252302a1-68c8-4355-90e6-bd07667a212b", "name": "Test Form 19", "createdAt": "2021-10-12T14:25:20.956Z", "updatedAt": "2021-10-12T14:25:20.956Z", "archived": false, "fieldGroups": [{"groupType": "default_group", "richTextType": "text", "fields": [{"objectTypeId": "0-1", "name": "firstname", "label": "First Name 10", "required": false, "hidden": false, "fieldType": "single_line_text"}]}, {"groupType": "default_group", "richTextType": "text", "fields": [{"objectTypeId": "0-1", "name": "lastname", "label": "Last Name 10", "required": false, "hidden": false, "fieldType": "single_line_text"}]}, {"groupType": "default_group", 
"richTextType": "text", "fields": [{"objectTypeId": "0-1", "name": "adress_11", "label": "Adress 11", "required": false, "hidden": false, "fieldType": "single_line_text"}]}], "configuration": {"language": "en", "cloneable": true, "postSubmitAction": {"type": "thank_you", "value": "Thanks for submitting the form."}, "editable": true, "archivable": true, "recaptchaEnabled": false, "notifyContactOwner": false, "notifyRecipients": [], "createNewContactForNewEmail": false, "prePopulateKnownValues": true, "allowLinkToResetKnownValues": false}, "displayOptions": {"renderRawHtml": false, "theme": "default_style", "submitButtonText": "Submit", "style": {"fontFamily": "arial, helvetica, sans-serif", "backgroundWidth": "100%", "labelTextColor": "#33475b", "labelTextSize": "11px", "helpTextColor": "#7C98B6", "helpTextSize": "11px", "legalConsentTextColor": "#33475b", "legalConsentTextSize": "14px", "submitColor": "#ff7a59", "submitAlignment": "left", "submitFontColor": "#ffffff", "submitSize": "12px"}, "cssClass": null}, "legalConsentOptions": {"type": "none"}, "formType": "hubspot"}, "emitted_at": 1655280732869} +{"stream": "forms", "data": {"id": "01ba116c-f3a8-4957-8884-ff0c4420af76", "name": "DemoForm", "createdAt": "2021-01-14T14:44:48.278Z", "updatedAt": "2021-01-14T14:44:48.278Z", "archived": false, "fieldGroups": [{"groupType": "default_group", "richTextType": "text", "fields": [{"objectTypeId": "0-1", "name": "firstname", "label": "First Name", "required": false, "hidden": false, "fieldType": "single_line_text"}]}, {"groupType": "default_group", "richTextType": "text", "fields": [{"objectTypeId": "0-1", "name": "lastname", "label": "Last Name", "required": false, "hidden": false, "fieldType": "single_line_text"}]}, {"groupType": "default_group", "richTextType": "text", "fields": [{"objectTypeId": "0-1", "name": "adress_1", "label": "Adress 1", "required": false, "hidden": false, "fieldType": "single_line_text"}]}], "configuration": {"language": "en", "cloneable": 
true, "postSubmitAction": {"type": "thank_you", "value": ""}, "editable": true, "archivable": true, "recaptchaEnabled": false, "notifyContactOwner": false, "notifyRecipients": [], "createNewContactForNewEmail": false, "prePopulateKnownValues": true, "allowLinkToResetKnownValues": false}, "displayOptions": {"renderRawHtml": false, "theme": "default_style", "submitButtonText": "Submit", "style": {"fontFamily": "arial, helvetica, sans-serif", "backgroundWidth": "100%", "labelTextColor": "#33475b", "labelTextSize": "11px", "helpTextColor": "#7C98B6", "helpTextSize": "11px", "legalConsentTextColor": "#33475b", "legalConsentTextSize": "14px", "submitColor": "#ff7a59", "submitAlignment": "left", "submitFontColor": "#ffffff", "submitSize": "12px"}, "cssClass": null}, "legalConsentOptions": {"type": "none"}, "formType": "hubspot"}, "emitted_at": 1655280732870} +{"stream": "forms", "data": {"id": "e43ba6f6-ac95-467d-b65e-aafc19e324ed", "name": "Test Form 6", "createdAt": "2021-01-14T14:46:09.956Z", "updatedAt": "2021-01-14T14:46:09.956Z", "archived": false, "fieldGroups": [{"groupType": "default_group", "richTextType": "text", "fields": [{"objectTypeId": "0-1", "name": "firstname", "label": "First Name 6", "required": false, "hidden": false, "fieldType": "single_line_text"}]}, {"groupType": "default_group", "richTextType": "text", "fields": [{"objectTypeId": "0-1", "name": "lastname", "label": "Last Name 6", "required": false, "hidden": false, "fieldType": "single_line_text"}]}, {"groupType": "default_group", "richTextType": "text", "fields": [{"objectTypeId": "0-1", "name": "adress_7", "label": "Adress 7", "required": false, "hidden": false, "fieldType": "single_line_text"}]}], "configuration": {"language": "en", "cloneable": true, "postSubmitAction": {"type": "thank_you", "value": ""}, "editable": true, "archivable": true, "recaptchaEnabled": false, "notifyContactOwner": false, "notifyRecipients": [], "createNewContactForNewEmail": false, "prePopulateKnownValues": true, 
"allowLinkToResetKnownValues": false}, "displayOptions": {"renderRawHtml": false, "theme": "default_style", "submitButtonText": "Submit", "style": {"fontFamily": "arial, helvetica, sans-serif", "backgroundWidth": "100%", "labelTextColor": "#33475b", "labelTextSize": "11px", "helpTextColor": "#7C98B6", "helpTextSize": "11px", "legalConsentTextColor": "#33475b", "legalConsentTextSize": "14px", "submitColor": "#ff7a59", "submitAlignment": "left", "submitFontColor": "#ffffff", "submitSize": "12px"}, "cssClass": null}, "legalConsentOptions": {"type": "none"}, "formType": "hubspot"}, "emitted_at": 1655280732872} +{"stream": "forms", "data": {"id": "b1d96614-f881-4837-a2b8-72a6fa1be9f1", "name": "Test Form 4", "createdAt": "2021-01-14T14:46:09.365Z", "updatedAt": "2021-01-14T14:46:09.365Z", "archived": false, "fieldGroups": [{"groupType": "default_group", "richTextType": "text", "fields": [{"objectTypeId": "0-1", "name": "firstname", "label": "First Name 4", "required": false, "hidden": false, "fieldType": "single_line_text"}]}, {"groupType": "default_group", "richTextType": "text", "fields": [{"objectTypeId": "0-1", "name": "lastname", "label": "Last Name 4", "required": false, "hidden": false, "fieldType": "single_line_text"}]}, {"groupType": "default_group", "richTextType": "text", "fields": [{"objectTypeId": "0-1", "name": "adress_5", "label": "Adress 5", "required": false, "hidden": false, "fieldType": "single_line_text"}]}], "configuration": {"language": "en", "cloneable": true, "postSubmitAction": {"type": "thank_you", "value": ""}, "editable": true, "archivable": true, "recaptchaEnabled": false, "notifyContactOwner": false, "notifyRecipients": [], "createNewContactForNewEmail": false, "prePopulateKnownValues": true, "allowLinkToResetKnownValues": false}, "displayOptions": {"renderRawHtml": false, "theme": "default_style", "submitButtonText": "Submit", "style": {"fontFamily": "arial, helvetica, sans-serif", "backgroundWidth": "100%", "labelTextColor": "#33475b", 
"labelTextSize": "11px", "helpTextColor": "#7C98B6", "helpTextSize": "11px", "legalConsentTextColor": "#33475b", "legalConsentTextSize": "14px", "submitColor": "#ff7a59", "submitAlignment": "left", "submitFontColor": "#ffffff", "submitSize": "12px"}, "cssClass": null}, "legalConsentOptions": {"type": "none"}, "formType": "hubspot"}, "emitted_at": 1655280732873} +{"stream": "forms", "data": {"id": "f55a6b51-550e-491e-af4f-9e764a316acb", "name": "Test Form 12", "createdAt": "2021-10-12T14:24:36.891Z", "updatedAt": "2021-10-12T14:24:36.891Z", "archived": false, "fieldGroups": [{"groupType": "default_group", "richTextType": "text", "fields": [{"objectTypeId": "0-1", "name": "firstname", "label": "First Name 10", "required": false, "hidden": false, "fieldType": "single_line_text"}]}, {"groupType": "default_group", "richTextType": "text", "fields": [{"objectTypeId": "0-1", "name": "lastname", "label": "Last Name 10", "required": false, "hidden": false, "fieldType": "single_line_text"}]}, {"groupType": "default_group", "richTextType": "text", "fields": [{"objectTypeId": "0-1", "name": "adress_11", "label": "Adress 11", "required": false, "hidden": false, "fieldType": "single_line_text"}]}], "configuration": {"language": "en", "cloneable": true, "postSubmitAction": {"type": "thank_you", "value": "Thanks for submitting the form."}, "editable": true, "archivable": true, "recaptchaEnabled": false, "notifyContactOwner": false, "notifyRecipients": [], "createNewContactForNewEmail": false, "prePopulateKnownValues": true, "allowLinkToResetKnownValues": false}, "displayOptions": {"renderRawHtml": false, "theme": "default_style", "submitButtonText": "Submit", "style": {"fontFamily": "arial, helvetica, sans-serif", "backgroundWidth": "100%", "labelTextColor": "#33475b", "labelTextSize": "11px", "helpTextColor": "#7C98B6", "helpTextSize": "11px", "legalConsentTextColor": "#33475b", "legalConsentTextSize": "14px", "submitColor": "#ff7a59", "submitAlignment": "left", "submitFontColor": 
"#ffffff", "submitSize": "12px"}, "cssClass": null}, "legalConsentOptions": {"type": "none"}, "formType": "hubspot"}, "emitted_at": 1655280732874} +{"stream": "forms", "data": {"id": "5fa4d8c1-c2ff-4029-8762-4cdf9e369021", "name": "Test Form 10", "createdAt": "2021-01-14T14:46:11.693Z", "updatedAt": "2021-01-14T14:46:11.693Z", "archived": false, "fieldGroups": [{"groupType": "default_group", "richTextType": "text", "fields": [{"objectTypeId": "0-1", "name": "firstname", "label": "First Name 10", "required": false, "hidden": false, "fieldType": "single_line_text"}]}, {"groupType": "default_group", "richTextType": "text", "fields": [{"objectTypeId": "0-1", "name": "lastname", "label": "Last Name 10", "required": false, "hidden": false, "fieldType": "single_line_text"}]}, {"groupType": "default_group", "richTextType": "text", "fields": [{"objectTypeId": "0-1", "name": "adress_11", "label": "Adress 11", "required": false, "hidden": false, "fieldType": "single_line_text"}]}], "configuration": {"language": "en", "cloneable": true, "postSubmitAction": {"type": "thank_you", "value": ""}, "editable": true, "archivable": true, "recaptchaEnabled": false, "notifyContactOwner": false, "notifyRecipients": [], "createNewContactForNewEmail": false, "prePopulateKnownValues": true, "allowLinkToResetKnownValues": false}, "displayOptions": {"renderRawHtml": false, "theme": "default_style", "submitButtonText": "Submit", "style": {"fontFamily": "arial, helvetica, sans-serif", "backgroundWidth": "100%", "labelTextColor": "#33475b", "labelTextSize": "11px", "helpTextColor": "#7C98B6", "helpTextSize": "11px", "legalConsentTextColor": "#33475b", "legalConsentTextSize": "14px", "submitColor": "#ff7a59", "submitAlignment": "left", "submitFontColor": "#ffffff", "submitSize": "12px"}, "cssClass": null}, "legalConsentOptions": {"type": "none"}, "formType": "hubspot"}, "emitted_at": 1655280732875} +{"stream": "forms", "data": {"id": "ed51ef3e-a263-4aa8-bfd7-1c621595b911", "name": "Test Form 1", 
"createdAt": "2021-01-14T14:46:08.325Z", "updatedAt": "2021-01-14T14:46:08.325Z", "archived": false, "fieldGroups": [{"groupType": "default_group", "richTextType": "text", "fields": [{"objectTypeId": "0-1", "name": "firstname", "label": "First Name 1", "required": false, "hidden": false, "fieldType": "single_line_text"}]}, {"groupType": "default_group", "richTextType": "text", "fields": [{"objectTypeId": "0-1", "name": "lastname", "label": "Last Name 1", "required": false, "hidden": false, "fieldType": "single_line_text"}]}, {"groupType": "default_group", "richTextType": "text", "fields": [{"objectTypeId": "0-1", "name": "adress_2", "label": "Adress 2", "required": false, "hidden": false, "fieldType": "single_line_text"}]}], "configuration": {"language": "en", "cloneable": true, "postSubmitAction": {"type": "thank_you", "value": ""}, "editable": true, "archivable": true, "recaptchaEnabled": false, "notifyContactOwner": false, "notifyRecipients": [], "createNewContactForNewEmail": false, "prePopulateKnownValues": true, "allowLinkToResetKnownValues": false}, "displayOptions": {"renderRawHtml": false, "theme": "default_style", "submitButtonText": "Submit", "style": {"fontFamily": "arial, helvetica, sans-serif", "backgroundWidth": "100%", "labelTextColor": "#33475b", "labelTextSize": "11px", "helpTextColor": "#7C98B6", "helpTextSize": "11px", "legalConsentTextColor": "#33475b", "legalConsentTextSize": "14px", "submitColor": "#ff7a59", "submitAlignment": "left", "submitFontColor": "#ffffff", "submitSize": "12px"}, "cssClass": null}, "legalConsentOptions": {"type": "none"}, "formType": "hubspot"}, "emitted_at": 1655280732875} +{"stream": "forms", "data": {"id": "4cd1473a-7f67-49b2-b63b-3167a1351bb3", "name": "Test Form 18", "createdAt": "2021-10-12T14:25:15.368Z", "updatedAt": "2021-10-12T14:25:15.368Z", "archived": false, "fieldGroups": [{"groupType": "default_group", "richTextType": "text", "fields": [{"objectTypeId": "0-1", "name": "firstname", "label": "First Name 
10", "required": false, "hidden": false, "fieldType": "single_line_text"}]}, {"groupType": "default_group", "richTextType": "text", "fields": [{"objectTypeId": "0-1", "name": "lastname", "label": "Last Name 10", "required": false, "hidden": false, "fieldType": "single_line_text"}]}, {"groupType": "default_group", "richTextType": "text", "fields": [{"objectTypeId": "0-1", "name": "adress_11", "label": "Adress 11", "required": false, "hidden": false, "fieldType": "single_line_text"}]}], "configuration": {"language": "en", "cloneable": true, "postSubmitAction": {"type": "thank_you", "value": "Thanks for submitting the form."}, "editable": true, "archivable": true, "recaptchaEnabled": false, "notifyContactOwner": false, "notifyRecipients": [], "createNewContactForNewEmail": false, "prePopulateKnownValues": true, "allowLinkToResetKnownValues": false}, "displayOptions": {"renderRawHtml": false, "theme": "default_style", "submitButtonText": "Submit", "style": {"fontFamily": "arial, helvetica, sans-serif", "backgroundWidth": "100%", "labelTextColor": "#33475b", "labelTextSize": "11px", "helpTextColor": "#7C98B6", "helpTextSize": "11px", "legalConsentTextColor": "#33475b", "legalConsentTextSize": "14px", "submitColor": "#ff7a59", "submitAlignment": "left", "submitFontColor": "#ffffff", "submitSize": "12px"}, "cssClass": null}, "legalConsentOptions": {"type": "none"}, "formType": "hubspot"}, "emitted_at": 1655280732876} +{"stream": "forms", "data": {"id": "db548333-64a9-4744-9999-e4cf63516007", "name": "Test Form 7", "createdAt": "2021-01-14T14:46:10.284Z", "updatedAt": "2021-01-14T14:46:10.284Z", "archived": false, "fieldGroups": [{"groupType": "default_group", "richTextType": "text", "fields": [{"objectTypeId": "0-1", "name": "firstname", "label": "First Name 7", "required": false, "hidden": false, "fieldType": "single_line_text"}]}, {"groupType": "default_group", "richTextType": "text", "fields": [{"objectTypeId": "0-1", "name": "lastname", "label": "Last Name 7", 
"required": false, "hidden": false, "fieldType": "single_line_text"}]}, {"groupType": "default_group", "richTextType": "text", "fields": [{"objectTypeId": "0-1", "name": "adress_8", "label": "Adress 8", "required": false, "hidden": false, "fieldType": "single_line_text"}]}], "configuration": {"language": "en", "cloneable": true, "postSubmitAction": {"type": "thank_you", "value": ""}, "editable": true, "archivable": true, "recaptchaEnabled": false, "notifyContactOwner": false, "notifyRecipients": [], "createNewContactForNewEmail": false, "prePopulateKnownValues": true, "allowLinkToResetKnownValues": false}, "displayOptions": {"renderRawHtml": false, "theme": "default_style", "submitButtonText": "Submit", "style": {"fontFamily": "arial, helvetica, sans-serif", "backgroundWidth": "100%", "labelTextColor": "#33475b", "labelTextSize": "11px", "helpTextColor": "#7C98B6", "helpTextSize": "11px", "legalConsentTextColor": "#33475b", "legalConsentTextSize": "14px", "submitColor": "#ff7a59", "submitAlignment": "left", "submitFontColor": "#ffffff", "submitSize": "12px"}, "cssClass": null}, "legalConsentOptions": {"type": "none"}, "formType": "hubspot"}, "emitted_at": 1655280732877} +{"stream": "line_items", "data": {"id": "1188257157", "properties": {"amount": 123.0, "createdate": "2021-02-23T20:03:48.577000+00:00", "description": null, "discount": null, "hs_acv": 123.0, "hs_all_accessible_team_ids": null, "hs_all_assigned_business_unit_ids": null, "hs_all_owner_ids": null, "hs_all_team_ids": null, "hs_allow_buyer_selected_quantity": null, "hs_arr": 0.0, "hs_cost_of_goods_sold": null, "hs_created_by_user_id": 12282590, "hs_createdate": null, "hs_discount_percentage": null, "hs_external_id": null, "hs_images": null, "hs_lastmodifieddate": "2021-07-17T07:01:43.045000+00:00", "hs_line_item_currency_code": null, "hs_margin": 123.0, "hs_margin_acv": 123.0, "hs_margin_arr": 0.0, "hs_margin_mrr": 0.0, "hs_margin_tcv": 123.0, "hs_merged_object_ids": null, "hs_mrr": 0.0, 
"hs_object_id": 1188257157, "hs_position_on_quote": null, "hs_pre_discount_amount": 123.0, "hs_product_id": 646176423, "hs_recurring_billing_end_date": null, "hs_recurring_billing_number_of_payments": null, "hs_recurring_billing_period": null, "hs_recurring_billing_start_date": null, "hs_recurring_billing_terms": null, "hs_sku": null, "hs_sync_amount": null, "hs_tcv": 123.0, "hs_term_in_months": null, "hs_total_discount": 0.0, "hs_unique_creation_key": null, "hs_updated_by_user_id": 12282590, "hs_url": null, "hs_user_ids_of_all_notification_followers": null, "hs_user_ids_of_all_notification_unfollowers": null, "hs_user_ids_of_all_owners": null, "hs_variant_id": null, "hubspot_owner_assigneddate": null, "hubspot_owner_id": null, "hubspot_team_id": null, "name": "Test product 1", "price": 123.0, "quantity": 1.0, "recurringbillingfrequency": null, "tax": null, "test": null, "test_product_price": null}, "createdAt": "2021-02-23T20:03:48.577Z", "updatedAt": "2021-07-17T07:01:43.045Z", "archived": false}, "emitted_at": 1655280740831} +{"stream": "line_items", "data": {"id": "1188257165", "properties": {"amount": 10.0, "createdate": "2021-02-23T20:11:54.030000+00:00", "description": "Baseball hat, medium", "discount": null, "hs_acv": 10.0, "hs_all_accessible_team_ids": null, "hs_all_assigned_business_unit_ids": null, "hs_all_owner_ids": null, "hs_all_team_ids": null, "hs_allow_buyer_selected_quantity": null, "hs_arr": 0.0, "hs_cost_of_goods_sold": 5.0, "hs_created_by_user_id": 12282590, "hs_createdate": null, "hs_discount_percentage": null, "hs_external_id": null, "hs_images": null, "hs_lastmodifieddate": "2021-07-17T23:50:32.502000+00:00", "hs_line_item_currency_code": null, "hs_margin": 5.0, "hs_margin_acv": 5.0, "hs_margin_arr": 0.0, "hs_margin_mrr": 0.0, "hs_margin_tcv": 5.0, "hs_merged_object_ids": null, "hs_mrr": 0.0, "hs_object_id": 1188257165, "hs_position_on_quote": 0.0, "hs_pre_discount_amount": 10.0, "hs_product_id": 646778218, "hs_recurring_billing_end_date": 
null, "hs_recurring_billing_number_of_payments": null, "hs_recurring_billing_period": null, "hs_recurring_billing_start_date": null, "hs_recurring_billing_terms": null, "hs_sku": null, "hs_sync_amount": null, "hs_tcv": 10.0, "hs_term_in_months": null, "hs_total_discount": 0.0, "hs_unique_creation_key": null, "hs_updated_by_user_id": 12282590, "hs_url": null, "hs_user_ids_of_all_notification_followers": null, "hs_user_ids_of_all_notification_unfollowers": null, "hs_user_ids_of_all_owners": null, "hs_variant_id": null, "hubspot_owner_assigneddate": null, "hubspot_owner_id": null, "hubspot_team_id": null, "name": "Blue Hat", "price": 10.0, "quantity": 1.0, "recurringbillingfrequency": null, "tax": null, "test": null, "test_product_price": null}, "createdAt": "2021-02-23T20:11:54.030Z", "updatedAt": "2021-07-17T23:50:32.502Z", "archived": false}, "emitted_at": 1655280740832} +{"stream": "line_items", "data": {"id": "1188257309", "properties": {"amount": 10.0, "createdate": "2021-02-23T20:11:54.030000+00:00", "description": "Baseball hat, medium", "discount": null, "hs_acv": 10.0, "hs_all_accessible_team_ids": null, "hs_all_assigned_business_unit_ids": null, "hs_all_owner_ids": null, "hs_all_team_ids": null, "hs_allow_buyer_selected_quantity": null, "hs_arr": 0.0, "hs_cost_of_goods_sold": 5.0, "hs_created_by_user_id": 12282590, "hs_createdate": null, "hs_discount_percentage": null, "hs_external_id": null, "hs_images": null, "hs_lastmodifieddate": "2021-07-19T03:57:09.834000+00:00", "hs_line_item_currency_code": null, "hs_margin": 5.0, "hs_margin_acv": 5.0, "hs_margin_arr": 0.0, "hs_margin_mrr": 0.0, "hs_margin_tcv": 5.0, "hs_merged_object_ids": null, "hs_mrr": 0.0, "hs_object_id": 1188257309, "hs_position_on_quote": 0.0, "hs_pre_discount_amount": 10.0, "hs_product_id": 646778218, "hs_recurring_billing_end_date": null, "hs_recurring_billing_number_of_payments": null, "hs_recurring_billing_period": null, "hs_recurring_billing_start_date": null, 
"hs_recurring_billing_terms": null, "hs_sku": null, "hs_sync_amount": null, "hs_tcv": 10.0, "hs_term_in_months": null, "hs_total_discount": 0.0, "hs_unique_creation_key": null, "hs_updated_by_user_id": 12282590, "hs_url": null, "hs_user_ids_of_all_notification_followers": null, "hs_user_ids_of_all_notification_unfollowers": null, "hs_user_ids_of_all_owners": null, "hs_variant_id": null, "hubspot_owner_assigneddate": null, "hubspot_owner_id": null, "hubspot_team_id": null, "name": "Blue Hat", "price": 10.0, "quantity": 1.0, "recurringbillingfrequency": null, "tax": null, "test": null, "test_product_price": null}, "createdAt": "2021-02-23T20:11:54.030Z", "updatedAt": "2021-07-19T03:57:09.834Z", "archived": false}, "emitted_at": 1655280740832} +{"stream": "line_items", "data": {"id": "1510167477", "properties": {"amount": 20.0, "createdate": "2021-05-21T10:22:40.683000+00:00", "description": "Top hat, large", "discount": null, "hs_acv": 60.0, "hs_all_accessible_team_ids": null, "hs_all_assigned_business_unit_ids": null, "hs_all_owner_ids": null, "hs_all_team_ids": null, "hs_allow_buyer_selected_quantity": null, "hs_arr": 60.0, "hs_cost_of_goods_sold": 10.0, "hs_created_by_user_id": 12282590, "hs_createdate": null, "hs_discount_percentage": null, "hs_external_id": null, "hs_images": null, "hs_lastmodifieddate": "2022-02-23T08:09:16.555000+00:00", "hs_line_item_currency_code": null, "hs_margin": 10.0, "hs_margin_acv": 30.0, "hs_margin_arr": 30.0, "hs_margin_mrr": 10.0, "hs_margin_tcv": 30.0, "hs_merged_object_ids": null, "hs_mrr": 20.0, "hs_object_id": 1510167477, "hs_position_on_quote": null, "hs_pre_discount_amount": 20.0, "hs_product_id": 646777910, "hs_recurring_billing_end_date": "2022-05-28", "hs_recurring_billing_number_of_payments": null, "hs_recurring_billing_period": "P3M", "hs_recurring_billing_start_date": "2022-02-28", "hs_recurring_billing_terms": null, "hs_sku": null, "hs_sync_amount": null, "hs_tcv": 60.0, "hs_term_in_months": 3.0, "hs_total_discount": 
0.0, "hs_unique_creation_key": null, "hs_updated_by_user_id": 12282590, "hs_url": null, "hs_user_ids_of_all_notification_followers": null, "hs_user_ids_of_all_notification_unfollowers": null, "hs_user_ids_of_all_owners": null, "hs_variant_id": null, "hubspot_owner_assigneddate": null, "hubspot_owner_id": null, "hubspot_team_id": null, "name": "Red Hat", "price": 20.0, "quantity": 1.0, "recurringbillingfrequency": "monthly", "tax": null, "test": "2022-02-24", "test_product_price": "2022-02-23"}, "createdAt": "2021-05-21T10:22:40.683Z", "updatedAt": "2022-02-23T08:09:16.555Z", "archived": false}, "emitted_at": 1655280740832} +{"stream": "line_items", "data": {"id": "2089468681", "properties": {"amount": 10.0, "createdate": "2021-10-12T13:50:13.117000+00:00", "description": "baseball hat, large", "discount": null, "hs_acv": 10.0, "hs_all_accessible_team_ids": null, "hs_all_assigned_business_unit_ids": null, "hs_all_owner_ids": null, "hs_all_team_ids": null, "hs_allow_buyer_selected_quantity": null, "hs_arr": 0.0, "hs_cost_of_goods_sold": 5.0, "hs_created_by_user_id": 12282590, "hs_createdate": null, "hs_discount_percentage": null, "hs_external_id": null, "hs_images": null, "hs_lastmodifieddate": "2021-10-12T13:50:13.117000+00:00", "hs_line_item_currency_code": null, "hs_margin": 5.0, "hs_margin_acv": 5.0, "hs_margin_arr": 0.0, "hs_margin_mrr": 0.0, "hs_margin_tcv": 5.0, "hs_merged_object_ids": null, "hs_mrr": 0.0, "hs_object_id": 2089468681, "hs_position_on_quote": 0.0, "hs_pre_discount_amount": 10.0, "hs_product_id": 646316535, "hs_recurring_billing_end_date": null, "hs_recurring_billing_number_of_payments": null, "hs_recurring_billing_period": null, "hs_recurring_billing_start_date": null, "hs_recurring_billing_terms": null, "hs_sku": null, "hs_sync_amount": null, "hs_tcv": 10.0, "hs_term_in_months": null, "hs_total_discount": 0.0, "hs_unique_creation_key": null, "hs_updated_by_user_id": 12282590, "hs_url": null, "hs_user_ids_of_all_notification_followers": null, 
"hs_user_ids_of_all_notification_unfollowers": null, "hs_user_ids_of_all_owners": null, "hs_variant_id": null, "hubspot_owner_assigneddate": null, "hubspot_owner_id": null, "hubspot_team_id": null, "name": "Green Hat", "price": 10.0, "quantity": 1.0, "recurringbillingfrequency": null, "tax": null, "test": null, "test_product_price": null}, "createdAt": "2021-10-12T13:50:13.117Z", "updatedAt": "2021-10-12T13:50:13.117Z", "archived": false}, "emitted_at": 1655280740833} +{"stream": "line_items", "data": {"id": "2089616136", "properties": {"amount": 10.0, "createdate": "2021-10-12T13:50:13.028000+00:00", "description": "baseball hat, large", "discount": null, "hs_acv": 10.0, "hs_all_accessible_team_ids": null, "hs_all_assigned_business_unit_ids": null, "hs_all_owner_ids": null, "hs_all_team_ids": null, "hs_allow_buyer_selected_quantity": null, "hs_arr": 0.0, "hs_cost_of_goods_sold": 5.0, "hs_created_by_user_id": 12282590, "hs_createdate": null, "hs_discount_percentage": null, "hs_external_id": null, "hs_images": null, "hs_lastmodifieddate": "2021-10-12T13:50:13.028000+00:00", "hs_line_item_currency_code": null, "hs_margin": 5.0, "hs_margin_acv": 5.0, "hs_margin_arr": 0.0, "hs_margin_mrr": 0.0, "hs_margin_tcv": 5.0, "hs_merged_object_ids": null, "hs_mrr": 0.0, "hs_object_id": 2089616136, "hs_position_on_quote": 0.0, "hs_pre_discount_amount": 10.0, "hs_product_id": 646316535, "hs_recurring_billing_end_date": null, "hs_recurring_billing_number_of_payments": null, "hs_recurring_billing_period": null, "hs_recurring_billing_start_date": null, "hs_recurring_billing_terms": null, "hs_sku": null, "hs_sync_amount": null, "hs_tcv": 10.0, "hs_term_in_months": null, "hs_total_discount": 0.0, "hs_unique_creation_key": null, "hs_updated_by_user_id": 12282590, "hs_url": null, "hs_user_ids_of_all_notification_followers": null, "hs_user_ids_of_all_notification_unfollowers": null, "hs_user_ids_of_all_owners": null, "hs_variant_id": null, "hubspot_owner_assigneddate": null, 
"hubspot_owner_id": null, "hubspot_team_id": null, "name": "Green Hat", "price": 10.0, "quantity": 1.0, "recurringbillingfrequency": null, "tax": null, "test": null, "test_product_price": null}, "createdAt": "2021-10-12T13:50:13.028Z", "updatedAt": "2021-10-12T13:50:13.028Z", "archived": false}, "emitted_at": 1655280740833} +{"stream": "line_items", "data": {"id": "2548174879", "properties": {"amount": 6000.0, "createdate": "2022-01-18T13:56:03.263000+00:00", "description": "test line item", "discount": null, "hs_acv": 72000.0, "hs_all_accessible_team_ids": null, "hs_all_assigned_business_unit_ids": null, "hs_all_owner_ids": null, "hs_all_team_ids": null, "hs_allow_buyer_selected_quantity": null, "hs_arr": 72000.0, "hs_cost_of_goods_sold": 3.0, "hs_created_by_user_id": null, "hs_createdate": null, "hs_discount_percentage": 50.0, "hs_external_id": null, "hs_images": null, "hs_lastmodifieddate": "2022-01-18T14:50:20.390000+00:00", "hs_line_item_currency_code": "USD", "hs_margin": 5994.0, "hs_margin_acv": 71928.0, "hs_margin_arr": 71928.0, "hs_margin_mrr": 5994.0, "hs_margin_tcv": 71928.0, "hs_merged_object_ids": null, "hs_mrr": 6000.0, "hs_object_id": 2548174879, "hs_position_on_quote": null, "hs_pre_discount_amount": 12000.0, "hs_product_id": null, "hs_recurring_billing_end_date": null, "hs_recurring_billing_number_of_payments": null, "hs_recurring_billing_period": null, "hs_recurring_billing_start_date": null, "hs_recurring_billing_terms": null, "hs_sku": "hs_sku_222_dsad_321f", "hs_sync_amount": null, "hs_tcv": 72000.0, "hs_term_in_months": null, "hs_total_discount": 6000.0, "hs_unique_creation_key": null, "hs_updated_by_user_id": null, "hs_url": null, "hs_user_ids_of_all_notification_followers": null, "hs_user_ids_of_all_notification_unfollowers": null, "hs_user_ids_of_all_owners": null, "hs_variant_id": null, "hubspot_owner_assigneddate": null, "hubspot_owner_id": null, "hubspot_team_id": null, "name": "2 year implementation consultation test", "price": 6000.0, 
"quantity": 2.0, "recurringbillingfrequency": "monthly", "tax": null, "test": null, "test_product_price": null}, "createdAt": "2022-01-18T13:56:03.263Z", "updatedAt": "2022-01-18T14:50:20.390Z", "archived": false}, "emitted_at": 1655280740833} +{"stream": "line_items", "data": {"id": "2551248752", "properties": {"amount": 2.0, "createdate": "2022-01-19T07:21:03.560000+00:00", "description": "test line item", "discount": null, "hs_acv": 24.0, "hs_all_accessible_team_ids": null, "hs_all_assigned_business_unit_ids": null, "hs_all_owner_ids": null, "hs_all_team_ids": null, "hs_allow_buyer_selected_quantity": null, "hs_arr": 24.0, "hs_cost_of_goods_sold": 3.0, "hs_created_by_user_id": null, "hs_createdate": null, "hs_discount_percentage": 50.0, "hs_external_id": null, "hs_images": null, "hs_lastmodifieddate": "2022-01-19T07:21:03.560000+00:00", "hs_line_item_currency_code": "USD", "hs_margin": -4.0, "hs_margin_acv": -48.0, "hs_margin_arr": -48.0, "hs_margin_mrr": -4.0, "hs_margin_tcv": -48.0, "hs_merged_object_ids": null, "hs_mrr": 2.0, "hs_object_id": 2551248752, "hs_position_on_quote": null, "hs_pre_discount_amount": 4.0, "hs_product_id": null, "hs_recurring_billing_end_date": null, "hs_recurring_billing_number_of_payments": null, "hs_recurring_billing_period": null, "hs_recurring_billing_start_date": null, "hs_recurring_billing_terms": null, "hs_sku": "hs_sku_222_dsad_321f", "hs_sync_amount": null, "hs_tcv": 24.0, "hs_term_in_months": null, "hs_total_discount": 2.0, "hs_unique_creation_key": null, "hs_updated_by_user_id": null, "hs_url": null, "hs_user_ids_of_all_notification_followers": null, "hs_user_ids_of_all_notification_unfollowers": null, "hs_user_ids_of_all_owners": null, "hs_variant_id": null, "hubspot_owner_assigneddate": null, "hubspot_owner_id": null, "hubspot_team_id": null, "name": "test line_item", "price": 2.0, "quantity": 2.0, "recurringbillingfrequency": "monthly", "tax": null, "test": null, "test_product_price": "2022-01-29"}, "createdAt": 
"2022-01-19T07:21:03.560Z", "updatedAt": "2022-01-19T07:21:03.560Z", "archived": false}, "emitted_at": 1655280740833} +{"stream": "owners", "data": {"id": "52550153", "email": "integration-test@airbyte.io", "firstName": "Team", "lastName": "Airbyte", "userId": 12282590, "createdAt": "2020-10-28T21:17:56.082Z", "updatedAt": "2022-05-06T17:31:10.509Z", "archived": false}, "emitted_at": 1655280742776} +{"stream": "owners", "data": {"id": "65568071", "email": "test-integration-test-user1@airbyte.io", "firstName": "", "lastName": "", "userId": 23660227, "createdAt": "2021-03-15T11:00:50.053Z", "updatedAt": "2021-03-15T11:00:50.053Z", "archived": false}, "emitted_at": 1655280742779} +{"stream": "owners", "data": {"id": "65568800", "email": "test-integration-test-user2@airbyte.io", "firstName": "", "lastName": "", "userId": 23660229, "createdAt": "2021-03-15T11:01:02.183Z", "updatedAt": "2021-03-15T11:01:02.183Z", "archived": false}, "emitted_at": 1655280742779} +{"stream": "owners", "data": {"id": "111720843", "email": "test-integration-test-user-3@testmail.com", "firstName": "", "lastName": "", "userId": 26748724, "createdAt": "2021-10-12T14:40:22.109Z", "updatedAt": "2021-10-12T14:40:22.109Z", "archived": false}, "emitted_at": 1655280742779} +{"stream": "owners", "data": {"id": "111730024", "email": "test-integration-test-user-4@testmail.com", "firstName": "", "lastName": "", "userId": 26748728, "createdAt": "2021-10-12T14:40:33.359Z", "updatedAt": "2021-10-12T14:40:33.359Z", "archived": false}, "emitted_at": 1655280742779} +{"stream": "products", "data": {"id": "646176421", "properties": {"amount": null, "createdate": "2021-02-23T20:03:18.336000+00:00", "description": null, "discount": null, "hs_all_accessible_team_ids": null, "hs_all_assigned_business_unit_ids": null, "hs_all_owner_ids": null, "hs_all_team_ids": null, "hs_avatar_filemanager_key": null, "hs_cost_of_goods_sold": null, "hs_created_by_user_id": 12282590, "hs_createdate": null, "hs_discount_percentage": 
null, "hs_folder_id": null, "hs_images": null, "hs_lastmodifieddate": "2021-02-23T20:03:18.336000+00:00", "hs_merged_object_ids": null, "hs_object_id": 646176421, "hs_recurring_billing_period": null, "hs_recurring_billing_start_date": null, "hs_sku": null, "hs_unique_creation_key": null, "hs_updated_by_user_id": 12282590, "hs_url": null, "hs_user_ids_of_all_notification_followers": null, "hs_user_ids_of_all_notification_unfollowers": null, "hs_user_ids_of_all_owners": null, "hubspot_owner_assigneddate": null, "hubspot_owner_id": null, "hubspot_team_id": null, "name": "Test product", "price": 100.0, "quantity": null, "recurringbillingfrequency": null, "tax": null, "test": null, "test_product_price": null}, "createdAt": "2021-02-23T20:03:18.336Z", "updatedAt": "2021-02-23T20:03:18.336Z", "archived": false}, "emitted_at": 1655280743413} +{"stream": "products", "data": {"id": "646176423", "properties": {"amount": null, "createdate": "2021-02-23T20:03:48.577000+00:00", "description": null, "discount": null, "hs_all_accessible_team_ids": null, "hs_all_assigned_business_unit_ids": null, "hs_all_owner_ids": null, "hs_all_team_ids": null, "hs_avatar_filemanager_key": null, "hs_cost_of_goods_sold": null, "hs_created_by_user_id": 12282590, "hs_createdate": null, "hs_discount_percentage": null, "hs_folder_id": 2430008, "hs_images": null, "hs_lastmodifieddate": "2021-02-23T20:03:48.577000+00:00", "hs_merged_object_ids": null, "hs_object_id": 646176423, "hs_recurring_billing_period": null, "hs_recurring_billing_start_date": null, "hs_sku": null, "hs_unique_creation_key": null, "hs_updated_by_user_id": 12282590, "hs_url": null, "hs_user_ids_of_all_notification_followers": null, "hs_user_ids_of_all_notification_unfollowers": null, "hs_user_ids_of_all_owners": null, "hubspot_owner_assigneddate": null, "hubspot_owner_id": null, "hubspot_team_id": null, "name": "Test product 1", "price": 123.0, "quantity": null, "recurringbillingfrequency": null, "tax": null, "test": null, 
"test_product_price": null}, "createdAt": "2021-02-23T20:03:48.577Z", "updatedAt": "2021-02-23T20:03:48.577Z", "archived": false}, "emitted_at": 1655280743415} +{"stream": "products", "data": {"id": "646316535", "properties": {"amount": null, "createdate": "2021-02-23T20:11:54.030000+00:00", "description": "baseball hat, large", "discount": null, "hs_all_accessible_team_ids": null, "hs_all_assigned_business_unit_ids": null, "hs_all_owner_ids": null, "hs_all_team_ids": null, "hs_avatar_filemanager_key": null, "hs_cost_of_goods_sold": 5.0, "hs_created_by_user_id": null, "hs_createdate": null, "hs_discount_percentage": null, "hs_folder_id": null, "hs_images": null, "hs_lastmodifieddate": "2021-02-23T20:11:54.030000+00:00", "hs_merged_object_ids": null, "hs_object_id": 646316535, "hs_recurring_billing_period": null, "hs_recurring_billing_start_date": null, "hs_sku": null, "hs_unique_creation_key": null, "hs_updated_by_user_id": null, "hs_url": null, "hs_user_ids_of_all_notification_followers": null, "hs_user_ids_of_all_notification_unfollowers": null, "hs_user_ids_of_all_owners": null, "hubspot_owner_assigneddate": null, "hubspot_owner_id": null, "hubspot_team_id": null, "name": "Green Hat", "price": 10.0, "quantity": null, "recurringbillingfrequency": null, "tax": null, "test": null, "test_product_price": null}, "createdAt": "2021-02-23T20:11:54.030Z", "updatedAt": "2021-02-23T20:11:54.030Z", "archived": false}, "emitted_at": 1655280743416} +{"stream": "products", "data": {"id": "646777910", "properties": {"amount": null, "createdate": "2021-02-23T20:11:54.030000+00:00", "description": "Top hat, large", "discount": null, "hs_all_accessible_team_ids": null, "hs_all_assigned_business_unit_ids": null, "hs_all_owner_ids": null, "hs_all_team_ids": null, "hs_avatar_filemanager_key": null, "hs_cost_of_goods_sold": 10.0, "hs_created_by_user_id": null, "hs_createdate": null, "hs_discount_percentage": null, "hs_folder_id": null, "hs_images": null, "hs_lastmodifieddate": 
"2021-02-23T20:11:54.030000+00:00", "hs_merged_object_ids": null, "hs_object_id": 646777910, "hs_recurring_billing_period": null, "hs_recurring_billing_start_date": null, "hs_sku": null, "hs_unique_creation_key": null, "hs_updated_by_user_id": null, "hs_url": null, "hs_user_ids_of_all_notification_followers": null, "hs_user_ids_of_all_notification_unfollowers": null, "hs_user_ids_of_all_owners": null, "hubspot_owner_assigneddate": null, "hubspot_owner_id": null, "hubspot_team_id": null, "name": "Red Hat", "price": 20.0, "quantity": null, "recurringbillingfrequency": null, "tax": null, "test": null, "test_product_price": null}, "createdAt": "2021-02-23T20:11:54.030Z", "updatedAt": "2021-02-23T20:11:54.030Z", "archived": false}, "emitted_at": 1655280743417} +{"stream": "products", "data": {"id": "646778218", "properties": {"amount": null, "createdate": "2021-02-23T20:11:54.030000+00:00", "description": "Baseball hat, medium", "discount": null, "hs_all_accessible_team_ids": null, "hs_all_assigned_business_unit_ids": null, "hs_all_owner_ids": null, "hs_all_team_ids": null, "hs_avatar_filemanager_key": null, "hs_cost_of_goods_sold": 5.0, "hs_created_by_user_id": null, "hs_createdate": null, "hs_discount_percentage": null, "hs_folder_id": null, "hs_images": null, "hs_lastmodifieddate": "2021-02-23T20:11:54.030000+00:00", "hs_merged_object_ids": null, "hs_object_id": 646778218, "hs_recurring_billing_period": null, "hs_recurring_billing_start_date": null, "hs_sku": null, "hs_unique_creation_key": null, "hs_updated_by_user_id": null, "hs_url": null, "hs_user_ids_of_all_notification_followers": null, "hs_user_ids_of_all_notification_unfollowers": null, "hs_user_ids_of_all_owners": null, "hubspot_owner_assigneddate": null, "hubspot_owner_id": null, "hubspot_team_id": null, "name": "Blue Hat", "price": 10.0, "quantity": null, "recurringbillingfrequency": null, "tax": null, "test": null, "test_product_price": null}, "createdAt": "2021-02-23T20:11:54.030Z", "updatedAt": 
"2021-02-23T20:11:54.030Z", "archived": false}, "emitted_at": 1655280743418} +{"stream": "products", "data": {"id": "1293894464", "properties": {"amount": null, "createdate": "2021-10-12T14:31:31.319000+00:00", "description": null, "discount": null, "hs_all_accessible_team_ids": null, "hs_all_assigned_business_unit_ids": null, "hs_all_owner_ids": null, "hs_all_team_ids": null, "hs_avatar_filemanager_key": null, "hs_cost_of_goods_sold": null, "hs_created_by_user_id": 12282590, "hs_createdate": null, "hs_discount_percentage": null, "hs_folder_id": null, "hs_images": null, "hs_lastmodifieddate": "2021-10-12T14:31:31.319000+00:00", "hs_merged_object_ids": null, "hs_object_id": 1293894464, "hs_recurring_billing_period": null, "hs_recurring_billing_start_date": null, "hs_sku": null, "hs_unique_creation_key": null, "hs_updated_by_user_id": 12282590, "hs_url": null, "hs_user_ids_of_all_notification_followers": null, "hs_user_ids_of_all_notification_unfollowers": null, "hs_user_ids_of_all_owners": null, "hubspot_owner_assigneddate": null, "hubspot_owner_id": null, "hubspot_team_id": null, "name": "Test Product 1", "price": 1.0, "quantity": null, "recurringbillingfrequency": null, "tax": null, "test": null, "test_product_price": null}, "createdAt": "2021-10-12T14:31:31.319Z", "updatedAt": "2021-10-12T14:31:31.319Z", "archived": false}, "emitted_at": 1655280743419} +{"stream": "products", "data": {"id": "1293894465", "properties": {"amount": null, "createdate": "2021-10-12T14:31:51.477000+00:00", "description": null, "discount": null, "hs_all_accessible_team_ids": null, "hs_all_assigned_business_unit_ids": null, "hs_all_owner_ids": null, "hs_all_team_ids": null, "hs_avatar_filemanager_key": null, "hs_cost_of_goods_sold": null, "hs_created_by_user_id": 12282590, "hs_createdate": null, "hs_discount_percentage": null, "hs_folder_id": null, "hs_images": null, "hs_lastmodifieddate": "2021-10-12T14:31:51.477000+00:00", "hs_merged_object_ids": null, "hs_object_id": 1293894465, 
"hs_recurring_billing_period": null, "hs_recurring_billing_start_date": null, "hs_sku": null, "hs_unique_creation_key": null, "hs_updated_by_user_id": 12282590, "hs_url": null, "hs_user_ids_of_all_notification_followers": null, "hs_user_ids_of_all_notification_unfollowers": null, "hs_user_ids_of_all_owners": null, "hubspot_owner_assigneddate": null, "hubspot_owner_id": null, "hubspot_team_id": null, "name": "Test product 4", "price": 4.0, "quantity": null, "recurringbillingfrequency": null, "tax": null, "test": null, "test_product_price": null}, "createdAt": "2021-10-12T14:31:51.477Z", "updatedAt": "2021-10-12T14:31:51.477Z", "archived": false}, "emitted_at": 1655280743420} +{"stream": "products", "data": {"id": "1293894466", "properties": {"amount": null, "createdate": "2021-10-12T14:31:54.868000+00:00", "description": null, "discount": null, "hs_all_accessible_team_ids": null, "hs_all_assigned_business_unit_ids": null, "hs_all_owner_ids": null, "hs_all_team_ids": null, "hs_avatar_filemanager_key": null, "hs_cost_of_goods_sold": null, "hs_created_by_user_id": 12282590, "hs_createdate": null, "hs_discount_percentage": null, "hs_folder_id": null, "hs_images": null, "hs_lastmodifieddate": "2021-10-12T14:31:54.868000+00:00", "hs_merged_object_ids": null, "hs_object_id": 1293894466, "hs_recurring_billing_period": null, "hs_recurring_billing_start_date": null, "hs_sku": null, "hs_unique_creation_key": null, "hs_updated_by_user_id": 12282590, "hs_url": null, "hs_user_ids_of_all_notification_followers": null, "hs_user_ids_of_all_notification_unfollowers": null, "hs_user_ids_of_all_owners": null, "hubspot_owner_assigneddate": null, "hubspot_owner_id": null, "hubspot_team_id": null, "name": "Test product 5", "price": 5.0, "quantity": null, "recurringbillingfrequency": null, "tax": null, "test": null, "test_product_price": null}, "createdAt": "2021-10-12T14:31:54.868Z", "updatedAt": "2021-10-12T14:31:54.868Z", "archived": false}, "emitted_at": 1655280743421} +{"stream": 
"products", "data": {"id": "1293894477", "properties": {"amount": null, "createdate": "2021-10-12T14:31:41.887000+00:00", "description": null, "discount": null, "hs_all_accessible_team_ids": null, "hs_all_assigned_business_unit_ids": null, "hs_all_owner_ids": null, "hs_all_team_ids": null, "hs_avatar_filemanager_key": null, "hs_cost_of_goods_sold": null, "hs_created_by_user_id": 12282590, "hs_createdate": null, "hs_discount_percentage": null, "hs_folder_id": null, "hs_images": null, "hs_lastmodifieddate": "2021-10-12T14:31:41.887000+00:00", "hs_merged_object_ids": null, "hs_object_id": 1293894477, "hs_recurring_billing_period": null, "hs_recurring_billing_start_date": null, "hs_sku": null, "hs_unique_creation_key": null, "hs_updated_by_user_id": 12282590, "hs_url": null, "hs_user_ids_of_all_notification_followers": null, "hs_user_ids_of_all_notification_unfollowers": null, "hs_user_ids_of_all_owners": null, "hubspot_owner_assigneddate": null, "hubspot_owner_id": null, "hubspot_team_id": null, "name": "Test product 2", "price": 2.0, "quantity": null, "recurringbillingfrequency": null, "tax": null, "test": null, "test_product_price": null}, "createdAt": "2021-10-12T14:31:41.887Z", "updatedAt": "2021-10-12T14:31:41.887Z", "archived": false}, "emitted_at": 1655280743422} +{"stream": "products", "data": {"id": "1293894478", "properties": {"amount": null, "createdate": "2021-10-12T14:31:47.402000+00:00", "description": null, "discount": null, "hs_all_accessible_team_ids": null, "hs_all_assigned_business_unit_ids": null, "hs_all_owner_ids": null, "hs_all_team_ids": null, "hs_avatar_filemanager_key": null, "hs_cost_of_goods_sold": null, "hs_created_by_user_id": 12282590, "hs_createdate": null, "hs_discount_percentage": null, "hs_folder_id": null, "hs_images": null, "hs_lastmodifieddate": "2021-10-12T14:31:47.402000+00:00", "hs_merged_object_ids": null, "hs_object_id": 1293894478, "hs_recurring_billing_period": null, "hs_recurring_billing_start_date": null, "hs_sku": null, 
"hs_unique_creation_key": null, "hs_updated_by_user_id": 12282590, "hs_url": null, "hs_user_ids_of_all_notification_followers": null, "hs_user_ids_of_all_notification_unfollowers": null, "hs_user_ids_of_all_owners": null, "hubspot_owner_assigneddate": null, "hubspot_owner_id": null, "hubspot_team_id": null, "name": "Test product 3", "price": 3.0, "quantity": null, "recurringbillingfrequency": null, "tax": null, "test": null, "test_product_price": null}, "createdAt": "2021-10-12T14:31:47.402Z", "updatedAt": "2021-10-12T14:31:47.402Z", "archived": false}, "emitted_at": 1655280743423} +{"stream": "products", "data": {"id": "1293894480", "properties": {"amount": null, "createdate": "2021-10-12T14:32:01.651000+00:00", "description": null, "discount": null, "hs_all_accessible_team_ids": null, "hs_all_assigned_business_unit_ids": null, "hs_all_owner_ids": null, "hs_all_team_ids": null, "hs_avatar_filemanager_key": null, "hs_cost_of_goods_sold": null, "hs_created_by_user_id": 12282590, "hs_createdate": null, "hs_discount_percentage": null, "hs_folder_id": null, "hs_images": null, "hs_lastmodifieddate": "2021-10-12T14:32:01.651000+00:00", "hs_merged_object_ids": null, "hs_object_id": 1293894480, "hs_recurring_billing_period": null, "hs_recurring_billing_start_date": null, "hs_sku": null, "hs_unique_creation_key": null, "hs_updated_by_user_id": 12282590, "hs_url": null, "hs_user_ids_of_all_notification_followers": null, "hs_user_ids_of_all_notification_unfollowers": null, "hs_user_ids_of_all_owners": null, "hubspot_owner_assigneddate": null, "hubspot_owner_id": null, "hubspot_team_id": null, "name": "Test product 7", "price": 7.0, "quantity": null, "recurringbillingfrequency": null, "tax": null, "test": null, "test_product_price": null}, "createdAt": "2021-10-12T14:32:01.651Z", "updatedAt": "2021-10-12T14:32:01.651Z", "archived": false}, "emitted_at": 1655280743424} +{"stream": "products", "data": {"id": "1293894481", "properties": {"amount": null, "createdate": 
"2021-10-12T14:32:05.213000+00:00", "description": null, "discount": null, "hs_all_accessible_team_ids": null, "hs_all_assigned_business_unit_ids": null, "hs_all_owner_ids": null, "hs_all_team_ids": null, "hs_avatar_filemanager_key": null, "hs_cost_of_goods_sold": null, "hs_created_by_user_id": 12282590, "hs_createdate": null, "hs_discount_percentage": null, "hs_folder_id": null, "hs_images": null, "hs_lastmodifieddate": "2021-10-12T14:32:05.213000+00:00", "hs_merged_object_ids": null, "hs_object_id": 1293894481, "hs_recurring_billing_period": null, "hs_recurring_billing_start_date": null, "hs_sku": null, "hs_unique_creation_key": null, "hs_updated_by_user_id": 12282590, "hs_url": null, "hs_user_ids_of_all_notification_followers": null, "hs_user_ids_of_all_notification_unfollowers": null, "hs_user_ids_of_all_owners": null, "hubspot_owner_assigneddate": null, "hubspot_owner_id": null, "hubspot_team_id": null, "name": "Test product 8", "price": 8.0, "quantity": null, "recurringbillingfrequency": null, "tax": null, "test": null, "test_product_price": null}, "createdAt": "2021-10-12T14:32:05.213Z", "updatedAt": "2021-10-12T14:32:05.213Z", "archived": false}, "emitted_at": 1655280743424} +{"stream": "products", "data": {"id": "1293894482", "properties": {"amount": null, "createdate": "2021-10-12T14:32:10.207000+00:00", "description": null, "discount": null, "hs_all_accessible_team_ids": null, "hs_all_assigned_business_unit_ids": null, "hs_all_owner_ids": null, "hs_all_team_ids": null, "hs_avatar_filemanager_key": null, "hs_cost_of_goods_sold": null, "hs_created_by_user_id": 12282590, "hs_createdate": null, "hs_discount_percentage": null, "hs_folder_id": null, "hs_images": null, "hs_lastmodifieddate": "2021-10-12T14:32:10.207000+00:00", "hs_merged_object_ids": null, "hs_object_id": 1293894482, "hs_recurring_billing_period": null, "hs_recurring_billing_start_date": null, "hs_sku": null, "hs_unique_creation_key": null, "hs_updated_by_user_id": 12282590, "hs_url": null, 
"hs_user_ids_of_all_notification_followers": null, "hs_user_ids_of_all_notification_unfollowers": null, "hs_user_ids_of_all_owners": null, "hubspot_owner_assigneddate": null, "hubspot_owner_id": null, "hubspot_team_id": null, "name": "Test product 9", "price": 9.0, "quantity": null, "recurringbillingfrequency": null, "tax": null, "test": null, "test_product_price": null}, "createdAt": "2021-10-12T14:32:10.207Z", "updatedAt": "2021-10-12T14:32:10.207Z", "archived": false}, "emitted_at": 1655280743425} +{"stream": "products", "data": {"id": "1293894483", "properties": {"amount": null, "createdate": "2021-10-12T14:32:18.991000+00:00", "description": null, "discount": null, "hs_all_accessible_team_ids": null, "hs_all_assigned_business_unit_ids": null, "hs_all_owner_ids": null, "hs_all_team_ids": null, "hs_avatar_filemanager_key": null, "hs_cost_of_goods_sold": null, "hs_created_by_user_id": 12282590, "hs_createdate": null, "hs_discount_percentage": null, "hs_folder_id": null, "hs_images": null, "hs_lastmodifieddate": "2021-10-12T14:32:18.991000+00:00", "hs_merged_object_ids": null, "hs_object_id": 1293894483, "hs_recurring_billing_period": null, "hs_recurring_billing_start_date": null, "hs_sku": null, "hs_unique_creation_key": null, "hs_updated_by_user_id": 12282590, "hs_url": null, "hs_user_ids_of_all_notification_followers": null, "hs_user_ids_of_all_notification_unfollowers": null, "hs_user_ids_of_all_owners": null, "hubspot_owner_assigneddate": null, "hubspot_owner_id": null, "hubspot_team_id": null, "name": "Test product 10", "price": 10.0, "quantity": null, "recurringbillingfrequency": null, "tax": null, "test": null, "test_product_price": null}, "createdAt": "2021-10-12T14:32:18.991Z", "updatedAt": "2021-10-12T14:32:18.991Z", "archived": false}, "emitted_at": 1655280743426} +{"stream": "products", "data": {"id": "1293909945", "properties": {"amount": null, "createdate": "2021-10-12T14:31:58.082000+00:00", "description": null, "discount": null, 
"hs_all_accessible_team_ids": null, "hs_all_assigned_business_unit_ids": null, "hs_all_owner_ids": null, "hs_all_team_ids": null, "hs_avatar_filemanager_key": null, "hs_cost_of_goods_sold": null, "hs_created_by_user_id": 12282590, "hs_createdate": null, "hs_discount_percentage": null, "hs_folder_id": null, "hs_images": null, "hs_lastmodifieddate": "2021-10-12T14:31:58.082000+00:00", "hs_merged_object_ids": null, "hs_object_id": 1293909945, "hs_recurring_billing_period": null, "hs_recurring_billing_start_date": null, "hs_sku": null, "hs_unique_creation_key": null, "hs_updated_by_user_id": 12282590, "hs_url": null, "hs_user_ids_of_all_notification_followers": null, "hs_user_ids_of_all_notification_unfollowers": null, "hs_user_ids_of_all_owners": null, "hubspot_owner_assigneddate": null, "hubspot_owner_id": null, "hubspot_team_id": null, "name": "Test product 6", "price": 6.0, "quantity": null, "recurringbillingfrequency": null, "tax": null, "test": null, "test_product_price": null}, "createdAt": "2021-10-12T14:31:58.082Z", "updatedAt": "2021-10-12T14:31:58.082Z", "archived": false}, "emitted_at": 1655280743427} +{"stream": "products", "data": {"id": "1293910016", "properties": {"amount": null, "createdate": "2021-10-12T14:32:33.265000+00:00", "description": null, "discount": null, "hs_all_accessible_team_ids": null, "hs_all_assigned_business_unit_ids": null, "hs_all_owner_ids": null, "hs_all_team_ids": null, "hs_avatar_filemanager_key": null, "hs_cost_of_goods_sold": null, "hs_created_by_user_id": 12282590, "hs_createdate": null, "hs_discount_percentage": null, "hs_folder_id": null, "hs_images": null, "hs_lastmodifieddate": "2021-10-12T14:32:33.265000+00:00", "hs_merged_object_ids": null, "hs_object_id": 1293910016, "hs_recurring_billing_period": null, "hs_recurring_billing_start_date": null, "hs_sku": null, "hs_unique_creation_key": null, "hs_updated_by_user_id": 12282590, "hs_url": null, "hs_user_ids_of_all_notification_followers": null, 
"hs_user_ids_of_all_notification_unfollowers": null, "hs_user_ids_of_all_owners": null, "hubspot_owner_assigneddate": null, "hubspot_owner_id": null, "hubspot_team_id": null, "name": "Test product 14", "price": 4.0, "quantity": null, "recurringbillingfrequency": null, "tax": null, "test": null, "test_product_price": null}, "createdAt": "2021-10-12T14:32:33.265Z", "updatedAt": "2021-10-12T14:32:33.265Z", "archived": false}, "emitted_at": 1655280743428} +{"stream": "products", "data": {"id": "1293910017", "properties": {"amount": null, "createdate": "2021-10-12T14:32:36.705000+00:00", "description": null, "discount": null, "hs_all_accessible_team_ids": null, "hs_all_assigned_business_unit_ids": null, "hs_all_owner_ids": null, "hs_all_team_ids": null, "hs_avatar_filemanager_key": null, "hs_cost_of_goods_sold": null, "hs_created_by_user_id": 12282590, "hs_createdate": null, "hs_discount_percentage": null, "hs_folder_id": null, "hs_images": null, "hs_lastmodifieddate": "2022-01-19T07:13:38.542000+00:00", "hs_merged_object_ids": null, "hs_object_id": 1293910017, "hs_recurring_billing_period": null, "hs_recurring_billing_start_date": null, "hs_sku": null, "hs_unique_creation_key": null, "hs_updated_by_user_id": 12282590, "hs_url": null, "hs_user_ids_of_all_notification_followers": null, "hs_user_ids_of_all_notification_unfollowers": null, "hs_user_ids_of_all_owners": null, "hubspot_owner_assigneddate": null, "hubspot_owner_id": null, "hubspot_team_id": null, "name": "Test product 15", "price": 5.0, "quantity": null, "recurringbillingfrequency": null, "tax": null, "test": "2022-01-20", "test_product_price": "2022-01-27"}, "createdAt": "2021-10-12T14:32:36.705Z", "updatedAt": "2022-01-19T07:13:38.542Z", "archived": false}, "emitted_at": 1655280743429} +{"stream": "products", "data": {"id": "1293915738", "properties": {"amount": null, "createdate": "2021-10-12T14:32:23.027000+00:00", "description": null, "discount": null, "hs_all_accessible_team_ids": null, 
"hs_all_assigned_business_unit_ids": null, "hs_all_owner_ids": null, "hs_all_team_ids": null, "hs_avatar_filemanager_key": null, "hs_cost_of_goods_sold": null, "hs_created_by_user_id": 12282590, "hs_createdate": null, "hs_discount_percentage": null, "hs_folder_id": null, "hs_images": null, "hs_lastmodifieddate": "2021-10-12T14:32:23.027000+00:00", "hs_merged_object_ids": null, "hs_object_id": 1293915738, "hs_recurring_billing_period": null, "hs_recurring_billing_start_date": null, "hs_sku": null, "hs_unique_creation_key": null, "hs_updated_by_user_id": 12282590, "hs_url": null, "hs_user_ids_of_all_notification_followers": null, "hs_user_ids_of_all_notification_unfollowers": null, "hs_user_ids_of_all_owners": null, "hubspot_owner_assigneddate": null, "hubspot_owner_id": null, "hubspot_team_id": null, "name": "Test product 11", "price": 1.0, "quantity": null, "recurringbillingfrequency": null, "tax": null, "test": null, "test_product_price": null}, "createdAt": "2021-10-12T14:32:23.027Z", "updatedAt": "2021-10-12T14:32:23.027Z", "archived": false}, "emitted_at": 1655280743429} +{"stream": "products", "data": {"id": "1293915739", "properties": {"amount": null, "createdate": "2021-10-12T14:32:26.359000+00:00", "description": null, "discount": null, "hs_all_accessible_team_ids": null, "hs_all_assigned_business_unit_ids": null, "hs_all_owner_ids": null, "hs_all_team_ids": null, "hs_avatar_filemanager_key": null, "hs_cost_of_goods_sold": null, "hs_created_by_user_id": 12282590, "hs_createdate": null, "hs_discount_percentage": null, "hs_folder_id": null, "hs_images": null, "hs_lastmodifieddate": "2021-10-12T14:32:26.359000+00:00", "hs_merged_object_ids": null, "hs_object_id": 1293915739, "hs_recurring_billing_period": null, "hs_recurring_billing_start_date": null, "hs_sku": null, "hs_unique_creation_key": null, "hs_updated_by_user_id": 12282590, "hs_url": null, "hs_user_ids_of_all_notification_followers": null, "hs_user_ids_of_all_notification_unfollowers": null, 
"hs_user_ids_of_all_owners": null, "hubspot_owner_assigneddate": null, "hubspot_owner_id": null, "hubspot_team_id": null, "name": "Test product 12", "price": 2.0, "quantity": null, "recurringbillingfrequency": null, "tax": null, "test": null, "test_product_price": null}, "createdAt": "2021-10-12T14:32:26.359Z", "updatedAt": "2021-10-12T14:32:26.359Z", "archived": false}, "emitted_at": 1655280743430} +{"stream": "products", "data": {"id": "1293915740", "properties": {"amount": null, "createdate": "2021-10-12T14:32:29.801000+00:00", "description": null, "discount": null, "hs_all_accessible_team_ids": null, "hs_all_assigned_business_unit_ids": null, "hs_all_owner_ids": null, "hs_all_team_ids": null, "hs_avatar_filemanager_key": null, "hs_cost_of_goods_sold": null, "hs_created_by_user_id": 12282590, "hs_createdate": null, "hs_discount_percentage": null, "hs_folder_id": null, "hs_images": null, "hs_lastmodifieddate": "2021-10-12T14:32:29.801000+00:00", "hs_merged_object_ids": null, "hs_object_id": 1293915740, "hs_recurring_billing_period": null, "hs_recurring_billing_start_date": null, "hs_sku": null, "hs_unique_creation_key": null, "hs_updated_by_user_id": 12282590, "hs_url": null, "hs_user_ids_of_all_notification_followers": null, "hs_user_ids_of_all_notification_unfollowers": null, "hs_user_ids_of_all_owners": null, "hubspot_owner_assigneddate": null, "hubspot_owner_id": null, "hubspot_team_id": null, "name": "Test product 13", "price": 3.0, "quantity": null, "recurringbillingfrequency": null, "tax": null, "test": null, "test_product_price": null}, "createdAt": "2021-10-12T14:32:29.801Z", "updatedAt": "2021-10-12T14:32:29.801Z", "archived": false}, "emitted_at": 1655280743431} +{"stream": "subscription_changes", "data": {"timestamp": 1616173134301, "normalizedEmailId": "0c90ecf5-629e-4fe4-8516-05f75636c3e3", "recipient": "kulak.eugene@gmail.com", "changes": [{"timestamp": 1616173134301, "causedByEvent": {"id": "d70b78b9-a411-4d3e-808b-fe931be35b43", "created": 
1616173134301}, "changeType": "PORTAL_STATUS", "change": "SUBSCRIBED", "portalId": 8727216, "source": "SOURCE_HUBSPOT_CUSTOMER"}], "portalId": 8727216}, "emitted_at": 1655280754251} +{"stream": "subscription_changes", "data": {"timestamp": 1616173134301, "normalizedEmailId": "0c90ecf5-629e-4fe4-8516-05f75636c3e3", "recipient": "kulak.eugene@gmail.com", "changes": [{"timestamp": 1616173134301, "subscriptionId": 10798197, "causedByEvent": {"id": "ff118718-786d-4a35-94f9-6bbd413654de", "created": 1616173134301}, "changeType": "SUBSCRIPTION_STATUS", "change": "SUBSCRIBED", "portalId": 8727216, "source": "SOURCE_HUBSPOT_CUSTOMER"}], "portalId": 8727216}, "emitted_at": 1655280754254} +{"stream": "subscription_changes", "data": {"timestamp": 1616173106737, "normalizedEmailId": "0c90ecf5-629e-4fe4-8516-05f75636c3e3", "recipient": "kulak.eugene@gmail.com", "changes": [{"timestamp": 1616173106737, "causedByEvent": {"id": "24539f1f-0b20-4296-a5bf-6ba3bb9dc1b8", "created": 1616173106737}, "changeType": "PORTAL_STATUS", "change": "SUBSCRIBED", "portalId": 8727216, "source": "SOURCE_HUBSPOT_CUSTOMER"}], "portalId": 8727216}, "emitted_at": 1655280754254} +{"stream": "subscription_changes", "data": {"timestamp": 1616173054611, "normalizedEmailId": "395b1a1a-c4d3-4f69-9781-291def88550f", "recipient": "sherif@dataline.io", "changes": [{"timestamp": 1616173054611, "subscriptionId": 10798197, "causedByEvent": {"id": "e0c1eb45-612f-4beb-91a2-4352fbbbd100", "created": 1616173054611}, "changeType": "SUBSCRIPTION_STATUS", "change": "SUBSCRIBED", "portalId": 8727216, "source": "SOURCE_HUBSPOT_CUSTOMER"}], "portalId": 8727216}, "emitted_at": 1655280754254} +{"stream": "subscription_changes", "data": {"timestamp": 1616173054611, "normalizedEmailId": "395b1a1a-c4d3-4f69-9781-291def88550f", "recipient": "sherif@dataline.io", "changes": [{"timestamp": 1616173054611, "causedByEvent": {"id": "cb6107f5-ba69-403d-8de5-4d206c774948", "created": 1616173054611}, "changeType": "PORTAL_STATUS", 
"change": "SUBSCRIBED", "portalId": 8727216, "source": "SOURCE_HUBSPOT_CUSTOMER"}], "portalId": 8727216}, "emitted_at": 1655280754254} +{"stream": "subscription_changes", "data": {"timestamp": 1634051145732, "normalizedEmailId": "939f9d71-bb3c-4490-8d32-4d244b8aa329", "recipient": "testingapicontact_19@hubspot.com", "changes": [{"timestamp": 1634051145732, "causedByEvent": {"id": "569ea4bb-eee0-4718-9853-644b676101c9", "created": 1634051145732}, "changeType": "PORTAL_STATUS", "change": "SUBSCRIBED", "portalId": 8727216, "source": "SOURCE_HUBSPOT_CUSTOMER"}], "portalId": 8727216}, "emitted_at": 1655280756702} +{"stream": "subscription_changes", "data": {"timestamp": 1634051145732, "normalizedEmailId": "939f9d71-bb3c-4490-8d32-4d244b8aa329", "recipient": "testingapicontact_19@hubspot.com", "changes": [{"timestamp": 1634051145732, "subscriptionId": 23704464, "causedByEvent": {"id": "66677dbc-3d7b-4a4e-8ed8-023dad7223a1", "created": 1634051145732}, "changeType": "SUBSCRIPTION_STATUS", "change": "UNSUBSCRIBED", "portalId": 8727216, "source": "SOURCE_HUBSPOT_CUSTOMER"}], "portalId": 8727216}, "emitted_at": 1655280756703} +{"stream": "subscription_changes", "data": {"timestamp": 1634051105091, "normalizedEmailId": "939f9d71-bb3c-4490-8d32-4d244b8aa329", "recipient": "testingapicontact_19@hubspot.com", "changes": [{"timestamp": 1634051105091, "causedByEvent": {"id": "331da840-e177-42e7-9758-08f430a40428", "created": 1634051105091}, "changeType": "PORTAL_STATUS", "change": "SUBSCRIBED", "portalId": 8727216, "source": "SOURCE_HUBSPOT_CUSTOMER"}], "portalId": 8727216}, "emitted_at": 1655280756703} +{"stream": "subscription_changes", "data": {"timestamp": 1634051105091, "normalizedEmailId": "939f9d71-bb3c-4490-8d32-4d244b8aa329", "recipient": "testingapicontact_19@hubspot.com", "changes": [{"timestamp": 1634051105091, "subscriptionId": 23704464, "causedByEvent": {"id": "f27707ce-833e-4d47-b0fe-2cd9cbec786e", "created": 1634051105091}, "changeType": "SUBSCRIPTION_STATUS", 
"change": "SUBSCRIBED", "portalId": 8727216, "source": "SOURCE_HUBSPOT_CUSTOMER"}], "portalId": 8727216}, "emitted_at": 1655280756704} +{"stream": "workflows", "data": {"migrationStatus": {"portalId": 8727216, "flowId": 50206671, "workflowId": 21058115, "migrationStatus": "EXECUTION_MIGRATED", "enrollmentMigrationStatus": "CLASSIC_OWNED", "platformOwnsActions": true, "lastSuccessfulMigrationTimestamp": null}, "contactListIds": {"enrolled": 12, "active": 13, "completed": 14, "succeeded": 15}, "personaTagIds": [], "contactCounts": {"active": 0, "enrolled": 0}, "creationSource": {"sourceApplication": {"source": "DIRECT_API"}, "createdAt": 1610635826795}, "updateSource": {"sourceApplication": {"source": "DIRECT_API", "serviceName": "AutomationPlatformService-web"}, "updatedAt": 1611847907577}, "type": "DRIP_DELAY", "enabled": false, "id": 21058115, "portalId": 8727216, "insertedAt": 1610635826921, "updatedAt": 1611847907577, "name": "Test Workflow"}, "emitted_at": 1655280761823} +{"stream": "workflows", "data": {"migrationStatus": {"portalId": 8727216, "flowId": 50205684, "workflowId": 21058121, "migrationStatus": "EXECUTION_MIGRATED", "enrollmentMigrationStatus": "CLASSIC_OWNED", "platformOwnsActions": true, "lastSuccessfulMigrationTimestamp": null}, "contactListIds": {"enrolled": 16, "active": 17, "completed": 18, "succeeded": 19}, "personaTagIds": [], "contactCounts": {"active": 0, "enrolled": 0}, "creationSource": {"sourceApplication": {"source": "DIRECT_API"}, "createdAt": 1610635850713}, "updateSource": {"sourceApplication": {"source": "DIRECT_API", "serviceName": "AutomationPlatformService-web"}, "updatedAt": 1611847907579}, "type": "DRIP_DELAY", "enabled": false, "id": 21058121, "portalId": 8727216, "insertedAt": 1610635850758, "updatedAt": 1611847907579, "name": "Test Workflow 1"}, "emitted_at": 1655280761825} +{"stream": "workflows", "data": {"migrationStatus": {"portalId": 8727216, "flowId": 50205036, "workflowId": 21058122, "migrationStatus": 
"EXECUTION_MIGRATED", "enrollmentMigrationStatus": "CLASSIC_OWNED", "platformOwnsActions": true, "lastSuccessfulMigrationTimestamp": null}, "contactListIds": {"enrolled": 20, "active": 21, "completed": 22, "succeeded": 23}, "personaTagIds": [], "contactCounts": {"active": 0, "enrolled": 0}, "creationSource": {"sourceApplication": {"source": "DIRECT_API"}, "createdAt": 1610635859664}, "updateSource": {"sourceApplication": {"source": "DIRECT_API", "serviceName": "AutomationPlatformService-web"}, "updatedAt": 1611847907578}, "type": "DRIP_DELAY", "enabled": false, "id": 21058122, "portalId": 8727216, "insertedAt": 1610635859748, "updatedAt": 1611847907578, "name": "Test Workflow 2"}, "emitted_at": 1655280761825} +{"stream": "workflows", "data": {"migrationStatus": {"portalId": 8727216, "flowId": 50205471, "workflowId": 21058124, "migrationStatus": "EXECUTION_MIGRATED", "enrollmentMigrationStatus": "CLASSIC_OWNED", "platformOwnsActions": true, "lastSuccessfulMigrationTimestamp": null}, "contactListIds": {"enrolled": 24, "active": 25, "completed": 26, "succeeded": 27}, "personaTagIds": [], "contactCounts": {"active": 0, "enrolled": 0}, "creationSource": {"sourceApplication": {"source": "DIRECT_API"}, "createdAt": 1610635861348}, "updateSource": {"sourceApplication": {"source": "DIRECT_API", "serviceName": "AutomationPlatformService-web"}, "updatedAt": 1611847907579}, "type": "DRIP_DELAY", "enabled": false, "id": 21058124, "portalId": 8727216, "insertedAt": 1610635861400, "updatedAt": 1611847907579, "name": "Test Workflow 3"}, "emitted_at": 1655280761826} +{"stream": "workflows", "data": {"migrationStatus": {"portalId": 8727216, "flowId": 50205472, "workflowId": 21058125, "migrationStatus": "EXECUTION_MIGRATED", "enrollmentMigrationStatus": "CLASSIC_OWNED", "platformOwnsActions": true, "lastSuccessfulMigrationTimestamp": null}, "contactListIds": {"enrolled": 28, "active": 29, "completed": 30, "succeeded": 31}, "personaTagIds": [], "contactCounts": {"active": 0, 
"enrolled": 0}, "creationSource": {"sourceApplication": {"source": "DIRECT_API"}, "createdAt": 1610635863152}, "updateSource": {"sourceApplication": {"source": "DIRECT_API", "serviceName": "AutomationPlatformService-web"}, "updatedAt": 1611847907579}, "type": "DRIP_DELAY", "enabled": false, "id": 21058125, "portalId": 8727216, "insertedAt": 1610635863199, "updatedAt": 1611847907579, "name": "Test Workflow 4"}, "emitted_at": 1655280761826} +{"stream": "workflows", "data": {"migrationStatus": {"portalId": 8727216, "flowId": 50206988, "workflowId": 21058126, "migrationStatus": "EXECUTION_MIGRATED", "enrollmentMigrationStatus": "CLASSIC_OWNED", "platformOwnsActions": true, "lastSuccessfulMigrationTimestamp": null}, "contactListIds": {"enrolled": 32, "active": 33, "completed": 34, "succeeded": 35}, "personaTagIds": [], "contactCounts": {"active": 0, "enrolled": 0}, "creationSource": {"sourceApplication": {"source": "DIRECT_API"}, "createdAt": 1610635864487}, "updateSource": {"sourceApplication": {"source": "DIRECT_API", "serviceName": "AutomationPlatformService-web"}, "updatedAt": 1611847907578}, "type": "DRIP_DELAY", "enabled": false, "id": 21058126, "portalId": 8727216, "insertedAt": 1610635864533, "updatedAt": 1611847907578, "name": "Test Workflow 5"}, "emitted_at": 1655280761826} +{"stream": "workflows", "data": {"migrationStatus": {"portalId": 8727216, "flowId": 50205685, "workflowId": 21058127, "migrationStatus": "EXECUTION_MIGRATED", "enrollmentMigrationStatus": "CLASSIC_OWNED", "platformOwnsActions": true, "lastSuccessfulMigrationTimestamp": null}, "contactListIds": {"enrolled": 36, "active": 37, "completed": 38, "succeeded": 39}, "personaTagIds": [], "contactCounts": {"active": 0, "enrolled": 0}, "creationSource": {"sourceApplication": {"source": "DIRECT_API"}, "createdAt": 1610635865837}, "updateSource": {"sourceApplication": {"source": "DIRECT_API", "serviceName": "AutomationPlatformService-web"}, "updatedAt": 1611847907580}, "type": "DRIP_DELAY", "enabled": 
false, "id": 21058127, "portalId": 8727216, "insertedAt": 1610635865916, "updatedAt": 1611847907580, "name": "Test Workflow 6"}, "emitted_at": 1655280761826} +{"stream": "workflows", "data": {"migrationStatus": {"portalId": 8727216, "flowId": 50206989, "workflowId": 21058129, "migrationStatus": "EXECUTION_MIGRATED", "enrollmentMigrationStatus": "CLASSIC_OWNED", "platformOwnsActions": true, "lastSuccessfulMigrationTimestamp": null}, "contactListIds": {"enrolled": 40, "active": 41, "completed": 42, "succeeded": 43}, "personaTagIds": [], "contactCounts": {"active": 0, "enrolled": 0}, "creationSource": {"sourceApplication": {"source": "DIRECT_API"}, "createdAt": 1610635867253}, "updateSource": {"sourceApplication": {"source": "DIRECT_API", "serviceName": "AutomationPlatformService-web"}, "updatedAt": 1611847907579}, "type": "DRIP_DELAY", "enabled": false, "id": 21058129, "portalId": 8727216, "insertedAt": 1610635867311, "updatedAt": 1611847907579, "name": "Test Workflow 7"}, "emitted_at": 1655280761826} +{"stream": "workflows", "data": {"migrationStatus": {"portalId": 8727216, "flowId": 50205473, "workflowId": 21058131, "migrationStatus": "EXECUTION_MIGRATED", "enrollmentMigrationStatus": "CLASSIC_OWNED", "platformOwnsActions": true, "lastSuccessfulMigrationTimestamp": null}, "contactListIds": {"enrolled": 44, "active": 45, "completed": 46, "succeeded": 47}, "personaTagIds": [], "contactCounts": {"active": 0, "enrolled": 0}, "creationSource": {"sourceApplication": {"source": "DIRECT_API"}, "createdAt": 1610635868766}, "updateSource": {"sourceApplication": {"source": "DIRECT_API", "serviceName": "AutomationPlatformService-web"}, "updatedAt": 1611847907580}, "type": "DRIP_DELAY", "enabled": false, "id": 21058131, "portalId": 8727216, "insertedAt": 1610635868840, "updatedAt": 1611847907580, "name": "Test Workflow 8"}, "emitted_at": 1655280761827} +{"stream": "workflows", "data": {"migrationStatus": {"portalId": 8727216, "flowId": 50207234, "workflowId": 21058132, 
"migrationStatus": "EXECUTION_MIGRATED", "enrollmentMigrationStatus": "CLASSIC_OWNED", "platformOwnsActions": true, "lastSuccessfulMigrationTimestamp": null}, "contactListIds": {"enrolled": 48, "active": 49, "completed": 50, "succeeded": 51}, "personaTagIds": [], "contactCounts": {"active": 0, "enrolled": 0}, "creationSource": {"sourceApplication": {"source": "DIRECT_API"}, "createdAt": 1610635870326}, "updateSource": {"sourceApplication": {"source": "DIRECT_API", "serviceName": "AutomationPlatformService-web"}, "updatedAt": 1611847907580}, "type": "DRIP_DELAY", "enabled": false, "id": 21058132, "portalId": 8727216, "insertedAt": 1610635870378, "updatedAt": 1611847907580, "name": "Test Workflow 9"}, "emitted_at": 1655280761827} +{"stream": "workflows", "data": {"migrationStatus": {"portalId": 8727216, "flowId": 50206990, "workflowId": 21058133, "migrationStatus": "EXECUTION_MIGRATED", "enrollmentMigrationStatus": "CLASSIC_OWNED", "platformOwnsActions": true, "lastSuccessfulMigrationTimestamp": null}, "contactListIds": {"enrolled": 52, "active": 53, "completed": 54, "succeeded": 55}, "personaTagIds": [], "contactCounts": {"active": 0, "enrolled": 0}, "creationSource": {"sourceApplication": {"source": "DIRECT_API"}, "createdAt": 1610635871945}, "updateSource": {"sourceApplication": {"source": "DIRECT_API", "serviceName": "AutomationPlatformService-web"}, "updatedAt": 1611847907577}, "type": "DRIP_DELAY", "enabled": false, "id": 21058133, "portalId": 8727216, "insertedAt": 1610635871988, "updatedAt": 1611847907577, "name": "Test Workflow 10"}, "emitted_at": 1655280761827} +{"stream": "workflows", "data": {"migrationStatus": {"portalId": 8727216, "flowId": 50205688, "workflowId": 21058170, "migrationStatus": "EXECUTION_MIGRATED", "enrollmentMigrationStatus": "CLASSIC_OWNED", "platformOwnsActions": true, "lastSuccessfulMigrationTimestamp": null}, "contactListIds": {"enrolled": 56, "active": 57, "completed": 58, "succeeded": 59}, "personaTagIds": [], "contactCounts": 
{"active": 0, "enrolled": 0}, "creationSource": {"sourceApplication": {"source": "DIRECT_API"}, "createdAt": 1610635970544}, "updateSource": {"sourceApplication": {"source": "DIRECT_API", "serviceName": "AutomationPlatformService-web"}, "updatedAt": 1611847907578}, "type": "DRIP_DELAY", "enabled": false, "id": 21058170, "portalId": 8727216, "insertedAt": 1610635970626, "updatedAt": 1611847907578, "name": "Test Workflow 1"}, "emitted_at": 1655280761827} +{"stream": "workflows", "data": {"migrationStatus": {"portalId": 8727216, "flowId": 59837068, "workflowId": 23314874, "migrationStatus": "EXECUTION_MIGRATED", "enrollmentMigrationStatus": "CLASSIC_OWNED", "platformOwnsActions": true, "lastSuccessfulMigrationTimestamp": null}, "contactListIds": {"enrolled": 62, "active": 63, "completed": 64, "succeeded": 65}, "personaTagIds": [], "lastUpdatedByUserId": 12282590, "contactCounts": {"active": 0, "enrolled": 0}, "description": "", "creationSource": {"sourceApplication": {"source": "WORKFLOWS_APP", "serviceName": "https://app.hubspot.com/workflows/8727216/platform/create/new?flowTypeId=0-1&scrollToElementId=scroll-to-new-action-config-0-1"}, "createdByUser": {"userId": 12282590, "userEmail": "integration-test@airbyte.io"}, "createdAt": 1615805884687}, "updateSource": {"sourceApplication": {"source": "WORKFLOWS_APP", "serviceName": "https://app.hubspot.com/workflows/8727216/platform/create/new?flowTypeId=0-1&scrollToElementId=scroll-to-new-action-config-0-1"}, "updatedByUser": {"userId": 12282590, "userEmail": "integration-test@airbyte.io"}, "updatedAt": 1615805884687}, "type": "DRIP_DELAY", "enabled": false, "id": 23314874, "originalAuthorUserId": 12282590, "portalId": 8727216, "insertedAt": 1615805884696, "updatedAt": 1615805884687, "name": "Unnamed workflow - Mon Mar 15 2021 12:58:03 GMT+0200"}, "emitted_at": 1655280761827} diff --git a/airbyte-integrations/connectors/source-hubspot/sample_files/basic_read_catalog.json 
b/airbyte-integrations/connectors/source-hubspot/sample_files/basic_read_catalog.json index aeccc52d02f7..1ca557931598 100644 --- a/airbyte-integrations/connectors/source-hubspot/sample_files/basic_read_catalog.json +++ b/airbyte-integrations/connectors/source-hubspot/sample_files/basic_read_catalog.json @@ -13,32 +13,41 @@ "stream": { "name": "companies", "json_schema": {}, - "supported_sync_modes": ["full_refresh"] + "supported_sync_modes": ["full_refresh", "incremental"], + "source_defined_cursor": true, + "default_cursor_field": ["updatedAt"] }, "sync_mode": "full_refresh", - "destination_sync_mode": "overwrite" + "cursor_field": ["updatedAt"], + "destination_sync_mode": "append" }, { "stream": { "name": "contact_lists", "json_schema": {}, - "supported_sync_modes": ["full_refresh"] + "supported_sync_modes": ["full_refresh", "incremental"], + "source_defined_cursor": true, + "default_cursor_field": ["updatedAt"] }, "sync_mode": "full_refresh", - "destination_sync_mode": "overwrite" + "cursor_field": ["updatedAt"], + "destination_sync_mode": "append" }, { "stream": { "name": "contacts", "json_schema": {}, - "supported_sync_modes": ["full_refresh"] + "supported_sync_modes": ["full_refresh", "incremental"], + "source_defined_cursor": true, + "default_cursor_field": ["updatedAt"] }, "sync_mode": "full_refresh", - "destination_sync_mode": "overwrite" + "cursor_field": ["updatedAt"], + "destination_sync_mode": "append" }, { "stream": { - "name": "contacts_list_memberships", + "name": "deal_pipelines", "json_schema": {}, "supported_sync_modes": ["full_refresh"] }, @@ -47,30 +56,111 @@ }, { "stream": { - "name": "deal_pipelines", + "name": "deals", "json_schema": {}, - "supported_sync_modes": ["full_refresh"] + "supported_sync_modes": ["full_refresh", "incremental"], + "source_defined_cursor": true, + "default_cursor_field": ["updatedAt"] }, "sync_mode": "full_refresh", - "destination_sync_mode": "overwrite" + "cursor_field": ["updatedAt"], + "destination_sync_mode": 
"append" }, { "stream": { - "name": "deals", + "name": "email_events", "json_schema": {}, - "supported_sync_modes": ["full_refresh"] + "supported_sync_modes": ["full_refresh", "incremental"], + "source_defined_cursor": true, + "default_cursor_field": ["created"] }, "sync_mode": "full_refresh", - "destination_sync_mode": "overwrite" + "cursor_field": ["created"], + "destination_sync_mode": "append" }, { "stream": { "name": "engagements", "json_schema": {}, - "supported_sync_modes": ["full_refresh"] + "supported_sync_modes": ["full_refresh", "incremental"], + "source_defined_cursor": true, + "default_cursor_field": ["lastUpdated"] }, "sync_mode": "full_refresh", - "destination_sync_mode": "overwrite" + "cursor_field": ["lastUpdated"], + "destination_sync_mode": "append" + }, + { + "stream": { + "name": "engagements_calls", + "json_schema": {}, + "supported_sync_modes": ["full_refresh", "incremental"], + "source_defined_cursor": true, + "default_cursor_field": ["updatedAt"] + }, + "sync_mode": "full_refresh", + "cursor_field": ["updatedAt"], + "destination_sync_mode": "append" + }, + { + "stream": { + "name": "engagements_emails", + "json_schema": {}, + "supported_sync_modes": ["full_refresh", "incremental"], + "source_defined_cursor": true, + "default_cursor_field": ["updatedAt"] + }, + "sync_mode": "full_refresh", + "cursor_field": ["updatedAt"], + "destination_sync_mode": "append" + }, + { + "stream": { + "name": "engagements_meetings", + "json_schema": {}, + "supported_sync_modes": ["full_refresh", "incremental"], + "source_defined_cursor": true, + "default_cursor_field": ["updatedAt"] + }, + "sync_mode": "full_refresh", + "cursor_field": ["updatedAt"], + "destination_sync_mode": "append" + }, + { + "stream": { + "name": "engagements_notes", + "json_schema": {}, + "supported_sync_modes": ["full_refresh", "incremental"], + "source_defined_cursor": true, + "default_cursor_field": ["updatedAt"] + }, + "sync_mode": "full_refresh", + "cursor_field": ["updatedAt"], + 
"destination_sync_mode": "append" + }, + { + "stream": { + "name": "engagements_tasks", + "json_schema": {}, + "supported_sync_modes": ["full_refresh", "incremental"], + "source_defined_cursor": true, + "default_cursor_field": ["updatedAt"] + }, + "sync_mode": "full_refresh", + "cursor_field": ["updatedAt"], + "destination_sync_mode": "append" + }, + { + "stream": { + "name": "feedback_submissions", + "json_schema": {}, + "supported_sync_modes": ["full_refresh", "incremental"], + "source_defined_cursor": true, + "default_cursor_field": ["updatedAt"] + }, + "sync_mode": "full_refresh", + "cursor_field": ["updatedAt"], + "destination_sync_mode": "append" }, { "stream": { @@ -94,9 +184,24 @@ "stream": { "name": "line_items", "json_schema": {}, - "supported_sync_modes": ["full_refresh"] + "supported_sync_modes": ["full_refresh", "incremental"], + "source_defined_cursor": true, + "default_cursor_field": ["updatedAt"] + }, + "sync_mode": "full_refresh", + "cursor_field": ["updatedAt"], + "destination_sync_mode": "append" + }, + { + "stream": { + "name": "marketing_emails", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"], + "source_defined_cursor": false, + "default_cursor_field": ["updated"] }, "sync_mode": "full_refresh", + "cursor_field": null, "destination_sync_mode": "overwrite" }, { @@ -112,37 +217,60 @@ "stream": { "name": "products", "json_schema": {}, - "supported_sync_modes": ["full_refresh"] + "supported_sync_modes": ["full_refresh", "incremental"], + "source_defined_cursor": true, + "default_cursor_field": ["updatedAt"] }, "sync_mode": "full_refresh", - "destination_sync_mode": "overwrite" + "cursor_field": ["updatedAt"], + "destination_sync_mode": "append" }, { "stream": { "name": "property_history", "json_schema": {}, - "supported_sync_modes": ["full_refresh"] + "supported_sync_modes": ["full_refresh", "incremental"], + "default_cursor_field": ["timestamp"] }, "sync_mode": "full_refresh", + "cursor_field": ["timestamp"], 
"destination_sync_mode": "overwrite" }, { "stream": { "name": "quotes", "json_schema": {}, - "supported_sync_modes": ["full_refresh"] + "supported_sync_modes": ["full_refresh", "incremental"], + "source_defined_cursor": true, + "default_cursor_field": ["updatedAt"] }, "sync_mode": "full_refresh", - "destination_sync_mode": "overwrite" + "cursor_field": ["updatedAt"], + "destination_sync_mode": "append" + }, + { + "stream": { + "name": "subscription_changes", + "json_schema": {}, + "supported_sync_modes": ["full_refresh", "incremental"], + "source_defined_cursor": true, + "default_cursor_field": ["timestamp"] + }, + "sync_mode": "full_refresh", + "cursor_field": ["timestamp"], + "destination_sync_mode": "append" }, { "stream": { "name": "tickets", "json_schema": {}, - "supported_sync_modes": ["full_refresh"] + "supported_sync_modes": ["full_refresh", "incremental"], + "source_defined_cursor": true, + "default_cursor_field": ["updatedAt"] }, "sync_mode": "full_refresh", - "destination_sync_mode": "overwrite" + "cursor_field": ["updatedAt"], + "destination_sync_mode": "append" }, { "stream": { diff --git a/airbyte-integrations/connectors/source-hubspot/sample_files/basic_read_oauth_catalog.json b/airbyte-integrations/connectors/source-hubspot/sample_files/basic_read_oauth_catalog.json index 08402c4ee164..10b2772ae87d 100644 --- a/airbyte-integrations/connectors/source-hubspot/sample_files/basic_read_oauth_catalog.json +++ b/airbyte-integrations/connectors/source-hubspot/sample_files/basic_read_oauth_catalog.json @@ -71,7 +71,7 @@ "source_defined_cursor": true, "default_cursor_field": ["created"] }, - "sync_mode": "incremental", + "sync_mode": "full_refresh", "cursor_field": ["created"], "destination_sync_mode": "append" }, @@ -169,7 +169,7 @@ "source_defined_cursor": true, "default_cursor_field": ["timestamp"] }, - "sync_mode": "incremental", + "sync_mode": "full_refresh", "cursor_field": ["timestamp"], "destination_sync_mode": "append" }, diff --git 
a/airbyte-integrations/connectors/source-hubspot/source_hubspot/helpers.py b/airbyte-integrations/connectors/source-hubspot/source_hubspot/helpers.py index d34ca72be81f..cf35dc524ae3 100644 --- a/airbyte-integrations/connectors/source-hubspot/source_hubspot/helpers.py +++ b/airbyte-integrations/connectors/source-hubspot/source_hubspot/helpers.py @@ -3,7 +3,8 @@ # import abc -from typing import MutableMapping +import urllib.parse +from typing import Iterator, List, MutableMapping class IRecordPostProcessor(abc.ABC): @@ -51,3 +52,62 @@ def add_record(self, record: MutableMapping): @property def flat(self): return self._storage + + +class IURLPropertyRepresentation(abc.ABC): + # The value is obtained experimentally, HubSpot allows the URL length up to ~16300 symbols, + # so it was decided to limit the length of the `properties` parameter to 15000 characters. + PROPERTIES_PARAM_MAX_LENGTH = 15000 + + def __init__(self, properties: List[str]): + self.properties = properties + + def __bool__(self): + return bool(self.properties) + + @property + @abc.abstractmethod + def as_url_param(self): + """""" + + @property + @abc.abstractmethod + def _term_representation(self): + """""" + + def split(self) -> Iterator["IURLPropertyRepresentation"]: + summary_length = 0 + local_properties = [] + for property_ in self.properties: + current_property_length = len(urllib.parse.quote(self._term_representation.format(property=property_))) + if current_property_length + summary_length >= self.PROPERTIES_PARAM_MAX_LENGTH: + yield type(self)(local_properties) + local_properties = [] + summary_length = 0 + + local_properties.append(property_) + summary_length += current_property_length + + if local_properties: + yield type(self)(local_properties) + + @property + def too_many_properties(self) -> bool: + # Do not iterate over the generator until the end. 
Here we need to know if it produces more than one record + generator = self.split() + _ = next(generator) + return next(generator, None) is not None + + +class APIv1Property(IURLPropertyRepresentation): + _term_representation = "property={property}&" + + def as_url_param(self): + return {"property": self.properties} + + +class APIv3Property(IURLPropertyRepresentation): + _term_representation = "{property}," + + def as_url_param(self): + return {"properties": ",".join(self.properties)} diff --git a/airbyte-integrations/connectors/source-hubspot/source_hubspot/streams.py b/airbyte-integrations/connectors/source-hubspot/source_hubspot/streams.py index 4b790d7a5d72..9a839af1d0c6 100644 --- a/airbyte-integrations/connectors/source-hubspot/source_hubspot/streams.py +++ b/airbyte-integrations/connectors/source-hubspot/source_hubspot/streams.py @@ -2,14 +2,13 @@ # Copyright (c) 2022 Airbyte, Inc., all rights reserved. # - +import json import sys import time -import urllib.parse from abc import ABC, abstractmethod -from functools import lru_cache +from functools import cached_property, lru_cache from http import HTTPStatus -from typing import Any, Dict, Iterable, Iterator, List, Mapping, MutableMapping, Optional, Set, Tuple, Union +from typing import Any, Dict, Iterable, List, Mapping, MutableMapping, Optional, Set, Tuple, Union import backoff import pendulum as pendulum @@ -18,15 +17,10 @@ from airbyte_cdk.models import SyncMode from airbyte_cdk.sources.streams.http import HttpStream from airbyte_cdk.sources.streams.http.requests_native_auth import Oauth2Authenticator -from airbyte_cdk.sources.utils.sentry import AirbyteSentry from airbyte_cdk.sources.utils.transform import TransformConfig, TypeTransformer from requests import codes from source_hubspot.errors import HubspotAccessDenied, HubspotInvalidAuth, HubspotRateLimited, HubspotTimeout -from source_hubspot.helpers import GroupByKey, IRecordPostProcessor, StoreAsIs - -# The value is obtained experimentally, HubSpot 
allows the URL length up to ~16300 symbols, -# so it was decided to limit the length of the `properties` parameter to 15000 characters. -PROPERTIES_PARAM_MAX_LENGTH = 15000 +from source_hubspot.helpers import APIv1Property, APIv3Property, GroupByKey, IRecordPostProcessor, IURLPropertyRepresentation, StoreAsIs # we got this when provided API Token has incorrect format CLOUDFLARE_ORIGIN_DNS_ERROR = 530 @@ -60,29 +54,6 @@ CUSTOM_FIELD_VALUE_TO_TYPE = {v: k for k, v in CUSTOM_FIELD_TYPE_TO_VALUE.items()} -def split_properties(properties_list: List[str]) -> Iterator[Tuple[str]]: - summary_length = 0 - local_properties = [] - for property_ in properties_list: - if len(property_) + summary_length + len(urllib.parse.quote(",")) >= PROPERTIES_PARAM_MAX_LENGTH: - yield local_properties - local_properties = [] - summary_length = 0 - - local_properties.append(property_) - summary_length += len(property_) + len(urllib.parse.quote(",")) - - if local_properties: - yield local_properties - - -def too_many_properties(properties_list: List[str]) -> bool: - # Do not iterate over the generator until the end. 
Here we need to know if it produces more than one record - generator = split_properties(properties_list) - _ = next(generator) - return next(generator, None) is not None - - def retry_connection_handler(**kwargs): """Retry helper, log each attempt""" @@ -259,6 +230,13 @@ def path( ) -> str: return self.url + @cached_property + def _property_wrapper(self) -> IURLPropertyRepresentation: + properties = list(self.properties.keys()) + if "v1" in self.url: + return APIv1Property(properties) + return APIv3Property(properties) + def __init__(self, api: API, start_date: str = None, credentials: Mapping[str, Any] = None, **kwargs): super().__init__(**kwargs) self._api: API = api @@ -290,12 +268,12 @@ def handle_request( stream_slice: Mapping[str, Any] = None, stream_state: Mapping[str, Any] = None, next_page_token: Mapping[str, Any] = None, - params: Mapping[str, Any] = None, + properties: IURLPropertyRepresentation = None, ) -> requests.Response: request_headers = self.request_headers(stream_state=stream_state, stream_slice=stream_slice, next_page_token=next_page_token) request_params = self.request_params(stream_state=stream_state, stream_slice=stream_slice, next_page_token=next_page_token) - if params: - request_params.update(params) + if properties: + request_params.update(properties.as_url_param()) request = self._create_prepared_request( path=self.path(stream_state=stream_state, stream_slice=stream_slice, next_page_token=next_page_token), @@ -321,7 +299,6 @@ def handle_request( def _read_stream_records( self, - properties_list: List[str], stream_slice: Mapping[str, Any] = None, stream_state: Mapping[str, Any] = None, next_page_token: Mapping[str, Any] = None, @@ -337,10 +314,10 @@ def _read_stream_records( post_processor: IRecordPostProcessor = GroupByKey(self.primary_key) if group_by_pk else StoreAsIs() response = None - for properties in split_properties(properties_list): - params = {"property": properties} + properties = self._property_wrapper + for chunk in 
properties.split(): response = self.handle_request( - stream_slice=stream_slice, stream_state=stream_state, next_page_token=next_page_token, params=params + stream_slice=stream_slice, stream_state=stream_state, next_page_token=next_page_token, properties=chunk ) for record in self._transform(self.parse_response(response, stream_state=stream_state)): post_processor.add_record(record) @@ -359,41 +336,42 @@ def read_records( next_page_token = None try: - with AirbyteSentry.start_transaction("read_records", self.name), AirbyteSentry.start_transaction_span("read_records"): - while not pagination_complete: - - properties_list = list(self.properties.keys()) - if properties_list and too_many_properties(properties_list): - records, response = self._read_stream_records( - properties_list=properties_list, - stream_slice=stream_slice, - stream_state=stream_state, - next_page_token=next_page_token, - ) - else: - response = self.handle_request( - stream_slice=stream_slice, - stream_state=stream_state, - next_page_token=next_page_token, - params={"property": properties_list}, - ) - records = self._transform(self.parse_response(response, stream_state=stream_state, stream_slice=stream_slice)) - - if self.filter_old_records: - records = self._filter_old_records(records) - yield from records - - next_page_token = self.next_page_token(response) - if not next_page_token: - pagination_complete = True - - # Always return an empty generator just in case no records were ever yielded - yield from [] + while not pagination_complete: + + properties = self._property_wrapper + if properties and properties.too_many_properties: + records, response = self._read_stream_records( + stream_slice=stream_slice, + stream_state=stream_state, + next_page_token=next_page_token, + ) + else: + response = self.handle_request( + stream_slice=stream_slice, + stream_state=stream_state, + next_page_token=next_page_token, + properties=properties, + ) + records = self._transform(self.parse_response(response, 
stream_state=stream_state, stream_slice=stream_slice)) + + if self.filter_old_records: + records = self._filter_old_records(records) + yield from records + + next_page_token = self.next_page_token(response) + if not next_page_token: + pagination_complete = True + + # Always return an empty generator just in case no records were ever yielded + yield from [] except requests.exceptions.HTTPError as e: raise e def parse_response_error_message(self, response: requests.Response) -> Optional[str]: - body = response.json() + try: + body = response.json() + except json.decoder.JSONDecodeError: + return response.text if body.get("category") == "MISSING_SCOPES": if "errors" in body: errors = body["errors"] @@ -796,12 +774,12 @@ def search( def _process_search( self, - properties_list: List[str], stream_slice: Mapping[str, Any] = None, stream_state: Mapping[str, Any] = None, next_page_token: Mapping[str, Any] = None, ) -> Tuple[List, requests.Response]: stream_records = {} + properties_list = list(self.properties.keys()) payload = ( { "filters": [{"value": int(self._state.timestamp() * 1000), "propertyName": self.last_modified_field, "operator": "GTE"}], @@ -833,47 +811,42 @@ def read_records( next_page_token = None latest_cursor = None - with AirbyteSentry.start_transaction("read_records", self.name), AirbyteSentry.start_transaction_span("read_records"): - while not pagination_complete: - properties_list = list(self.properties.keys()) - - if self.state: - records, raw_response = self._process_search( - properties_list, - next_page_token=next_page_token, - stream_state=stream_state, - stream_slice=stream_slice, - ) + while not pagination_complete: + if self.state: + records, raw_response = self._process_search( + next_page_token=next_page_token, + stream_state=stream_state, + stream_slice=stream_slice, + ) - else: - records, raw_response = self._read_stream_records( - properties_list=properties_list, - stream_slice=stream_slice, - stream_state=stream_state, - 
next_page_token=next_page_token, - ) - records = self._filter_old_records(records) - records = self._flat_associations(records) - - for record in records: - cursor = self._field_to_datetime(record[self.updated_at_field]) - latest_cursor = max(cursor, latest_cursor) if latest_cursor else cursor - yield record + else: + records, raw_response = self._read_stream_records( + stream_slice=stream_slice, + stream_state=stream_state, + next_page_token=next_page_token, + ) + records = self._filter_old_records(records) + records = self._flat_associations(records) + + for record in records: + cursor = self._field_to_datetime(record[self.updated_at_field]) + latest_cursor = max(cursor, latest_cursor) if latest_cursor else cursor + yield record + + next_page_token = self.next_page_token(raw_response) + if not next_page_token: + pagination_complete = True + elif self.state and next_page_token["payload"]["after"] >= 10000: + # Hubspot documentation states that the search endpoints are limited to 10,000 total results + # for any given query. Attempting to page beyond 10,000 will result in a 400 error. + # https://developers.hubspot.com/docs/api/crm/search. We stop getting data at 10,000 and + # start a new search query with the latest state that has been collected. + self._update_state(latest_cursor=latest_cursor) + next_page_token = None - next_page_token = self.next_page_token(raw_response) - if not next_page_token: - pagination_complete = True - elif self.state and next_page_token["payload"]["after"] >= 10000: - # Hubspot documentation states that the search endpoints are limited to 10,000 total results - # for any given query. Attempting to page beyond 10,000 will result in a 400 error. - # https://developers.hubspot.com/docs/api/crm/search. We stop getting data at 10,000 and - # start a new search query with the latest state that has been collected. 
- self._update_state(latest_cursor=latest_cursor) - next_page_token = None - - self._update_state(latest_cursor=latest_cursor) - # Always return an empty generator just in case no records were ever yielded - yield from [] + self._update_state(latest_cursor=latest_cursor) + # Always return an empty generator just in case no records were ever yielded + yield from [] def request_params( self, @@ -1178,33 +1151,33 @@ def read_records( next_page_token = None latest_cursor = None - with AirbyteSentry.start_transaction("read_records", self.name), AirbyteSentry.start_transaction_span("read_records"): - while not pagination_complete: - response = self.handle_request(stream_slice=stream_slice, stream_state=stream_state, next_page_token=next_page_token) - records = self._transform(self.parse_response(response, stream_state=stream_state, stream_slice=stream_slice)) - - if self.filter_old_records: - records = self._filter_old_records(records) - for record in records: - cursor = self._field_to_datetime(record[self.updated_at_field]) - latest_cursor = max(cursor, latest_cursor) if latest_cursor else cursor - yield record + while not pagination_complete: + response = self.handle_request(stream_slice=stream_slice, stream_state=stream_state, next_page_token=next_page_token) + records = self._transform(self.parse_response(response, stream_state=stream_state, stream_slice=stream_slice)) - next_page_token = self.next_page_token(response) - if self.state and next_page_token and next_page_token["offset"] >= 10000: - # As per Hubspot documentation, the recent engagements endpoint will only return the 10K - # most recently updated engagements. Since they are returned sorted by `lastUpdated` in - # descending order, we stop getting records if we have already reached 10,000. Attempting - # to get more than 10K will result in a HTTP 400 error. 
- # https://legacydocs.hubspot.com/docs/methods/engagements/get-recent-engagements - next_page_token = None - - if not next_page_token: - pagination_complete = True + if self.filter_old_records: + records = self._filter_old_records(records) - # Always return an empty generator just in case no records were ever yielded - yield from [] + for record in records: + cursor = self._field_to_datetime(record[self.updated_at_field]) + latest_cursor = max(cursor, latest_cursor) if latest_cursor else cursor + yield record + + next_page_token = self.next_page_token(response) + if self.state and next_page_token and next_page_token["offset"] >= 10000: + # As per Hubspot documentation, the recent engagements endpoint will only return the 10K + # most recently updated engagements. Since they are returned sorted by `lastUpdated` in + # descending order, we stop getting records if we have already reached 10,000. Attempting + # to get more than 10K will result in a HTTP 400 error. + # https://legacydocs.hubspot.com/docs/methods/engagements/get-recent-engagements + next_page_token = None + + if not next_page_token: + pagination_complete = True + + # Always return an empty generator just in case no records were ever yielded + yield from [] self._update_state(latest_cursor=latest_cursor) diff --git a/airbyte-integrations/connectors/source-hubspot/unit_tests/test_source.py b/airbyte-integrations/connectors/source-hubspot/unit_tests/test_source.py index c49839d10aba..0a66137509c2 100644 --- a/airbyte-integrations/connectors/source-hubspot/unit_tests/test_source.py +++ b/airbyte-integrations/connectors/source-hubspot/unit_tests/test_source.py @@ -11,18 +11,9 @@ import pytest from airbyte_cdk.models import ConfiguredAirbyteCatalog, SyncMode, Type from source_hubspot.errors import HubspotRateLimited +from source_hubspot.helpers import APIv3Property from source_hubspot.source import SourceHubspot -from source_hubspot.streams import ( - API, - PROPERTIES_PARAM_MAX_LENGTH, - Companies, - Deals, 
- Engagements, - Products, - Stream, - Workflows, - split_properties, -) +from source_hubspot.streams import API, Companies, Deals, Engagements, Products, Stream, Workflows from .utils import read_full_refresh, read_incremental @@ -206,21 +197,13 @@ def get(self, url, api, params=None): response = api._session.get(api.BASE_URL + url, params=params) return api._parse_and_handle_errors(response) - def test_splitting_properties(self, fake_properties_list): - """ - Check that properties are split into multiple arrays - """ - for slice_property in split_properties(fake_properties_list): - slice_length = [len(item) for item in slice_property] - assert sum(slice_length) <= PROPERTIES_PARAM_MAX_LENGTH - def test_stream_with_splitting_properties(self, requests_mock, api, fake_properties_list, common_params): """ Check working stream `companies` with large list of properties using new functionality with splitting properties """ test_stream = Companies(**common_params) - parsed_properties = list(split_properties(fake_properties_list)) + parsed_properties = list(APIv3Property(fake_properties_list).split()) self.set_mock_properties(requests_mock, "/properties/v2/company/properties", fake_properties_list) record_ids_paginated = [list(map(str, range(100))), list(map(str, range(100, 150, 1)))] @@ -236,7 +219,7 @@ def test_stream_with_splitting_properties(self, requests_mock, api, fake_propert { "json": { "results": [ - {**self.BASE_OBJECT_BODY, **{"id": id, "properties": {p: "fake_data" for p in property_slice}}} + {**self.BASE_OBJECT_BODY, **{"id": id, "properties": {p: "fake_data" for p in property_slice.properties}}} for id in id_list ], "paging": {"next": {"after": id_list[-1]}} if len(id_list) == 100 else {}, @@ -244,10 +227,10 @@ def test_stream_with_splitting_properties(self, requests_mock, api, fake_propert "status_code": 200, } ] - property_param_set = "&".join([f"property={prop}" for prop in property_slice]) + prop_key, prop_val = 
next(iter(property_slice.as_url_param().items())) requests_mock.register_uri( "GET", - f"{test_stream_url}?limit=100&{property_param_set}{f'&after={after_id}' if after_id else ''}", + f"{test_stream_url}?limit=100&{prop_key}={prop_val}{f'&after={after_id}' if after_id else ''}", record_responses, ) after_id = id_list[-1] @@ -265,7 +248,7 @@ def test_stream_with_splitting_properties_with_pagination(self, requests_mock, c Check working stream `products` with large list of properties using new functionality with splitting properties """ - parsed_properties = list(split_properties(fake_properties_list)) + parsed_properties = list(APIv3Property(fake_properties_list).split()) self.set_mock_properties(requests_mock, "/properties/v2/product/properties", fake_properties_list) test_stream = Products(**common_params) @@ -275,7 +258,7 @@ def test_stream_with_splitting_properties_with_pagination(self, requests_mock, c { "json": { "results": [ - {**self.BASE_OBJECT_BODY, **{"id": id, "properties": {p: "fake_data" for p in property_slice}}} + {**self.BASE_OBJECT_BODY, **{"id": id, "properties": {p: "fake_data" for p in property_slice.properties}}} for id in ["6043593519", "1092593519", "1092593518", "1092593517", "1092593516"] ], "paging": {}, @@ -283,8 +266,8 @@ def test_stream_with_splitting_properties_with_pagination(self, requests_mock, c "status_code": 200, } ] - property_param_set = "&".join([f"property={prop}" for prop in property_slice]) - requests_mock.register_uri("GET", f"{test_stream.url}?{property_param_set}", record_responses) + prop_key, prop_val = next(iter(property_slice.as_url_param().items())) + requests_mock.register_uri("GET", f"{test_stream.url}?{prop_key}={prop_val}", record_responses) stream_records = list(test_stream.read_records(sync_mode=SyncMode.incremental)) @@ -297,7 +280,7 @@ def test_stream_with_splitting_properties_with_new_record(self, requests_mock, c Check working stream `workflows` with large list of properties using new functionality with 
splitting properties """ - parsed_properties = list(split_properties(fake_properties_list)) + parsed_properties = list(APIv3Property(fake_properties_list).split()) self.set_mock_properties(requests_mock, "/properties/v2/deal/properties", fake_properties_list) test_stream = Deals(**common_params) @@ -308,7 +291,7 @@ def test_stream_with_splitting_properties_with_new_record(self, requests_mock, c { "json": { "results": [ - {**self.BASE_OBJECT_BODY, **{"id": id, "properties": {p: "fake_data" for p in property_slice}}} + {**self.BASE_OBJECT_BODY, **{"id": id, "properties": {p: "fake_data" for p in property_slice.properties}}} for id in ids_list ], "paging": {}, @@ -317,8 +300,8 @@ def test_stream_with_splitting_properties_with_new_record(self, requests_mock, c } ] test_stream._sync_mode = SyncMode.full_refresh - property_param_set = "&".join([f"property={prop}" for prop in property_slice]) - requests_mock.register_uri("GET", f"{test_stream.url}?{property_param_set}", record_responses) + prop_key, prop_val = next(iter(property_slice.as_url_param().items())) + requests_mock.register_uri("GET", f"{test_stream.url}?{prop_key}={prop_val}", record_responses) test_stream._sync_mode = None ids_list.append("1092593513") diff --git a/airbyte-integrations/connectors/source-hubspot/unit_tests/test_split_properties.py b/airbyte-integrations/connectors/source-hubspot/unit_tests/test_split_properties.py new file mode 100644 index 000000000000..fd199345c3f8 --- /dev/null +++ b/airbyte-integrations/connectors/source-hubspot/unit_tests/test_split_properties.py @@ -0,0 +1,33 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+# + +import pytest +from source_hubspot.helpers import APIv1Property, APIv3Property + +lorem_ipsum = """Lorem ipsum dolor sit amet, consectetur adipiscing elit""" +lorem_ipsum = lorem_ipsum.lower().replace(",", "") + +many_properties = lorem_ipsum.split(" ") * 1000 +few_properties = ["firstname", "lastname", "age", "dob", "id"] + + +@pytest.mark.parametrize( + ("cls", "properties", "chunks_expected"), + ( + (APIv1Property, few_properties, 1), + (APIv3Property, few_properties, 1), + (APIv1Property, many_properties, 11), + (APIv3Property, many_properties, 5), + ), +) +def test_split_properties(cls, properties, chunks_expected): + chunked_properties = set() + index = 0 + for index, chunk in enumerate(cls(properties).split()): + chunked_properties |= set(chunk.properties) + as_string = next(iter(chunk.as_url_param().values())) + assert len(as_string) <= cls.PROPERTIES_PARAM_MAX_LENGTH + chunks = index + 1 + assert chunked_properties == set(properties) + assert chunks == chunks_expected diff --git a/airbyte-integrations/connectors/source-instagram/acceptance-test-config.yml b/airbyte-integrations/connectors/source-instagram/acceptance-test-config.yml index dbd0c9eba8b1..4760483b6fce 100644 --- a/airbyte-integrations/connectors/source-instagram/acceptance-test-config.yml +++ b/airbyte-integrations/connectors/source-instagram/acceptance-test-config.yml @@ -22,3 +22,19 @@ tests: full_refresh: - config_path: "secrets/config.json" configured_catalog_path: "integration_tests/configured_catalog.json" + ignored_fields: + "user_insights": + - email_contacts + - follower_count + - get_directions_clicks + - impressions + - phone_call_clicks + - profile_views + - reach + - text_message_clicks + - website_clicks + - impressions_week + - reach_week + - impressions_days_28 + - reach_days_28 + - online_followers \ No newline at end of file diff --git a/airbyte-integrations/connectors/source-intercom/Dockerfile b/airbyte-integrations/connectors/source-intercom/Dockerfile index 
7161dcc3e8ce..be364fb38929 100644 --- a/airbyte-integrations/connectors/source-intercom/Dockerfile +++ b/airbyte-integrations/connectors/source-intercom/Dockerfile @@ -35,5 +35,5 @@ COPY source_intercom ./source_intercom ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] -LABEL io.airbyte.version=0.1.19 +LABEL io.airbyte.version=0.1.20 LABEL io.airbyte.name=airbyte/source-intercom diff --git a/airbyte-integrations/connectors/source-intercom/source_intercom/schemas/contacts.json b/airbyte-integrations/connectors/source-intercom/source_intercom/schemas/contacts.json index 9a82a3708302..f49bf0ff03a5 100755 --- a/airbyte-integrations/connectors/source-intercom/source_intercom/schemas/contacts.json +++ b/airbyte-integrations/connectors/source-intercom/source_intercom/schemas/contacts.json @@ -68,6 +68,9 @@ "unsubscribed_from_emails": { "type": ["null", "boolean"] }, + "unsubscribed_from_sms": { + "type": ["null", "boolean"] + }, "created_at": { "type": ["null", "integer"] }, @@ -77,6 +80,9 @@ "signed_up_at": { "type": ["null", "integer"] }, + "sms_consent": { + "type": ["null", "boolean"] + }, "last_seen_at": { "type": ["null", "integer"] }, diff --git a/airbyte-integrations/connectors/source-jdbc/Dockerfile b/airbyte-integrations/connectors/source-jdbc/Dockerfile index 8f44d096053e..cfb402f8f23c 100644 --- a/airbyte-integrations/connectors/source-jdbc/Dockerfile +++ b/airbyte-integrations/connectors/source-jdbc/Dockerfile @@ -16,5 +16,5 @@ ENV APPLICATION source-jdbc COPY --from=build /airbyte /airbyte -LABEL io.airbyte.version=0.3.3 +LABEL io.airbyte.version=0.3.4 LABEL io.airbyte.name=airbyte/source-jdbc diff --git a/airbyte-integrations/connectors/source-jdbc/build.gradle b/airbyte-integrations/connectors/source-jdbc/build.gradle index 91b7d93cfa8c..2e9393f32335 100644 --- a/airbyte-integrations/connectors/source-jdbc/build.gradle +++ b/airbyte-integrations/connectors/source-jdbc/build.gradle 
@@ -28,10 +28,10 @@ dependencies { testImplementation project(':airbyte-test-utils') testImplementation libs.postgresql - testImplementation libs.testcontainers.postgresql + testImplementation libs.connectors.testcontainers.postgresql integrationTestJavaImplementation project(':airbyte-integrations:bases:standard-source-test') - integrationTestJavaImplementation libs.testcontainers.postgresql + integrationTestJavaImplementation libs.connectors.testcontainers.postgresql testFixturesImplementation "org.hamcrest:hamcrest-all:1.3" testFixturesImplementation project(':airbyte-protocol:protocol-models') diff --git a/airbyte-integrations/connectors/source-jdbc/src/test/java/io/airbyte/integrations/source/jdbc/AbstractJdbcSourceAcceptanceTest.java b/airbyte-integrations/connectors/source-jdbc/src/test/java/io/airbyte/integrations/source/jdbc/AbstractJdbcSourceAcceptanceTest.java index 909194580404..b6c9c02598e0 100644 --- a/airbyte-integrations/connectors/source-jdbc/src/test/java/io/airbyte/integrations/source/jdbc/AbstractJdbcSourceAcceptanceTest.java +++ b/airbyte-integrations/connectors/source-jdbc/src/test/java/io/airbyte/integrations/source/jdbc/AbstractJdbcSourceAcceptanceTest.java @@ -15,8 +15,14 @@ import io.airbyte.integrations.base.IntegrationRunner; import io.airbyte.integrations.base.Source; import io.airbyte.integrations.source.jdbc.test.JdbcSourceAcceptanceTest; +import io.airbyte.integrations.source.relationaldb.models.CdcState; +import io.airbyte.protocol.models.AirbyteGlobalState; +import io.airbyte.protocol.models.AirbyteStateMessage; +import io.airbyte.protocol.models.AirbyteStateMessage.AirbyteStateType; +import io.airbyte.protocol.models.AirbyteStreamState; import io.airbyte.test.utils.PostgreSQLContainerHelper; import java.sql.JDBCType; +import java.util.List; import java.util.Set; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.BeforeAll; @@ -82,6 +88,11 @@ public String getDriverClass() { return PostgresTestSource.DRIVER_CLASS; 
} + @Override + protected boolean supportsPerStream() { + return true; + } + @AfterAll static void cleanUp() { PSQL_DB.close(); @@ -118,6 +129,27 @@ public Set getExcludedInternalNameSpaces() { return Set.of("information_schema", "pg_catalog", "pg_internal", "catalog_history"); } + // TODO This is a temporary override so that the Postgres source can take advantage of per-stream + // state + @Override + protected List generateEmptyInitialState(final JsonNode config) { + if (getSupportedStateType(config) == AirbyteStateType.GLOBAL) { + final AirbyteGlobalState globalState = new AirbyteGlobalState() + .withSharedState(Jsons.jsonNode(new CdcState())) + .withStreamStates(List.of()); + return List.of(new AirbyteStateMessage().withType(AirbyteStateType.GLOBAL).withGlobal(globalState)); + } else { + return List.of(new AirbyteStateMessage() + .withType(AirbyteStateType.STREAM) + .withStream(new AirbyteStreamState())); + } + } + + @Override + protected AirbyteStateType getSupportedStateType(final JsonNode config) { + return AirbyteStateType.STREAM; + } + public static void main(final String[] args) throws Exception { final Source source = new PostgresTestSource(); LOGGER.info("starting source: {}", PostgresTestSource.class); diff --git a/airbyte-integrations/connectors/source-jdbc/src/testFixtures/java/io/airbyte/integrations/source/jdbc/test/JdbcSourceAcceptanceTest.java b/airbyte-integrations/connectors/source-jdbc/src/testFixtures/java/io/airbyte/integrations/source/jdbc/test/JdbcSourceAcceptanceTest.java index be29c888993f..4d0ee82fbc51 100644 --- a/airbyte-integrations/connectors/source-jdbc/src/testFixtures/java/io/airbyte/integrations/source/jdbc/test/JdbcSourceAcceptanceTest.java +++ b/airbyte-integrations/connectors/source-jdbc/src/testFixtures/java/io/airbyte/integrations/source/jdbc/test/JdbcSourceAcceptanceTest.java @@ -13,10 +13,6 @@ import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.node.ObjectNode; -import 
com.google.common.collect.ImmutableList; -import com.google.common.collect.ImmutableMap; -import com.google.common.collect.ImmutableSet; -import com.google.common.collect.Lists; import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; import io.airbyte.commons.json.Jsons; import io.airbyte.commons.resources.MoreResources; @@ -39,7 +35,9 @@ import io.airbyte.protocol.models.AirbyteMessage.Type; import io.airbyte.protocol.models.AirbyteRecordMessage; import io.airbyte.protocol.models.AirbyteStateMessage; +import io.airbyte.protocol.models.AirbyteStateMessage.AirbyteStateType; import io.airbyte.protocol.models.AirbyteStream; +import io.airbyte.protocol.models.AirbyteStreamState; import io.airbyte.protocol.models.CatalogHelpers; import io.airbyte.protocol.models.ConfiguredAirbyteCatalog; import io.airbyte.protocol.models.ConfiguredAirbyteStream; @@ -47,6 +45,7 @@ import io.airbyte.protocol.models.DestinationSyncMode; import io.airbyte.protocol.models.Field; import io.airbyte.protocol.models.JsonSchemaType; +import io.airbyte.protocol.models.StreamDescriptor; import io.airbyte.protocol.models.SyncMode; import java.math.BigDecimal; import java.sql.SQLException; @@ -54,6 +53,7 @@ import java.util.Collections; import java.util.Comparator; import java.util.List; +import java.util.Map; import java.util.Optional; import java.util.Set; import java.util.function.Function; @@ -82,7 +82,7 @@ public abstract class JdbcSourceAcceptanceTest { // otherwise parallel runs can interfere with each other public static String SCHEMA_NAME = Strings.addRandomSuffix("jdbc_integration_test1", "_", 5).toLowerCase(); public static String SCHEMA_NAME2 = Strings.addRandomSuffix("jdbc_integration_test2", "_", 5).toLowerCase(); - public static Set TEST_SCHEMAS = ImmutableSet.of(SCHEMA_NAME, SCHEMA_NAME2); + public static Set TEST_SCHEMAS = Set.of(SCHEMA_NAME, SCHEMA_NAME2); public static String TABLE_NAME = "id_and_name"; public static String TABLE_NAME_WITH_SPACES = "id and name"; @@ -255,7 
+255,7 @@ public void setup() throws Exception { connection.createStatement().execute( createTableQuery(getFullyQualifiedTableName(TABLE_NAME_COMPOSITE_PK), COLUMN_CLAUSE_WITH_COMPOSITE_PK, - primaryKeyClause(ImmutableList.of("first_name", "last_name")))); + primaryKeyClause(List.of("first_name", "last_name")))); connection.createStatement().execute( String.format( "INSERT INTO %s(first_name, last_name, updated_at) VALUES ('first' ,'picard', '2004-10-19')", @@ -354,12 +354,15 @@ void testDiscoverWithMultipleSchemas() throws Exception { final AirbyteCatalog actual = source.discover(config); final AirbyteCatalog expected = getCatalog(getDefaultNamespace()); - expected.getStreams().add(CatalogHelpers + final List catalogStreams = new ArrayList<>(); + catalogStreams.addAll(expected.getStreams()); + catalogStreams.add(CatalogHelpers .createAirbyteStream(TABLE_NAME, SCHEMA_NAME2, Field.of(COL_ID, JsonSchemaType.STRING), Field.of(COL_NAME, JsonSchemaType.STRING)) - .withSupportedSyncModes(Lists.newArrayList(SyncMode.FULL_REFRESH, SyncMode.INCREMENTAL))); + .withSupportedSyncModes(List.of(SyncMode.FULL_REFRESH, SyncMode.INCREMENTAL))); + expected.setStreams(catalogStreams); // sort streams by name so that we are comparing lists with the same order. final Comparator schemaTableCompare = Comparator.comparing(stream -> stream.getNamespace() + "." 
+ stream.getName()); expected.getStreams().sort(schemaTableCompare); @@ -388,6 +391,12 @@ void testReadOneColumn() throws Exception { setEmittedAtToNull(actualMessages); + final List expectedMessages = getAirbyteMessagesReadOneColumn(); + assertEquals(expectedMessages.size(), actualMessages.size()); + assertEquals(expectedMessages, actualMessages); + } + + protected List getAirbyteMessagesReadOneColumn() { final List expectedMessages = getTestMessages().stream() .map(Jsons::clone) .peek(m -> { @@ -397,9 +406,7 @@ void testReadOneColumn() throws Exception { convertIdBasedOnDatabase(m.getRecord().getData().get(COL_ID).asInt())); }) .collect(Collectors.toList()); - assertTrue(expectedMessages.size() == actualMessages.size()); - assertTrue(expectedMessages.containsAll(actualMessages)); - assertTrue(actualMessages.containsAll(expectedMessages)); + return expectedMessages; } @Test @@ -432,18 +439,7 @@ void testReadMultipleTables() throws Exception { Field.of(COL_ID, JsonSchemaType.NUMBER), Field.of(COL_NAME, JsonSchemaType.STRING))); - final List secondStreamExpectedMessages = getTestMessages() - .stream() - .map(Jsons::clone) - .peek(m -> { - m.getRecord().setStream(streamName2); - m.getRecord().setNamespace(getDefaultNamespace()); - ((ObjectNode) m.getRecord().getData()).remove(COL_UPDATED_AT); - ((ObjectNode) m.getRecord().getData()).replace(COL_ID, - convertIdBasedOnDatabase(m.getRecord().getData().get(COL_ID).asInt())); - }) - .collect(Collectors.toList()); - expectedMessages.addAll(secondStreamExpectedMessages); + expectedMessages.addAll(getAirbyteMessagesSecondSync(streamName2)); } final List actualMessages = MoreIterators @@ -451,9 +447,23 @@ void testReadMultipleTables() throws Exception { setEmittedAtToNull(actualMessages); - assertTrue(expectedMessages.size() == actualMessages.size()); - assertTrue(expectedMessages.containsAll(actualMessages)); - assertTrue(actualMessages.containsAll(expectedMessages)); + assertEquals(expectedMessages.size(), 
actualMessages.size()); + assertEquals(expectedMessages, actualMessages); + } + + protected List getAirbyteMessagesSecondSync(final String streamName2) { + return getTestMessages() + .stream() + .map(Jsons::clone) + .peek(m -> { + m.getRecord().setStream(streamName2); + m.getRecord().setNamespace(getDefaultNamespace()); + ((ObjectNode) m.getRecord().getData()).remove(COL_UPDATED_AT); + ((ObjectNode) m.getRecord().getData()).replace(COL_ID, + convertIdBasedOnDatabase(m.getRecord().getData().get(COL_ID).asInt())); + }) + .collect(Collectors.toList()); + } @Test @@ -461,7 +471,7 @@ void testTablesWithQuoting() throws Exception { final ConfiguredAirbyteStream streamForTableWithSpaces = createTableWithSpaces(); final ConfiguredAirbyteCatalog catalog = new ConfiguredAirbyteCatalog() - .withStreams(Lists.newArrayList( + .withStreams(List.of( getConfiguredCatalogWithOneStream(getDefaultNamespace()).getStreams().get(0), streamForTableWithSpaces)); final List actualMessages = MoreIterators @@ -469,7 +479,15 @@ void testTablesWithQuoting() throws Exception { setEmittedAtToNull(actualMessages); - final List secondStreamExpectedMessages = getTestMessages() + final List expectedMessages = new ArrayList<>(getTestMessages()); + expectedMessages.addAll(getAirbyteMessagesForTablesWithQuoting(streamForTableWithSpaces)); + + assertEquals(expectedMessages.size(), actualMessages.size()); + assertEquals(expectedMessages, actualMessages); + } + + protected List getAirbyteMessagesForTablesWithQuoting(final ConfiguredAirbyteStream streamForTableWithSpaces) { + return getTestMessages() .stream() .map(Jsons::clone) .peek(m -> { @@ -481,12 +499,6 @@ void testTablesWithQuoting() throws Exception { convertIdBasedOnDatabase(m.getRecord().getData().get(COL_ID).asInt())); }) .collect(Collectors.toList()); - final List expectedMessages = new ArrayList<>(getTestMessages()); - expectedMessages.addAll(secondStreamExpectedMessages); - - assertTrue(expectedMessages.size() == actualMessages.size()); - 
assertTrue(expectedMessages.containsAll(actualMessages)); - assertTrue(actualMessages.containsAll(expectedMessages)); } @SuppressWarnings("ResultOfMethodCallIgnored") @@ -495,7 +507,7 @@ void testReadFailure() { final ConfiguredAirbyteStream spiedAbStream = spy( getConfiguredCatalogWithOneStream(getDefaultNamespace()).getStreams().get(0)); final ConfiguredAirbyteCatalog catalog = new ConfiguredAirbyteCatalog() - .withStreams(Lists.newArrayList(spiedAbStream)); + .withStreams(List.of(spiedAbStream)); doCallRealMethod().doThrow(new RuntimeException()).when(spiedAbStream).getStream(); assertThrows(RuntimeException.class, () -> source.read(config, catalog, null)); @@ -507,7 +519,7 @@ void testIncrementalNoPreviousState() throws Exception { COL_ID, null, "3", - Lists.newArrayList(getTestMessages())); + getTestMessages()); } @Test @@ -516,7 +528,7 @@ void testIncrementalIntCheckCursor() throws Exception { COL_ID, "2", "3", - Lists.newArrayList(getTestMessages().get(2))); + List.of(getTestMessages().get(2))); } @Test @@ -525,13 +537,24 @@ void testIncrementalStringCheckCursor() throws Exception { COL_NAME, "patent", "vash", - Lists.newArrayList(getTestMessages().get(0), getTestMessages().get(2))); + List.of(getTestMessages().get(0), getTestMessages().get(2))); } @Test void testIncrementalStringCheckCursorSpaceInColumnName() throws Exception { final ConfiguredAirbyteStream streamWithSpaces = createTableWithSpaces(); + final List expectedRecordMessages = getAirbyteMessagesCheckCursorSpaceInColumnName(streamWithSpaces); + incrementalCursorCheck( + COL_LAST_NAME_WITH_SPACE, + COL_LAST_NAME_WITH_SPACE, + "patent", + "vash", + expectedRecordMessages, + streamWithSpaces); + } + + protected List getAirbyteMessagesCheckCursorSpaceInColumnName(final ConfiguredAirbyteStream streamWithSpaces) { final AirbyteMessage firstMessage = getTestMessages().get(0); firstMessage.getRecord().setStream(streamWithSpaces.getStream().getName()); ((ObjectNode) 
firstMessage.getRecord().getData()).remove(COL_UPDATED_AT); @@ -544,28 +567,20 @@ void testIncrementalStringCheckCursorSpaceInColumnName() throws Exception { ((ObjectNode) secondMessage.getRecord().getData()).set(COL_LAST_NAME_WITH_SPACE, ((ObjectNode) secondMessage.getRecord().getData()).remove(COL_NAME)); - Lists.newArrayList(getTestMessages().get(0), getTestMessages().get(2)); - - incrementalCursorCheck( - COL_LAST_NAME_WITH_SPACE, - COL_LAST_NAME_WITH_SPACE, - "patent", - "vash", - Lists.newArrayList(firstMessage, secondMessage), - streamWithSpaces); + return List.of(firstMessage, secondMessage); } @Test - void testIncrementalTimestampCheckCursor() throws Exception { - incrementalTimestampCheck(); + void testIncrementalDateCheckCursor() throws Exception { + incrementalDateCheck(); } - protected void incrementalTimestampCheck() throws Exception { + protected void incrementalDateCheck() throws Exception { incrementalCursorCheck( COL_UPDATED_AT, "2005-10-18T00:00:00Z", "2006-10-19T00:00:00Z", - Lists.newArrayList(getTestMessages().get(1), getTestMessages().get(2))); + List.of(getTestMessages().get(1), getTestMessages().get(2))); } @Test @@ -578,7 +593,7 @@ void testIncrementalCursorChanges() throws Exception { // records to (incorrectly) be filtered out. 
"data", "vash", - Lists.newArrayList(getTestMessages())); + getTestMessages()); } @Test @@ -587,31 +602,21 @@ void testReadOneTableIncrementallyTwice() throws Exception { final ConfiguredAirbyteCatalog configuredCatalog = getConfiguredCatalogWithOneStream(namespace); configuredCatalog.getStreams().forEach(airbyteStream -> { airbyteStream.setSyncMode(SyncMode.INCREMENTAL); - airbyteStream.setCursorField(Lists.newArrayList(COL_ID)); + airbyteStream.setCursorField(List.of(COL_ID)); airbyteStream.setDestinationSyncMode(DestinationSyncMode.APPEND); }); - final DbState state = new DbState() - .withStreams(Lists.newArrayList(new DbStreamState().withStreamName(streamName).withStreamNamespace(namespace))); final List actualMessagesFirstSync = MoreIterators - .toList(source.read(config, configuredCatalog, Jsons.jsonNode(state))); + .toList(source.read(config, configuredCatalog, createEmptyState(streamName, namespace))); final Optional stateAfterFirstSyncOptional = actualMessagesFirstSync.stream() .filter(r -> r.getType() == Type.STATE).findFirst(); assertTrue(stateAfterFirstSyncOptional.isPresent()); - database.execute(connection -> { - connection.createStatement().execute( - String.format("INSERT INTO %s(id, name, updated_at) VALUES (4,'riker', '2006-10-19')", - getFullyQualifiedTableName(TABLE_NAME))); - connection.createStatement().execute( - String.format("INSERT INTO %s(id, name, updated_at) VALUES (5, 'data', '2006-10-19')", - getFullyQualifiedTableName(TABLE_NAME))); - }); + executeStatementReadIncrementallyTwice(); final List actualMessagesSecondSync = MoreIterators - .toList(source.read(config, configuredCatalog, - stateAfterFirstSyncOptional.get().getState().getData())); + .toList(source.read(config, configuredCatalog, extractState(stateAfterFirstSyncOptional.get()))); assertEquals(2, (int) actualMessagesSecondSync.stream().filter(r -> r.getType() == Type.RECORD).count()); @@ -619,35 +624,41 @@ void testReadOneTableIncrementallyTwice() throws Exception { 
setEmittedAtToNull(actualMessagesSecondSync); - assertTrue(expectedMessages.size() == actualMessagesSecondSync.size()); - assertTrue(expectedMessages.containsAll(actualMessagesSecondSync)); - assertTrue(actualMessagesSecondSync.containsAll(expectedMessages)); + assertEquals(expectedMessages.size(), actualMessagesSecondSync.size()); + assertEquals(expectedMessages, actualMessagesSecondSync); + } + + protected void executeStatementReadIncrementallyTwice() throws SQLException { + database.execute(connection -> { + connection.createStatement().execute( + String.format("INSERT INTO %s(id, name, updated_at) VALUES (4,'riker', '2006-10-19')", + getFullyQualifiedTableName(TABLE_NAME))); + connection.createStatement().execute( + String.format("INSERT INTO %s(id, name, updated_at) VALUES (5, 'data', '2006-10-19')", + getFullyQualifiedTableName(TABLE_NAME))); + }); } - protected List getExpectedAirbyteMessagesSecondSync(String namespace) { + protected List getExpectedAirbyteMessagesSecondSync(final String namespace) { final List expectedMessages = new ArrayList<>(); expectedMessages.add(new AirbyteMessage().withType(Type.RECORD) .withRecord(new AirbyteRecordMessage().withStream(streamName).withNamespace(namespace) - .withData(Jsons.jsonNode(ImmutableMap + .withData(Jsons.jsonNode(Map .of(COL_ID, ID_VALUE_4, COL_NAME, "riker", COL_UPDATED_AT, "2006-10-19T00:00:00Z"))))); expectedMessages.add(new AirbyteMessage().withType(Type.RECORD) .withRecord(new AirbyteRecordMessage().withStream(streamName).withNamespace(namespace) - .withData(Jsons.jsonNode(ImmutableMap + .withData(Jsons.jsonNode(Map .of(COL_ID, ID_VALUE_5, COL_NAME, "data", COL_UPDATED_AT, "2006-10-19T00:00:00Z"))))); - expectedMessages.add(new AirbyteMessage() - .withType(Type.STATE) - .withState(new AirbyteStateMessage() - .withData(Jsons.jsonNode(new DbState() - .withCdc(false) - .withStreams(Lists.newArrayList(new DbStreamState() - .withStreamName(streamName) - .withStreamNamespace(namespace) - 
.withCursorField(ImmutableList.of(COL_ID)) - .withCursor("5"))))))); + final DbStreamState state = new DbStreamState() + .withStreamName(streamName) + .withStreamNamespace(namespace) + .withCursorField(List.of(COL_ID)) + .withCursor("5"); + expectedMessages.addAll(createExpectedTestMessages(List.of(state))); return expectedMessages; } @@ -679,14 +690,12 @@ void testReadMultipleTablesIncrementally() throws Exception { Field.of(COL_NAME, JsonSchemaType.STRING))); configuredCatalog.getStreams().forEach(airbyteStream -> { airbyteStream.setSyncMode(SyncMode.INCREMENTAL); - airbyteStream.setCursorField(Lists.newArrayList(COL_ID)); + airbyteStream.setCursorField(List.of(COL_ID)); airbyteStream.setDestinationSyncMode(DestinationSyncMode.APPEND); }); - final DbState state = new DbState() - .withStreams(Lists.newArrayList(new DbStreamState().withStreamName(streamName).withStreamNamespace(namespace))); final List actualMessagesFirstSync = MoreIterators - .toList(source.read(config, configuredCatalog, Jsons.jsonNode(state))); + .toList(source.read(config, configuredCatalog, createEmptyState(streamName, namespace))); // get last state message. final Optional stateAfterFirstSyncOptional = actualMessagesFirstSync.stream() @@ -696,7 +705,46 @@ void testReadMultipleTablesIncrementally() throws Exception { // we know the second streams messages are the same as the first minus the updated at column. so we // cheat and generate the expected messages off of the first expected messages. 
- final List secondStreamExpectedMessages = getTestMessages() + final List secondStreamExpectedMessages = getAirbyteMessagesSecondStreamWithNamespace(streamName2); + + // Represents the state after the first stream has been updated + final List expectedStateStreams1 = List.of( + new DbStreamState() + .withStreamName(streamName) + .withStreamNamespace(namespace) + .withCursorField(List.of(COL_ID)) + .withCursor("3"), + new DbStreamState() + .withStreamName(streamName2) + .withStreamNamespace(namespace) + .withCursorField(List.of(COL_ID))); + + // Represents the state after both streams have been updated + final List expectedStateStreams2 = List.of( + new DbStreamState() + .withStreamName(streamName) + .withStreamNamespace(namespace) + .withCursorField(List.of(COL_ID)) + .withCursor("3"), + new DbStreamState() + .withStreamName(streamName2) + .withStreamNamespace(namespace) + .withCursorField(List.of(COL_ID)) + .withCursor("3")); + + final List expectedMessagesFirstSync = new ArrayList<>(getTestMessages()); + expectedMessagesFirstSync.add(createStateMessage(expectedStateStreams1.get(0), expectedStateStreams1)); + expectedMessagesFirstSync.addAll(secondStreamExpectedMessages); + expectedMessagesFirstSync.add(createStateMessage(expectedStateStreams2.get(1), expectedStateStreams2)); + + setEmittedAtToNull(actualMessagesFirstSync); + + assertEquals(expectedMessagesFirstSync.size(), actualMessagesFirstSync.size()); + assertEquals(expectedMessagesFirstSync, actualMessagesFirstSync); + } + + protected List getAirbyteMessagesSecondStreamWithNamespace(final String streamName2) { + return getTestMessages() .stream() .map(Jsons::clone) .peek(m -> { @@ -706,46 +754,6 @@ void testReadMultipleTablesIncrementally() throws Exception { convertIdBasedOnDatabase(m.getRecord().getData().get(COL_ID).asInt())); }) .collect(Collectors.toList()); - final List expectedMessagesFirstSync = new ArrayList<>(getTestMessages()); - expectedMessagesFirstSync.add(new AirbyteMessage() - 
.withType(Type.STATE) - .withState(new AirbyteStateMessage() - .withData(Jsons.jsonNode(new DbState() - .withCdc(false) - .withStreams(Lists.newArrayList( - new DbStreamState() - .withStreamName(streamName) - .withStreamNamespace(namespace) - .withCursorField(ImmutableList.of(COL_ID)) - .withCursor("3"), - new DbStreamState() - .withStreamName(streamName2) - .withStreamNamespace(namespace) - .withCursorField(ImmutableList.of(COL_ID)))))))); - - expectedMessagesFirstSync.addAll(secondStreamExpectedMessages); - expectedMessagesFirstSync.add(new AirbyteMessage() - .withType(Type.STATE) - .withState(new AirbyteStateMessage() - .withData(Jsons.jsonNode(new DbState() - .withCdc(false) - .withStreams(Lists.newArrayList( - new DbStreamState() - .withStreamName(streamName) - .withStreamNamespace(namespace) - .withCursorField(ImmutableList.of(COL_ID)) - .withCursor("3"), - new DbStreamState() - .withStreamName(streamName2) - .withStreamNamespace(namespace) - .withCursorField(ImmutableList.of(COL_ID)) - .withCursor("3"))))))); - - setEmittedAtToNull(actualMessagesFirstSync); - - assertTrue(expectedMessagesFirstSync.size() == actualMessagesFirstSync.size()); - assertTrue(expectedMessagesFirstSync.containsAll(actualMessagesFirstSync)); - assertTrue(actualMessagesFirstSync.containsAll(expectedMessagesFirstSync)); } // when initial and final cursor fields are the same. 
@@ -780,39 +788,34 @@ private void incrementalCursorCheck( final ConfiguredAirbyteStream airbyteStream) throws Exception { airbyteStream.setSyncMode(SyncMode.INCREMENTAL); - airbyteStream.setCursorField(Lists.newArrayList(cursorField)); + airbyteStream.setCursorField(List.of(cursorField)); airbyteStream.setDestinationSyncMode(DestinationSyncMode.APPEND); - final DbState state = new DbState() - .withStreams(Lists.newArrayList(new DbStreamState() - .withStreamName(airbyteStream.getStream().getName()) - .withStreamNamespace(airbyteStream.getStream().getNamespace()) - .withCursorField(ImmutableList.of(initialCursorField)) - .withCursor(initialCursorValue))); - final ConfiguredAirbyteCatalog configuredCatalog = new ConfiguredAirbyteCatalog() - .withStreams(ImmutableList.of(airbyteStream)); + .withStreams(List.of(airbyteStream)); + + final DbStreamState dbStreamState = new DbStreamState() + .withStreamName(airbyteStream.getStream().getName()) + .withStreamNamespace(airbyteStream.getStream().getNamespace()) + .withCursorField(List.of(initialCursorField)) + .withCursor(initialCursorValue); final List actualMessages = MoreIterators - .toList(source.read(config, configuredCatalog, Jsons.jsonNode(state))); + .toList(source.read(config, configuredCatalog, Jsons.jsonNode(createState(List.of(dbStreamState))))); setEmittedAtToNull(actualMessages); + final List expectedStreams = List.of( + new DbStreamState() + .withStreamName(airbyteStream.getStream().getName()) + .withStreamNamespace(airbyteStream.getStream().getNamespace()) + .withCursorField(List.of(cursorField)) + .withCursor(endCursorValue)); final List expectedMessages = new ArrayList<>(expectedRecordMessages); - expectedMessages.add(new AirbyteMessage() - .withType(Type.STATE) - .withState(new AirbyteStateMessage() - .withData(Jsons.jsonNode(new DbState() - .withCdc(false) - .withStreams(Lists.newArrayList(new DbStreamState() - .withStreamName(airbyteStream.getStream().getName()) - 
.withStreamNamespace(airbyteStream.getStream().getNamespace()) - .withCursorField(ImmutableList.of(cursorField)) - .withCursor(endCursorValue))))))); - - assertTrue(expectedMessages.size() == actualMessages.size()); - assertTrue(expectedMessages.containsAll(actualMessages)); - assertTrue(actualMessages.containsAll(expectedMessages)); + expectedMessages.addAll(createExpectedTestMessages(expectedStreams)); + + assertEquals(expectedMessages.size(), actualMessages.size()); + assertEquals(expectedMessages, actualMessages); } // get catalog and perform a defensive copy. @@ -826,14 +829,14 @@ protected ConfiguredAirbyteCatalog getConfiguredCatalogWithOneStream(final Strin } protected AirbyteCatalog getCatalog(final String defaultNamespace) { - return new AirbyteCatalog().withStreams(Lists.newArrayList( + return new AirbyteCatalog().withStreams(List.of( CatalogHelpers.createAirbyteStream( TABLE_NAME, defaultNamespace, Field.of(COL_ID, JsonSchemaType.NUMBER), Field.of(COL_NAME, JsonSchemaType.STRING), Field.of(COL_UPDATED_AT, JsonSchemaType.STRING)) - .withSupportedSyncModes(Lists.newArrayList(SyncMode.FULL_REFRESH, SyncMode.INCREMENTAL)) + .withSupportedSyncModes(List.of(SyncMode.FULL_REFRESH, SyncMode.INCREMENTAL)) .withSourceDefinedPrimaryKey(List.of(List.of(COL_ID))), CatalogHelpers.createAirbyteStream( TABLE_NAME_WITHOUT_PK, @@ -841,7 +844,7 @@ protected AirbyteCatalog getCatalog(final String defaultNamespace) { Field.of(COL_ID, JsonSchemaType.NUMBER), Field.of(COL_NAME, JsonSchemaType.STRING), Field.of(COL_UPDATED_AT, JsonSchemaType.STRING)) - .withSupportedSyncModes(Lists.newArrayList(SyncMode.FULL_REFRESH, SyncMode.INCREMENTAL)) + .withSupportedSyncModes(List.of(SyncMode.FULL_REFRESH, SyncMode.INCREMENTAL)) .withSourceDefinedPrimaryKey(Collections.emptyList()), CatalogHelpers.createAirbyteStream( TABLE_NAME_COMPOSITE_PK, @@ -849,34 +852,62 @@ protected AirbyteCatalog getCatalog(final String defaultNamespace) { Field.of(COL_FIRST_NAME, JsonSchemaType.STRING), 
Field.of(COL_LAST_NAME, JsonSchemaType.STRING), Field.of(COL_UPDATED_AT, JsonSchemaType.STRING)) - .withSupportedSyncModes(Lists.newArrayList(SyncMode.FULL_REFRESH, SyncMode.INCREMENTAL)) + .withSupportedSyncModes(List.of(SyncMode.FULL_REFRESH, SyncMode.INCREMENTAL)) .withSourceDefinedPrimaryKey( List.of(List.of(COL_FIRST_NAME), List.of(COL_LAST_NAME))))); } protected List getTestMessages() { - return Lists.newArrayList( + return List.of( new AirbyteMessage().withType(Type.RECORD) .withRecord(new AirbyteRecordMessage().withStream(streamName).withNamespace(getDefaultNamespace()) - .withData(Jsons.jsonNode(ImmutableMap + .withData(Jsons.jsonNode(Map .of(COL_ID, ID_VALUE_1, COL_NAME, "picard", COL_UPDATED_AT, "2004-10-19T00:00:00Z")))), new AirbyteMessage().withType(Type.RECORD) .withRecord(new AirbyteRecordMessage().withStream(streamName).withNamespace(getDefaultNamespace()) - .withData(Jsons.jsonNode(ImmutableMap + .withData(Jsons.jsonNode(Map .of(COL_ID, ID_VALUE_2, COL_NAME, "crusher", COL_UPDATED_AT, "2005-10-19T00:00:00Z")))), new AirbyteMessage().withType(Type.RECORD) .withRecord(new AirbyteRecordMessage().withStream(streamName).withNamespace(getDefaultNamespace()) - .withData(Jsons.jsonNode(ImmutableMap + .withData(Jsons.jsonNode(Map .of(COL_ID, ID_VALUE_3, COL_NAME, "vash", COL_UPDATED_AT, "2006-10-19T00:00:00Z"))))); } + protected List createExpectedTestMessages(final List states) { + return supportsPerStream() + ? 
states.stream() + .map(s -> new AirbyteMessage().withType(Type.STATE) + .withState( + new AirbyteStateMessage().withType(AirbyteStateType.STREAM) + .withStream(new AirbyteStreamState() + .withStreamDescriptor(new StreamDescriptor().withNamespace(s.getStreamNamespace()).withName(s.getStreamName())) + .withStreamState(Jsons.jsonNode(s))) + .withData(Jsons.jsonNode(new DbState().withCdc(false).withStreams(states))))) + .collect( + Collectors.toList()) + : List.of(new AirbyteMessage().withType(Type.STATE).withState(new AirbyteStateMessage().withType(AirbyteStateType.LEGACY) + .withData(Jsons.jsonNode(new DbState().withCdc(false).withStreams(states))))); + } + + protected List createState(final List states) { + return supportsPerStream() + ? states.stream() + .map(s -> new AirbyteStateMessage().withType(AirbyteStateType.STREAM) + .withStream(new AirbyteStreamState() + .withStreamDescriptor(new StreamDescriptor().withNamespace(s.getStreamNamespace()).withName(s.getStreamName())) + .withStreamState(Jsons.jsonNode(s)))) + .collect( + Collectors.toList()) + : List.of(new AirbyteStateMessage().withType(AirbyteStateType.LEGACY).withData(Jsons.jsonNode(new DbState().withStreams(states)))); + } + protected ConfiguredAirbyteStream createTableWithSpaces() throws SQLException { final String tableNameWithSpaces = TABLE_NAME_WITH_SPACES + "2"; final String streamName2 = tableNameWithSpaces; @@ -967,4 +998,67 @@ protected static void setEmittedAtToNull(final Iterable messages } } + /** + * Tests whether the connector under test supports the per-stream state format or should use the + * legacy format for data generated by this test. + * + * @return {@code true} if the connector supports the per-stream state format or {@code false} if it + * does not support the per-stream state format (e.g. legacy format supported). Default + * value is {@code false}. 
+ */ + protected boolean supportsPerStream() { + return false; + } + + /** + * Creates empty state with the provided stream name and namespace. + * + * @param streamName The stream name. + * @param streamNamespace The stream namespace. + * @return {@link JsonNode} representation of the generated empty state. + */ + protected JsonNode createEmptyState(final String streamName, final String streamNamespace) { + if (supportsPerStream()) { + final AirbyteStateMessage airbyteStateMessage = new AirbyteStateMessage() + .withType(AirbyteStateType.STREAM) + .withStream(new AirbyteStreamState().withStreamDescriptor(new StreamDescriptor().withName(streamName).withNamespace(streamNamespace))); + return Jsons.jsonNode(List.of(airbyteStateMessage)); + } else { + final DbState dbState = new DbState() + .withStreams(List.of(new DbStreamState().withStreamName(streamName).withStreamNamespace(streamNamespace))); + return Jsons.jsonNode(dbState); + } + } + + /** + * Extracts the state component from the provided {@link AirbyteMessage} based on the value returned + * by {@link #supportsPerStream()}. + * + * @param airbyteMessage An {@link AirbyteMessage} that contains state. + * @return A {@link JsonNode} representation of the state contained in the {@link AirbyteMessage}. 
+ */ + protected JsonNode extractState(final AirbyteMessage airbyteMessage) { + if (supportsPerStream()) { + return Jsons.jsonNode(List.of(airbyteMessage.getState())); + } else { + return airbyteMessage.getState().getData(); + } + } + + protected AirbyteMessage createStateMessage(final DbStreamState dbStreamState, final List legacyStates) { + if (supportsPerStream()) { + return new AirbyteMessage().withType(Type.STATE) + .withState( + new AirbyteStateMessage().withType(AirbyteStateType.STREAM) + .withStream(new AirbyteStreamState() + .withStreamDescriptor(new StreamDescriptor().withNamespace(dbStreamState.getStreamNamespace()) + .withName(dbStreamState.getStreamName())) + .withStreamState(Jsons.jsonNode(dbStreamState))) + .withData(Jsons.jsonNode(new DbState().withCdc(false).withStreams(legacyStates)))); + } else { + return new AirbyteMessage().withType(Type.STATE).withState(new AirbyteStateMessage().withType(AirbyteStateType.LEGACY) + .withData(Jsons.jsonNode(new DbState().withCdc(false).withStreams(legacyStates)))); + } + } + } diff --git a/airbyte-integrations/connectors/source-kafka/Dockerfile b/airbyte-integrations/connectors/source-kafka/Dockerfile index b1283bd5a042..b34d30c35565 100644 --- a/airbyte-integrations/connectors/source-kafka/Dockerfile +++ b/airbyte-integrations/connectors/source-kafka/Dockerfile @@ -16,5 +16,5 @@ ENV APPLICATION source-kafka COPY --from=build /airbyte /airbyte -LABEL io.airbyte.version=0.1.6 +LABEL io.airbyte.version=0.1.7 LABEL io.airbyte.name=airbyte/source-kafka diff --git a/airbyte-integrations/connectors/source-kafka/build.gradle b/airbyte-integrations/connectors/source-kafka/build.gradle index a7acc9c44b71..028ea061692b 100644 --- a/airbyte-integrations/connectors/source-kafka/build.gradle +++ b/airbyte-integrations/connectors/source-kafka/build.gradle @@ -19,7 +19,7 @@ dependencies { integrationTestJavaImplementation project(':airbyte-integrations:bases:standard-source-test') integrationTestJavaImplementation 
project(':airbyte-integrations:connectors:source-kafka') - integrationTestJavaImplementation libs.testcontainers.kafka + integrationTestJavaImplementation libs.connectors.testcontainers.kafka implementation files(project(':airbyte-integrations:bases:base-java').airbyteDocker.outputs) } diff --git a/airbyte-integrations/connectors/source-linkedin-ads/integration_tests/invalid_config.json b/airbyte-integrations/connectors/source-linkedin-ads/integration_tests/invalid_config.json index 67e8909837f2..ba5acdbb0d66 100644 --- a/airbyte-integrations/connectors/source-linkedin-ads/integration_tests/invalid_config.json +++ b/airbyte-integrations/connectors/source-linkedin-ads/integration_tests/invalid_config.json @@ -1,10 +1,10 @@ { "start_date": "2021-08-01", - "account_ids": [1,2], + "account_ids": [1, 2], "credentials": { "auth_method": "oAuth2.0", "client_id": "client_id", "client_secret": "client_secret", "refresh_token": "refresh_token" } -} \ No newline at end of file +} diff --git a/airbyte-integrations/connectors/source-marketo/Dockerfile b/airbyte-integrations/connectors/source-marketo/Dockerfile index 83d335c1d40b..b84ad46b586f 100644 --- a/airbyte-integrations/connectors/source-marketo/Dockerfile +++ b/airbyte-integrations/connectors/source-marketo/Dockerfile @@ -34,5 +34,5 @@ COPY source_marketo ./source_marketo ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] -LABEL io.airbyte.version=0.1.3 +LABEL io.airbyte.version=0.1.4 LABEL io.airbyte.name=airbyte/source-marketo diff --git a/airbyte-integrations/connectors/source-marketo/acceptance-test-config.yml b/airbyte-integrations/connectors/source-marketo/acceptance-test-config.yml index 4fe9da86cfff..583c5ba56660 100644 --- a/airbyte-integrations/connectors/source-marketo/acceptance-test-config.yml +++ b/airbyte-integrations/connectors/source-marketo/acceptance-test-config.yml @@ -14,7 +14,7 @@ tests: basic_read: - config_path: 
"secrets/config.json" configured_catalog_path: "integration_tests/configured_catalog.json" - empty_streams: [] + empty_streams: ["activities_visit_webpage"] timeout_seconds: 3600 expect_records: path: "integration_tests/expected_records.txt" diff --git a/airbyte-integrations/connectors/source-marketo/integration_tests/expected_records.txt b/airbyte-integrations/connectors/source-marketo/integration_tests/expected_records.txt index 90bdb0ecdb91..81f2152caff2 100644 --- a/airbyte-integrations/connectors/source-marketo/integration_tests/expected_records.txt +++ b/airbyte-integrations/connectors/source-marketo/integration_tests/expected_records.txt @@ -1,11 +1,11 @@ -{"stream": "programs", "data": {"id": 1016, "name": "123", "description": "", "createdAt": "2021-09-01T16:02:30Z", "updatedAt": "2021-09-01T16:06:57Z", "url": "https://app-sj32.marketo.com/#EBP1016A1", "type": "Email", "channel": "Email Send", "folder": {"type": "Program", "value": 1003, "folderName": "API Test Program"}, "status": "unlocked", "workspace": "Default"}, "emitted_at": 1638527519000} -{"stream": "programs", "data": {"id": 1017, "name": "air", "description": "", "createdAt": "2021-09-01T16:09:23Z", "updatedAt": "2021-09-01T16:09:23Z", "url": "https://app-sj32.marketo.com/#EBP1017A1", "type": "Email", "channel": "Email Send", "folder": {"type": "Program", "value": 1003, "folderName": "API Test Program"}, "status": "unlocked", "workspace": "Default"}, "emitted_at": 1638527519000} -{"stream": "programs", "data": {"id": 1003, "name": "API Test Program", "description": "Sample API Program", "createdAt": "2021-01-18T13:55:44Z", "updatedAt": "2021-09-01T16:19:32Z", "url": "https://app-sj32.marketo.com/#PG1003A1", "type": "Default", "channel": "Online Advertising", "folder": {"type": "Folder", "value": 45, "folderName": "Active Marketing Programs"}, "status": "", "workspace": "Default"}, "emitted_at": 1638527519000} -{"stream": "programs", "data": {"id": 1018, "name": "Jean Lafleur", "description": "", 
"createdAt": "2021-09-08T12:49:49Z", "updatedAt": "2021-09-08T12:49:49Z", "url": "https://app-sj32.marketo.com/#PG1018A1", "type": "Default", "channel": "Email Blast", "folder": {"type": "Folder", "value": 52, "folderName": "Web Forms"}, "status": "", "workspace": "Default"}, "emitted_at": 1638527519000} -{"stream": "programs", "data": {"id": 1019, "name": "Test", "description": "", "createdAt": "2021-09-08T12:59:25Z", "updatedAt": "2021-09-08T12:59:25Z", "url": "https://app-sj32.marketo.com/#PG1019A1", "type": "Default", "channel": "Email Blast", "folder": {"type": "Folder", "value": 52, "folderName": "Web Forms"}, "status": "", "workspace": "Default"}, "emitted_at": 1638527519000} -{"stream": "programs", "data": {"id": 1020, "name": "TEST1", "description": "", "createdAt": "2021-09-08T13:21:41Z", "updatedAt": "2021-09-08T13:21:41Z", "url": "https://app-sj32.marketo.com/#PG1020A1", "type": "Default", "channel": "Email Blast", "folder": {"type": "Folder", "value": 52, "folderName": "Web Forms"}, "status": "", "workspace": "Default"}, "emitted_at": 1638527519000} -{"stream": "programs", "data": {"id": 1021, "name": "TEST_23", "description": "This is for Test", "createdAt": "2021-09-09T09:00:21Z", "updatedAt": "2021-09-09T09:00:22Z", "url": "https://app-sj32.marketo.com/#PG1021A1", "type": "Default", "channel": "Email Blast", "folder": {"type": "Folder", "value": 52, "folderName": "Web Forms"}, "status": "", "workspace": "Default"}, "emitted_at": 1638527519000} -{"stream": "programs", "data": {"id": 1022, "name": "Test_Demo1", "description": "bla bla", "createdAt": "2021-09-09T14:40:14Z", "updatedAt": "2021-09-09T14:40:14Z", "url": "https://app-sj32.marketo.com/#PG1022A1", "type": "Default", "channel": "Email Blast", "folder": {"type": "Folder", "value": 52, "folderName": "Web Forms"}, "status": "", "workspace": "Default"}, "emitted_at": 1638527519000} +{"stream": "programs", "data": {"id": 1016, "name": "123", "description": "", "createdAt": "2021-09-01T16:02:30Z", 
"updatedAt": "2022-06-21T06:50:32Z", "url": "https://app-sj32.marketo.com/#EBP1016A1", "type": "Email", "channel": "Email Send", "folder": {"type": "Program", "value": 1003, "folderName": "API Test Program"}, "status": "locked", "workspace": "Default", "headStart": false}, "emitted_at": 1655800476224} +{"stream": "programs", "data": {"id": 1017, "name": "air", "description": "", "createdAt": "2021-09-01T16:09:23Z", "updatedAt": "2022-06-21T06:51:01Z", "url": "https://app-sj32.marketo.com/#EBP1017A1", "type": "Email", "channel": "Email Send", "folder": {"type": "Program", "value": 1003, "folderName": "API Test Program"}, "status": "locked", "workspace": "Default", "headStart": false}, "emitted_at": 1655800476226} +{"stream": "programs", "data": {"id": 1003, "name": "API Test Program", "description": "Sample API Program", "createdAt": "2021-01-18T13:55:44Z", "updatedAt": "2022-06-21T06:54:59Z", "url": "https://app-sj32.marketo.com/#PG1003A1", "type": "Default", "channel": "Email Blast", "folder": {"type": "Folder", "value": 45, "folderName": "Active Marketing Programs"}, "status": "", "workspace": "Default", "headStart": false}, "emitted_at": 1655800476226} +{"stream": "programs", "data": {"id": 1018, "name": "Jean Lafleur", "description": "", "createdAt": "2021-09-08T12:49:49Z", "updatedAt": "2022-06-21T06:53:28Z", "url": "https://app-sj32.marketo.com/#PG1018A1", "type": "Default", "channel": "Online Advertising", "folder": {"type": "Folder", "value": 52, "folderName": "Web Forms"}, "status": "", "workspace": "Default", "headStart": false}, "emitted_at": 1655800476227} +{"stream": "programs", "data": {"id": 1019, "name": "Test", "description": "", "createdAt": "2021-09-08T12:59:25Z", "updatedAt": "2022-06-21T06:53:45Z", "url": "https://app-sj32.marketo.com/#PG1019A1", "type": "Default", "channel": "List Import", "folder": {"type": "Folder", "value": 52, "folderName": "Web Forms"}, "status": "", "workspace": "Default", "headStart": false}, "emitted_at": 
1655800476227} +{"stream": "programs", "data": {"id": 1020, "name": "TEST1", "description": "", "createdAt": "2021-09-08T13:21:41Z", "updatedAt": "2022-06-21T06:54:03Z", "url": "https://app-sj32.marketo.com/#PG1020A1", "type": "Default", "channel": "Operational", "folder": {"type": "Folder", "value": 52, "folderName": "Web Forms"}, "status": "", "workspace": "Default", "headStart": false}, "emitted_at": 1655800476227} +{"stream": "programs", "data": {"id": 1021, "name": "TEST_23", "description": "This is for Test", "createdAt": "2021-09-09T09:00:21Z", "updatedAt": "2022-06-21T06:54:16Z", "url": "https://app-sj32.marketo.com/#PG1021A1", "type": "Default", "channel": "Web Content", "folder": {"type": "Folder", "value": 52, "folderName": "Web Forms"}, "status": "", "workspace": "Default", "headStart": false}, "emitted_at": 1655800476227} +{"stream": "programs", "data": {"id": 1022, "name": "Test_Demo1", "description": "bla bla", "createdAt": "2021-09-09T14:40:14Z", "updatedAt": "2022-06-21T06:54:29Z", "url": "https://app-sj32.marketo.com/#PG1022A1", "type": "Default", "channel": "Web Request", "folder": {"type": "Folder", "value": 52, "folderName": "Web Forms"}, "status": "", "workspace": "Default", "headStart": false}, "emitted_at": 1655800476227} {"stream": "campaigns", "data": {"id": 1019, "name": "Form Smart Campaign", "type": "trigger", "programName": "Form Program", "programId": 1002, "workspaceName": "Default", "createdAt": "2020-04-09T20:18:24Z", "updatedAt": "2020-10-22T09:03:44Z", "active": false}, "emitted_at": 1638527708000} {"stream": "campaigns", "data": {"id": 1020, "name": "Smart Campaign number 02", "description": "This is a smart campaign creation test.", "type": "batch", "workspaceName": "Default", "createdAt": "2021-01-18T13:37:24Z", "updatedAt": "2021-01-19T22:50:17Z", "active": false}, "emitted_at": 1638527708000} {"stream": "campaigns", "data": {"id": 1021, "name": "Smart Campaign 03", "description": "This is a smart campaign creation test.", 
"type": "batch", "workspaceName": "Default", "createdAt": "2021-01-18T13:38:53Z", "updatedAt": "2021-01-18T13:38:53Z", "active": false}, "emitted_at": 1638527708000} @@ -19,7 +19,7 @@ {"stream": "campaigns", "data": {"id": 1029, "name": "Smart Campaign Number 8", "description": "This is a smart campaign creation test.", "type": "batch", "workspaceName": "Default", "createdAt": "2021-01-18T13:48:48Z", "updatedAt": "2021-01-18T13:48:48Z", "active": false}, "emitted_at": 1638527708000} {"stream": "campaigns", "data": {"id": 1030, "name": "Smart Campaign Number 9", "description": "This is a smart campaign creation test.", "type": "batch", "workspaceName": "Default", "createdAt": "2021-01-18T13:48:49Z", "updatedAt": "2021-01-18T13:48:49Z", "active": false}, "emitted_at": 1638527708000} {"stream": "campaigns", "data": {"id": 1031, "name": "Smart Campaign Number 10", "description": "This is a smart campaign creation test.", "type": "batch", "workspaceName": "Default", "createdAt": "2021-01-18T13:48:50Z", "updatedAt": "2021-01-18T13:48:50Z", "active": false}, "emitted_at": 1638527708000} -{"stream": "lists", "data": {"id": 1001, "name": "Test list", "workspaceId": 1, "workspaceName": "Default", "createdAt": "2021-01-19T20:27:23Z", "updatedAt": "2021-01-19T20:27:24Z"}, "emitted_at": 1638527852000} +{"stream": "lists", "data": {"id": 1001, "name": "Test list", "workspaceId": 1, "workspaceName": "Default", "createdAt": "2021-01-19T20:27:23Z", "updatedAt": "2022-06-21T06:58:01Z"}, "emitted_at": 1638527852000} {"stream": "lists", "data": {"id": 1002, "name": "Test list number 1", "workspaceId": 1, "workspaceName": "Default", "createdAt": "2021-01-19T20:28:00Z", "updatedAt": "2021-01-19T21:55:54Z"}, "emitted_at": 1638527852000} {"stream": "lists", "data": {"id": 1003, "name": "Test list number 2", "workspaceId": 1, "workspaceName": "Default", "createdAt": "2021-01-19T20:28:07Z", "updatedAt": "2021-01-19T20:28:09Z"}, "emitted_at": 1638527852000} {"stream": "lists", "data": {"id": 
1004, "name": "Test list number 3", "workspaceId": 1, "workspaceName": "Default", "createdAt": "2021-01-19T20:28:13Z", "updatedAt": "2021-01-19T20:28:15Z"}, "emitted_at": 1638527852000} @@ -34,10 +34,8 @@ {"stream": "lists", "data": {"id": 1012, "name": "airbyte", "programName": "EM - Auteur - v1", "workspaceId": 1, "workspaceName": "Default", "createdAt": "2021-09-02T09:30:58Z", "updatedAt": "2021-09-02T09:30:59Z"}, "emitted_at": 1638527852000} {"stream": "lists", "data": {"id": 1012, "name": "airbyte", "programName": "EM - Auteur - v1", "workspaceId": 1, "workspaceName": "Default", "createdAt": "2021-09-02T09:30:58Z", "updatedAt": "2021-09-02T09:30:59Z"}, "emitted_at": 1638527852000} {"stream": "lists", "data": {"id": 1012, "name": "airbyte", "programName": "EM - Auteur - v1", "workspaceId": 1, "workspaceName": "Default", "createdAt": "2021-09-02T09:30:58Z", "updatedAt": "2021-09-02T09:30:59Z"}, "emitted_at": 1638527853000} -{"stream": "leads", "data": {"company": null, "site": null, "billingStreet": null, "billingCity": null, "billingState": null, "billingCountry": null, "billingPostalCode": null, "website": null, "mainPhone": null, "annualRevenue": null, "numberOfEmployees": null, "industry": null, "sicCode": null, "mktoCompanyNotes": null, "externalCompanyId": null, "id": 863, "mktoName": "Test-1", "personType": "contact", "mktoIsPartner": false, "isLead": true, "mktoIsCustomer": false, "isAnonymous": false, "salutation": null, "firstName": "Test-1", "middleName": null, "lastName": null, "email": "test-1@test.com", "phone": null, "mobilePhone": null, "fax": null, "title": null, "contactCompany": "77", "dateOfBirth": null, "address": null, "city": null, "state": null, "country": null, "postalCode": "00000", "personTimeZone": null, "originalSourceType": "Web service API", "originalSourceInfo": "Web service API", "registrationSourceType": "Web service API", "registrationSourceInfo": "Web service API", "originalSearchEngine": null, "originalSearchPhrase": null, 
"originalReferrer": null, "emailInvalid": false, "emailInvalidCause": null, "unsubscribed": false, "unsubscribedReason": null, "doNotCall": false, "mktoDoNotCallCause": null, "doNotCallReason": null, "marketingSuspended": false, "marketingSuspendedCause": null, "blackListed": false, "blackListedCause": null, "mktoPersonNotes": null, "anonymousIP": null, "inferredCompany": null, "inferredCountry": null, "inferredCity": null, "inferredStateRegion": null, "inferredPostalCode": null, "inferredMetropolitanArea": null, "inferredPhoneAreaCode": null, "emailSuspended": null, "emailSuspendedCause": null, "emailSuspendedAt": null, "department": null, "createdAt": "2021-08-23T12:35:27Z", "updatedAt": "2021-08-23T12:35:27Z", "cookies": null, "externalSalesPersonId": null, "leadPerson": "863", "leadRole": null, "leadSource": null, "leadStatus": null, "leadScore": null, "urgency": null, "priority": null, "relativeScore": null, "relativeUrgency": null, "rating": null, "personPrimaryLeadInterest": "863", "leadPartitionId": "1", "leadRevenueCycleModelId": null, "leadRevenueStageId": null, "acquisitionProgramId": null, "mktoAcquisitionDate": null}, "emitted_at": 1638529087000} -{"stream": "leads", "data": {"company": "Airbyte", "site": null, "billingStreet": null, "billingCity": null, "billingState": null, "billingCountry": null, "billingPostalCode": null, "website": null, "mainPhone": null, "annualRevenue": null, "numberOfEmployees": null, "industry": null, "sicCode": null, "mktoCompanyNotes": null, "externalCompanyId": null, "id": 864, "mktoName": "yuriiyurii", "personType": "contact", "mktoIsPartner": false, "isLead": true, "mktoIsCustomer": false, "isAnonymous": false, "salutation": null, "firstName": "yuriiyurii", "middleName": null, "lastName": null, "email": "integration-test@airbyte.io", "phone": null, "mobilePhone": null, "fax": null, "title": null, "contactCompany": "78", "dateOfBirth": null, "address": null, "city": null, "state": null, "country": null, "postalCode": 
null, "personTimeZone": null, "originalSourceType": "New lead", "originalSourceInfo": null, "registrationSourceType": "New lead", "registrationSourceInfo": null, "originalSearchEngine": null, "originalSearchPhrase": null, "originalReferrer": "http://mkto-sj320154.com/u/NjAyLUVVTy01OTgAAAF_QLVQN_CmMgjmeDlv2KOH8SvdmQFkcr5E7bB6_u9nyy4qyi8TLSRagKEl2yDz4A8JdOXvOps=", "emailInvalid": false, "emailInvalidCause": null, "unsubscribed": true, "unsubscribedReason": null, "doNotCall": false, "mktoDoNotCallCause": null, "doNotCallReason": null, "marketingSuspended": false, "marketingSuspendedCause": null, "blackListed": false, "blackListedCause": null, "mktoPersonNotes": null, "anonymousIP": null, "inferredCompany": null, "inferredCountry": null, "inferredCity": null, "inferredStateRegion": null, "inferredPostalCode": null, "inferredMetropolitanArea": null, "inferredPhoneAreaCode": null, "emailSuspended": null, "emailSuspendedCause": null, "emailSuspendedAt": null, "department": null, "createdAt": "2021-09-01T14:09:58Z", "updatedAt": "2021-09-01T14:47:26Z", "cookies": "_mch-marketo.com-1630506111294-76141,_mch-marketo.com-1630507625996-85446,_mch-marketo.com-1630509534684-98098,_mch-marketo.com-1630509805945-33648,_mch-marketo.com-1630514099902-54557", "externalSalesPersonId": null, "leadPerson": "864", "leadRole": null, "leadSource": null, "leadStatus": null, "leadScore": null, "urgency": null, "priority": null, "relativeScore": null, "relativeUrgency": null, "rating": null, "personPrimaryLeadInterest": "864", "leadPartitionId": "1", "leadRevenueCycleModelId": null, "leadRevenueStageId": null, "acquisitionProgramId": null, "mktoAcquisitionDate": null}, "emitted_at": 1638529087000} -{"stream": "leads", "data": {"company": "Airbyte", "site": null, "billingStreet": null, "billingCity": null, "billingState": null, "billingCountry": null, "billingPostalCode": null, "website": "airbyte.io", "mainPhone": null, "annualRevenue": null, "numberOfEmployees": null, "industry": null, 
"sicCode": null, "mktoCompanyNotes": null, "externalCompanyId": null, "id": 866, "mktoName": "yurii yurii", "personType": "contact", "mktoIsPartner": false, "isLead": true, "mktoIsCustomer": false, "isAnonymous": false, "salutation": null, "firstName": "yurii", "middleName": null, "lastName": "yurii", "email": "integration-test@airbyte.io", "phone": null, "mobilePhone": null, "fax": null, "title": null, "contactCompany": "79", "dateOfBirth": null, "address": null, "city": null, "state": null, "country": null, "postalCode": null, "personTimeZone": null, "originalSourceType": "New lead", "originalSourceInfo": null, "registrationSourceType": "New lead", "registrationSourceInfo": null, "originalSearchEngine": null, "originalSearchPhrase": null, "originalReferrer": "http://na-sj32.marketo.com/lp/datalineaedev/UnsubscribePage.html?mkt_unsubscribe=1&mkt_tok=NjAyLUVVTy01OTgAAAF_QLVRDCgLykiaUiUq2HHzdAieIK6v1qqh8ssBkS0UG5PAMCUj-e56dwddm82ciLtx9jCsvAndW4xV5GaiveYVSKEql_F4eao37V3Za92pqCFJOV9sXpl69DnXdozZk1WLLGBcUtTujEgBGL87", "emailInvalid": false, "emailInvalidCause": null, "unsubscribed": true, "unsubscribedReason": null, "doNotCall": false, "mktoDoNotCallCause": null, "doNotCallReason": null, "marketingSuspended": false, "marketingSuspendedCause": null, "blackListed": false, "blackListedCause": null, "mktoPersonNotes": null, "anonymousIP": "93.177.75.198", "inferredCompany": null, "inferredCountry": null, "inferredCity": null, "inferredStateRegion": null, "inferredPostalCode": null, "inferredMetropolitanArea": null, "inferredPhoneAreaCode": null, "emailSuspended": null, "emailSuspendedCause": null, "emailSuspendedAt": null, "department": null, "createdAt": "2021-09-01T14:38:02Z", "updatedAt": "2021-09-01T14:47:37Z", "cookies": null, "externalSalesPersonId": null, "leadPerson": "866", "leadRole": null, "leadSource": null, "leadStatus": null, "leadScore": null, "urgency": null, "priority": null, "relativeScore": null, "relativeUrgency": null, "rating": null, 
"personPrimaryLeadInterest": "866", "leadPartitionId": "1", "leadRevenueCycleModelId": null, "leadRevenueStageId": null, "acquisitionProgramId": null, "mktoAcquisitionDate": null}, "emitted_at": 1638529087000} -{"stream": "leads", "data": {"company": "Airbyte", "site": null, "billingStreet": null, "billingCity": null, "billingState": null, "billingCountry": null, "billingPostalCode": null, "website": null, "mainPhone": null, "annualRevenue": null, "numberOfEmployees": null, "industry": null, "sicCode": null, "mktoCompanyNotes": null, "externalCompanyId": null, "id": 867, "mktoName": "Yurii Yurii", "personType": "contact", "mktoIsPartner": false, "isLead": true, "mktoIsCustomer": false, "isAnonymous": false, "salutation": null, "firstName": "Yurii", "middleName": null, "lastName": "Yurii", "email": "yurii.cherniaiev@globallogic.com", "phone": null, "mobilePhone": null, "fax": null, "title": null, "contactCompany": "80", "dateOfBirth": null, "address": null, "city": null, "state": null, "country": null, "postalCode": null, "personTimeZone": null, "originalSourceType": "New lead", "originalSourceInfo": null, "registrationSourceType": "New lead", "registrationSourceInfo": null, "originalSearchEngine": null, "originalSearchPhrase": null, "originalReferrer": null, "emailInvalid": false, "emailInvalidCause": null, "unsubscribed": false, "unsubscribedReason": null, "doNotCall": false, "mktoDoNotCallCause": null, "doNotCallReason": null, "marketingSuspended": false, "marketingSuspendedCause": null, "blackListed": false, "blackListedCause": null, "mktoPersonNotes": null, "anonymousIP": null, "inferredCompany": null, "inferredCountry": null, "inferredCity": null, "inferredStateRegion": null, "inferredPostalCode": null, "inferredMetropolitanArea": null, "inferredPhoneAreaCode": null, "emailSuspended": null, "emailSuspendedCause": null, "emailSuspendedAt": null, "department": null, "createdAt": "2021-09-01T15:21:44Z", "updatedAt": "2021-09-01T15:21:44Z", "cookies": null, 
"externalSalesPersonId": null, "leadPerson": "867", "leadRole": null, "leadSource": null, "leadStatus": null, "leadScore": null, "urgency": null, "priority": null, "relativeScore": null, "relativeUrgency": null, "rating": null, "personPrimaryLeadInterest": "867", "leadPartitionId": "1", "leadRevenueCycleModelId": null, "leadRevenueStageId": null, "acquisitionProgramId": null, "mktoAcquisitionDate": null}, "emitted_at": 1638529087000} -{"stream": "leads", "data": {"company": "Airbyte", "site": null, "billingStreet": null, "billingCity": null, "billingState": null, "billingCountry": null, "billingPostalCode": null, "website": null, "mainPhone": null, "annualRevenue": null, "numberOfEmployees": null, "industry": null, "sicCode": null, "mktoCompanyNotes": null, "externalCompanyId": null, "id": 868, "mktoName": "Yurii Yurii", "personType": "contact", "mktoIsPartner": false, "isLead": true, "mktoIsCustomer": false, "isAnonymous": false, "salutation": null, "firstName": "Yurii", "middleName": null, "lastName": "Yurii", "email": "yurii.cherniaiev@globallogic.com", "phone": null, "mobilePhone": null, "fax": null, "title": null, "contactCompany": "81", "dateOfBirth": null, "address": null, "city": null, "state": null, "country": null, "postalCode": null, "personTimeZone": null, "originalSourceType": "New lead", "originalSourceInfo": null, "registrationSourceType": "New lead", "registrationSourceInfo": null, "originalSearchEngine": null, "originalSearchPhrase": null, "originalReferrer": null, "emailInvalid": false, "emailInvalidCause": null, "unsubscribed": false, "unsubscribedReason": null, "doNotCall": false, "mktoDoNotCallCause": null, "doNotCallReason": null, "marketingSuspended": false, "marketingSuspendedCause": null, "blackListed": false, "blackListedCause": null, "mktoPersonNotes": null, "anonymousIP": null, "inferredCompany": null, "inferredCountry": null, "inferredCity": null, "inferredStateRegion": null, "inferredPostalCode": null, "inferredMetropolitanArea": null, 
"inferredPhoneAreaCode": null, "emailSuspended": null, "emailSuspendedCause": null, "emailSuspendedAt": null, "department": null, "createdAt": "2021-09-01T15:22:28Z", "updatedAt": "2021-09-01T15:22:28Z", "cookies": null, "externalSalesPersonId": null, "leadPerson": "868", "leadRole": null, "leadSource": null, "leadStatus": null, "leadScore": null, "urgency": null, "priority": null, "relativeScore": null, "relativeUrgency": null, "rating": null, "personPrimaryLeadInterest": "868", "leadPartitionId": "1", "leadRevenueCycleModelId": null, "leadRevenueStageId": null, "acquisitionProgramId": null, "mktoAcquisitionDate": null}, "emitted_at": 1638529087000} -{"stream": "leads", "data": {"company": "Airbyte", "site": null, "billingStreet": null, "billingCity": null, "billingState": null, "billingCountry": null, "billingPostalCode": null, "website": null, "mainPhone": null, "annualRevenue": null, "numberOfEmployees": null, "industry": null, "sicCode": null, "mktoCompanyNotes": null, "externalCompanyId": null, "id": 869, "mktoName": "Yurii Yurii", "personType": "contact", "mktoIsPartner": false, "isLead": true, "mktoIsCustomer": false, "isAnonymous": false, "salutation": null, "firstName": "Yurii", "middleName": null, "lastName": "Yurii", "email": "yurii.chenriaiev@globallogic.com", "phone": null, "mobilePhone": null, "fax": null, "title": null, "contactCompany": "82", "dateOfBirth": null, "address": null, "city": null, "state": null, "country": null, "postalCode": null, "personTimeZone": null, "originalSourceType": "New lead", "originalSourceInfo": null, "registrationSourceType": "New lead", "registrationSourceInfo": null, "originalSearchEngine": null, "originalSearchPhrase": null, "originalReferrer": null, "emailInvalid": false, "emailInvalidCause": null, "unsubscribed": false, "unsubscribedReason": null, "doNotCall": false, "mktoDoNotCallCause": null, "doNotCallReason": null, "marketingSuspended": false, "marketingSuspendedCause": null, "blackListed": false, 
"blackListedCause": null, "mktoPersonNotes": null, "anonymousIP": null, "inferredCompany": null, "inferredCountry": null, "inferredCity": null, "inferredStateRegion": null, "inferredPostalCode": null, "inferredMetropolitanArea": null, "inferredPhoneAreaCode": null, "emailSuspended": null, "emailSuspendedCause": null, "emailSuspendedAt": null, "department": null, "createdAt": "2021-09-01T15:23:07Z", "updatedAt": "2021-09-01T15:23:07Z", "cookies": null, "externalSalesPersonId": null, "leadPerson": "869", "leadRole": null, "leadSource": null, "leadStatus": null, "leadScore": null, "urgency": null, "priority": null, "relativeScore": null, "relativeUrgency": null, "rating": null, "personPrimaryLeadInterest": "869", "leadPartitionId": "1", "leadRevenueCycleModelId": null, "leadRevenueStageId": null, "acquisitionProgramId": null, "mktoAcquisitionDate": null}, "emitted_at": 1638529087000} -{"stream": "leads", "data": {"company": null, "site": null, "billingStreet": null, "billingCity": null, "billingState": null, "billingCountry": null, "billingPostalCode": null, "website": null, "mainPhone": null, "annualRevenue": null, "numberOfEmployees": null, "industry": null, "sicCode": null, "mktoCompanyNotes": null, "externalCompanyId": null, "id": 875, "mktoName": "TEST-1-1", "personType": "contact", "mktoIsPartner": false, "isLead": true, "mktoIsCustomer": false, "isAnonymous": false, "salutation": null, "firstName": "TEST-1-1", "middleName": null, "lastName": null, "email": "test-test-test@test.com", "phone": null, "mobilePhone": null, "fax": null, "title": null, "contactCompany": "83", "dateOfBirth": null, "address": null, "city": null, "state": null, "country": null, "postalCode": "1111", "personTimeZone": null, "originalSourceType": "Web service API", "originalSourceInfo": "Web service API", "registrationSourceType": "Web service API", "registrationSourceInfo": "Web service API", "originalSearchEngine": null, "originalSearchPhrase": null, "originalReferrer": null, 
"emailInvalid": false, "emailInvalidCause": null, "unsubscribed": false, "unsubscribedReason": null, "doNotCall": false, "mktoDoNotCallCause": null, "doNotCallReason": null, "marketingSuspended": false, "marketingSuspendedCause": null, "blackListed": false, "blackListedCause": null, "mktoPersonNotes": null, "anonymousIP": null, "inferredCompany": null, "inferredCountry": null, "inferredCity": null, "inferredStateRegion": null, "inferredPostalCode": null, "inferredMetropolitanArea": null, "inferredPhoneAreaCode": null, "emailSuspended": null, "emailSuspendedCause": null, "emailSuspendedAt": null, "department": null, "createdAt": "2021-11-08T22:03:32Z", "updatedAt": "2021-11-08T22:03:32Z", "cookies": null, "externalSalesPersonId": null, "leadPerson": "875", "leadRole": null, "leadSource": null, "leadStatus": null, "leadScore": null, "urgency": null, "priority": null, "relativeScore": null, "relativeUrgency": null, "rating": null, "personPrimaryLeadInterest": "875", "leadPartitionId": "1", "leadRevenueCycleModelId": null, "leadRevenueStageId": null, "acquisitionProgramId": null, "mktoAcquisitionDate": null}, "emitted_at": 1638529399000} +{"stream": "leads", "data": {"company": "Airbyte", "site": null, "billingStreet": null, "billingCity": null, "billingState": null, "billingCountry": null, "billingPostalCode": null, "website": null, "mainPhone": null, "annualRevenue": null, "numberOfEmployees": null, "industry": null, "sicCode": null, "mktoCompanyNotes": null, "externalCompanyId": null, "id": 876, "mktoName": "Expecto Patronum", "personType": "contact", "mktoIsPartner": false, "isLead": true, "mktoIsCustomer": false, "isAnonymous": false, "salutation": null, "firstName": "Expecto", "middleName": null, "lastName": "Patronum", "email": "expecto@patronum.com", "phone": null, "mobilePhone": null, "fax": null, "title": null, "contactCompany": "84", "dateOfBirth": null, "address": null, "city": null, "state": null, "country": null, "postalCode": null, "personTimeZone": 
null, "originalSourceType": "New lead", "originalSourceInfo": null, "registrationSourceType": "New lead", "registrationSourceInfo": null, "originalSearchEngine": null, "originalSearchPhrase": null, "originalReferrer": null, "emailInvalid": false, "emailInvalidCause": null, "unsubscribed": false, "unsubscribedReason": null, "doNotCall": false, "mktoDoNotCallCause": null, "doNotCallReason": null, "marketingSuspended": false, "marketingSuspendedCause": null, "blackListed": false, "blackListedCause": null, "mktoPersonNotes": null, "anonymousIP": null, "inferredCompany": null, "inferredCountry": null, "inferredCity": null, "inferredStateRegion": null, "inferredPostalCode": null, "inferredMetropolitanArea": null, "inferredPhoneAreaCode": null, "emailSuspended": null, "emailSuspendedCause": null, "emailSuspendedAt": null, "department": null, "createdAt": "2022-06-21T07:49:25Z", "updatedAt": "2022-06-21T07:50:05Z", "cookies": null, "externalSalesPersonId": null, "leadPerson": "876", "leadRole": null, "leadSource": null, "leadStatus": null, "leadScore": null, "urgency": null, "priority": null, "relativeScore": null, "relativeUrgency": null, "rating": null, "personPrimaryLeadInterest": "876", "leadPartitionId": "1", "leadRevenueCycleModelId": null, "leadRevenueStageId": null, "acquisitionProgramId": null, "mktoAcquisitionDate": null}, "emitted_at": 1655800613397} +{"stream": "leads", "data": {"company": "FedEx", "site": null, "billingStreet": null, "billingCity": null, "billingState": null, "billingCountry": null, "billingPostalCode": null, "website": null, "mainPhone": null, "annualRevenue": null, "numberOfEmployees": null, "industry": null, "sicCode": null, "mktoCompanyNotes": null, "externalCompanyId": null, "id": 877, "mktoName": "Frodo Baggins", "personType": "contact", "mktoIsPartner": false, "isLead": true, "mktoIsCustomer": false, "isAnonymous": false, "salutation": null, "firstName": "Frodo", "middleName": null, "lastName": "Baggins", "email": "frodo@baggins.com", 
"phone": null, "mobilePhone": null, "fax": null, "title": null, "contactCompany": "85", "dateOfBirth": null, "address": null, "city": null, "state": null, "country": null, "postalCode": null, "personTimeZone": null, "originalSourceType": "New lead", "originalSourceInfo": null, "registrationSourceType": "New lead", "registrationSourceInfo": null, "originalSearchEngine": null, "originalSearchPhrase": null, "originalReferrer": null, "emailInvalid": false, "emailInvalidCause": null, "unsubscribed": false, "unsubscribedReason": null, "doNotCall": false, "mktoDoNotCallCause": null, "doNotCallReason": null, "marketingSuspended": false, "marketingSuspendedCause": null, "blackListed": false, "blackListedCause": null, "mktoPersonNotes": null, "anonymousIP": null, "inferredCompany": null, "inferredCountry": null, "inferredCity": null, "inferredStateRegion": null, "inferredPostalCode": null, "inferredMetropolitanArea": null, "inferredPhoneAreaCode": null, "emailSuspended": null, "emailSuspendedCause": null, "emailSuspendedAt": null, "department": null, "createdAt": "2022-06-21T08:30:55Z", "updatedAt": "2022-06-21T08:30:55Z", "cookies": null, "externalSalesPersonId": null, "leadPerson": "877", "leadRole": null, "leadSource": null, "leadStatus": null, "leadScore": null, "urgency": null, "priority": null, "relativeScore": null, "relativeUrgency": null, "rating": null, "personPrimaryLeadInterest": "877", "leadPartitionId": "1", "leadRevenueCycleModelId": null, "leadRevenueStageId": null, "acquisitionProgramId": null, "mktoAcquisitionDate": null}, "emitted_at": 1655800613399} +{"stream": "leads", "data": {"company": "PizzaHouse", "site": null, "billingStreet": null, "billingCity": null, "billingState": null, "billingCountry": null, "billingPostalCode": null, "website": null, "mainPhone": null, "annualRevenue": null, "numberOfEmployees": null, "industry": null, "sicCode": null, "mktoCompanyNotes": null, "externalCompanyId": null, "id": 878, "mktoName": "Peter Petegrew", 
"personType": "contact", "mktoIsPartner": false, "isLead": true, "mktoIsCustomer": false, "isAnonymous": false, "salutation": null, "firstName": "Peter", "middleName": null, "lastName": "Petegrew", "email": "peter@petegrew.com", "phone": null, "mobilePhone": null, "fax": null, "title": null, "contactCompany": "86", "dateOfBirth": null, "address": null, "city": null, "state": null, "country": null, "postalCode": null, "personTimeZone": null, "originalSourceType": "New lead", "originalSourceInfo": null, "registrationSourceType": "New lead", "registrationSourceInfo": null, "originalSearchEngine": null, "originalSearchPhrase": null, "originalReferrer": null, "emailInvalid": false, "emailInvalidCause": null, "unsubscribed": false, "unsubscribedReason": null, "doNotCall": false, "mktoDoNotCallCause": null, "doNotCallReason": null, "marketingSuspended": false, "marketingSuspendedCause": null, "blackListed": false, "blackListedCause": null, "mktoPersonNotes": null, "anonymousIP": null, "inferredCompany": null, "inferredCountry": null, "inferredCity": null, "inferredStateRegion": null, "inferredPostalCode": null, "inferredMetropolitanArea": null, "inferredPhoneAreaCode": null, "emailSuspended": null, "emailSuspendedCause": null, "emailSuspendedAt": null, "department": null, "createdAt": "2022-06-21T08:31:42Z", "updatedAt": "2022-06-21T08:31:42Z", "cookies": null, "externalSalesPersonId": null, "leadPerson": "878", "leadRole": null, "leadSource": null, "leadStatus": null, "leadScore": null, "urgency": null, "priority": null, "relativeScore": null, "relativeUrgency": null, "rating": null, "personPrimaryLeadInterest": "878", "leadPartitionId": "1", "leadRevenueCycleModelId": null, "leadRevenueStageId": null, "acquisitionProgramId": null, "mktoAcquisitionDate": null}, "emitted_at": 1655800613400} +{"stream": "leads", "data": {"company": "SportLife", "site": null, "billingStreet": null, "billingCity": null, "billingState": null, "billingCountry": null, "billingPostalCode": null, 
"website": null, "mainPhone": null, "annualRevenue": null, "numberOfEmployees": null, "industry": null, "sicCode": null, "mktoCompanyNotes": null, "externalCompanyId": null, "id": 879, "mktoName": "Dudley Dursley", "personType": "contact", "mktoIsPartner": false, "isLead": true, "mktoIsCustomer": false, "isAnonymous": false, "salutation": null, "firstName": "Dudley", "middleName": null, "lastName": "Dursley", "email": "dudley@dursley.com", "phone": null, "mobilePhone": null, "fax": null, "title": null, "contactCompany": "87", "dateOfBirth": null, "address": null, "city": null, "state": null, "country": null, "postalCode": null, "personTimeZone": null, "originalSourceType": "New lead", "originalSourceInfo": null, "registrationSourceType": "New lead", "registrationSourceInfo": null, "originalSearchEngine": null, "originalSearchPhrase": null, "originalReferrer": null, "emailInvalid": false, "emailInvalidCause": null, "unsubscribed": false, "unsubscribedReason": null, "doNotCall": false, "mktoDoNotCallCause": null, "doNotCallReason": null, "marketingSuspended": false, "marketingSuspendedCause": null, "blackListed": false, "blackListedCause": null, "mktoPersonNotes": null, "anonymousIP": null, "inferredCompany": null, "inferredCountry": null, "inferredCity": null, "inferredStateRegion": null, "inferredPostalCode": null, "inferredMetropolitanArea": null, "inferredPhoneAreaCode": null, "emailSuspended": null, "emailSuspendedCause": null, "emailSuspendedAt": null, "department": null, "createdAt": "2022-06-21T08:32:37Z", "updatedAt": "2022-06-21T08:32:37Z", "cookies": null, "externalSalesPersonId": null, "leadPerson": "879", "leadRole": null, "leadSource": null, "leadStatus": null, "leadScore": null, "urgency": null, "priority": null, "relativeScore": null, "relativeUrgency": null, "rating": null, "personPrimaryLeadInterest": "879", "leadPartitionId": "1", "leadRevenueCycleModelId": null, "leadRevenueStageId": null, "acquisitionProgramId": null, "mktoAcquisitionDate": 
null}, "emitted_at": 1655800613400} +{"stream": "leads", "data": {"company": "KeenEye", "site": null, "billingStreet": null, "billingCity": null, "billingState": null, "billingCountry": null, "billingPostalCode": null, "website": null, "mainPhone": null, "annualRevenue": null, "numberOfEmployees": null, "industry": null, "sicCode": null, "mktoCompanyNotes": null, "externalCompanyId": null, "id": 880, "mktoName": "Alastor Moody", "personType": "contact", "mktoIsPartner": false, "isLead": true, "mktoIsCustomer": false, "isAnonymous": false, "salutation": null, "firstName": "Alastor", "middleName": null, "lastName": "Moody", "email": "alastor@moody.com", "phone": null, "mobilePhone": null, "fax": null, "title": null, "contactCompany": "88", "dateOfBirth": null, "address": null, "city": null, "state": null, "country": null, "postalCode": null, "personTimeZone": null, "originalSourceType": "New lead", "originalSourceInfo": null, "registrationSourceType": "New lead", "registrationSourceInfo": null, "originalSearchEngine": null, "originalSearchPhrase": null, "originalReferrer": null, "emailInvalid": false, "emailInvalidCause": null, "unsubscribed": false, "unsubscribedReason": null, "doNotCall": false, "mktoDoNotCallCause": null, "doNotCallReason": null, "marketingSuspended": false, "marketingSuspendedCause": null, "blackListed": false, "blackListedCause": null, "mktoPersonNotes": null, "anonymousIP": null, "inferredCompany": null, "inferredCountry": null, "inferredCity": null, "inferredStateRegion": null, "inferredPostalCode": null, "inferredMetropolitanArea": null, "inferredPhoneAreaCode": null, "emailSuspended": null, "emailSuspendedCause": null, "emailSuspendedAt": null, "department": null, "createdAt": "2022-06-21T08:34:25Z", "updatedAt": "2022-06-21T08:34:25Z", "cookies": null, "externalSalesPersonId": null, "leadPerson": "880", "leadRole": null, "leadSource": null, "leadStatus": null, "leadScore": null, "urgency": null, "priority": null, "relativeScore": null, 
"relativeUrgency": null, "rating": null, "personPrimaryLeadInterest": "880", "leadPartitionId": "1", "leadRevenueCycleModelId": null, "leadRevenueStageId": null, "acquisitionProgramId": null, "mktoAcquisitionDate": null}, "emitted_at": 1655800613401} diff --git a/airbyte-integrations/connectors/source-marketo/setup.py b/airbyte-integrations/connectors/source-marketo/setup.py index 9f645e47ae16..054f5677afb3 100644 --- a/airbyte-integrations/connectors/source-marketo/setup.py +++ b/airbyte-integrations/connectors/source-marketo/setup.py @@ -12,6 +12,7 @@ TEST_REQUIREMENTS = [ "pytest~=6.1", "pytest-mock~=3.6.1", + "requests-mock", "source-acceptance-test", ] diff --git a/airbyte-integrations/connectors/source-marketo/source_marketo/schemas/programs.json b/airbyte-integrations/connectors/source-marketo/source_marketo/schemas/programs.json index 4ac51c43d878..242121f5e26c 100644 --- a/airbyte-integrations/connectors/source-marketo/source_marketo/schemas/programs.json +++ b/airbyte-integrations/connectors/source-marketo/source_marketo/schemas/programs.json @@ -40,6 +40,9 @@ "workspace": { "type": ["null", "string"] }, + "headStart": { + "type": ["null", "boolean"] + }, "folder": { "type": ["object", "null"], "properties": { diff --git a/airbyte-integrations/connectors/source-marketo/source_marketo/source.py b/airbyte-integrations/connectors/source-marketo/source_marketo/source.py index ce119e1635af..59295de6b781 100644 --- a/airbyte-integrations/connectors/source-marketo/source_marketo/source.py +++ b/airbyte-integrations/connectors/source-marketo/source_marketo/source.py @@ -99,7 +99,7 @@ def get_updated_state(self, current_stream_state: MutableMapping[str, Any], late ) } - def stream_slices(self, sync_mode, stream_state: Mapping[str, Any] = None, **kwargs) -> Iterable[Optional[Mapping[str, any]]]: + def stream_slices(self, sync_mode, stream_state: Mapping[str, Any] = None, **kwargs) -> Iterable[Optional[MutableMapping[str, any]]]: """ Override default stream_slices 
CDK method to provide date_slices as page chunks for data fetch. Returns list of dict, example: [{ @@ -172,7 +172,9 @@ def get_export_status(self, stream_slice): def path(self, stream_slice: Mapping[str, Any] = None, **kwargs) -> str: return f"bulk/v1/{self.stream_name}/export/{stream_slice['id']}/file.json" - def stream_slices(self, sync_mode, stream_state: Mapping[str, Any] = None, **kwargs) -> Iterable[Optional[Mapping[str, any]]]: + def stream_slices( + self, sync_mode, stream_state: MutableMapping[str, Any] = None, **kwargs + ) -> Iterable[Optional[MutableMapping[str, any]]]: date_slices = super().stream_slices(sync_mode, stream_state, **kwargs) for date_slice in date_slices: @@ -182,8 +184,12 @@ def stream_slices(self, sync_mode, stream_state: Mapping[str, Any] = None, **kwa export = self.create_export(param) - date_slice["id"] = export["exportId"] - return date_slices + status, export_id = export.get("status", "").lower(), export.get("exportId") + if status != "created" or not export_id: + self.logger.warning(f"Failed to create export job for data slice {date_slice}!") + continue + date_slice["id"] = export_id + yield date_slice def sleep_till_export_completed(self, stream_slice: Mapping[str, Any]) -> bool: while True: diff --git a/airbyte-integrations/connectors/source-marketo/source_marketo/spec.json b/airbyte-integrations/connectors/source-marketo/source_marketo/spec.json index 5e5d57747c42..9af488bf4cdc 100644 --- a/airbyte-integrations/connectors/source-marketo/source_marketo/spec.json +++ b/airbyte-integrations/connectors/source-marketo/source_marketo/spec.json @@ -18,7 +18,6 @@ "client_id": { "title": "Client ID", "type": "string", - "title": "Client ID", "description": "The Client ID of your Marketo developer application. 
See the docs for info on how to obtain this.", "order": 0, "airbyte_secret": true @@ -26,7 +25,6 @@ "client_secret": { "title": "Client Secret", "type": "string", - "title": "Client Secret", "description": "The Client Secret of your Marketo developer application. See the docs for info on how to obtain this.", "order": 1, "airbyte_secret": true @@ -35,7 +33,6 @@ "title": "Start Date", "type": "string", "order": 2, - "title": "Start Date", "description": "UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated.", "examples": ["2020-09-25T00:00:00Z"], "pattern": "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" diff --git a/airbyte-integrations/connectors/source-marketo/unit_tests/conftest.py b/airbyte-integrations/connectors/source-marketo/unit_tests/conftest.py new file mode 100644 index 000000000000..03a9195f4799 --- /dev/null +++ b/airbyte-integrations/connectors/source-marketo/unit_tests/conftest.py @@ -0,0 +1,58 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+# + +import pendulum +import pytest +from source_marketo.source import Activities, MarketoAuthenticator + + +@pytest.fixture(autouse=True) +def mock_requests(requests_mock): + requests_mock.register_uri( + "GET", "https://602-euo-598.mktorest.com/identity/oauth/token", json={"access_token": "token", "expires_in": 3600} + ) + requests_mock.register_uri( + "POST", + "https://602-euo-598.mktorest.com/bulk/v1/activities/export/create.json", + [ + {"json": {"result": [{"exportId": "2c09ce6d", "format": "CSV", "status": "Created", "createdAt": "2022-06-20T08:44:08Z"}]}}, + {"json": {"result": [{"exportId": "cd465f55", "format": "CSV", "status": "Created", "createdAt": "2022-06-20T08:45:08Z"}]}}, + {"json": {"result": [{"exportId": "null", "format": "CSV", "status": "Failed", "createdAt": "2022-06-20T08:46:08Z"}]}}, + {"json": {"result": [{"exportId": "232aafb4", "format": "CSV", "status": "Created", "createdAt": "2022-06-20T08:47:08Z"}]}}, + ], + ) + + +@pytest.fixture +def config(): + start_date = pendulum.now().subtract(days=100).strftime("%Y-%m-%dT%H:%M:%SZ") + config = { + "client_id": "client-id", + "client_secret": "********", + "domain_url": "https://602-EUO-598.mktorest.com", + "start_date": start_date, + "window_in_days": 30, + } + config["authenticator"] = MarketoAuthenticator(config) + return config + + +@pytest.fixture +def send_email_stream(config): + activity = { + "id": 6, + "name": "send_email", + "description": "Send Marketo Email to a person", + "primaryAttribute": {"name": "Mailing ID", "dataType": "integer"}, + "attributes": [ + {"name": "Campaign Run ID", "dataType": "integer"}, + {"name": "Choice Number", "dataType": "integer"}, + {"name": "Has Predictive", "dataType": "boolean"}, + {"name": "Step ID", "dataType": "integer"}, + {"name": "Test Variant", "dataType": "integer"}, + ], + } + stream_name = f"activities_{activity['name']}" + cls = type(stream_name, (Activities,), {"activity": activity}) + return cls(config) diff --git 
a/airbyte-integrations/connectors/source-marketo/unit_tests/test_stream_slices.py b/airbyte-integrations/connectors/source-marketo/unit_tests/test_stream_slices.py new file mode 100644 index 000000000000..6d1b6aac923e --- /dev/null +++ b/airbyte-integrations/connectors/source-marketo/unit_tests/test_stream_slices.py @@ -0,0 +1,19 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + +import logging +from unittest.mock import ANY + +from airbyte_cdk.models.airbyte_protocol import SyncMode + + +def test_create_export_job(send_email_stream, caplog): + caplog.set_level(logging.WARNING) + slices = list(send_email_stream.stream_slices(sync_mode=SyncMode.incremental)) + assert slices == [ + {"endAt": ANY, "id": "2c09ce6d", "startAt": ANY}, + {"endAt": ANY, "id": "cd465f55", "startAt": ANY}, + {"endAt": ANY, "id": "232aafb4", "startAt": ANY}, + ] + assert "Failed to create export job for data slice " in caplog.records[-1].message diff --git a/airbyte-integrations/connectors/source-metabase/Dockerfile b/airbyte-integrations/connectors/source-metabase/Dockerfile new file mode 100644 index 000000000000..52cc9096794a --- /dev/null +++ b/airbyte-integrations/connectors/source-metabase/Dockerfile @@ -0,0 +1,16 @@ +FROM python:3.9-slim + +# Bash is installed for more convenient debugging. +RUN apt-get update && apt-get install -y bash && rm -rf /var/lib/apt/lists/* + +WORKDIR /airbyte/integration_code +COPY source_metabase ./source_metabase +COPY main.py ./ +COPY setup.py ./ +RUN pip install . 
+ +ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" +ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] + +LABEL io.airbyte.version=0.1.0 +LABEL io.airbyte.name=airbyte/source-metabase diff --git a/airbyte-integrations/connectors/source-metabase/README.md b/airbyte-integrations/connectors/source-metabase/README.md new file mode 100644 index 000000000000..83176089ae17 --- /dev/null +++ b/airbyte-integrations/connectors/source-metabase/README.md @@ -0,0 +1,129 @@ +# Metabase Source + +This is the repository for the Metabase source connector, written in Python. +For information about how to use this connector within Airbyte, see [the documentation](https://docs.airbyte.io/integrations/sources/metabase). + +## Local development + +### Prerequisites +**To iterate on this connector, make sure to complete this prerequisites section.** + +#### Minimum Python version required `= 3.7.0` + +#### Build & Activate Virtual Environment and install dependencies +From this connector directory, create a virtual environment: +``` +python -m venv .venv +``` + +This will generate a virtualenv for this module in `.venv/`. Make sure this venv is active in your +development environment of choice. To activate it from the terminal, run: +``` +source .venv/bin/activate +pip install -r requirements.txt +``` +If you are in an IDE, follow your IDE's instructions to activate the virtualenv. + +Note that while we are installing dependencies from `requirements.txt`, you should only edit `setup.py` for your dependencies. `requirements.txt` is +used for editable installs (`pip install -e`) to pull in Python dependencies from the monorepo and will call `setup.py`. +If this is mumbo jumbo to you, don't worry about it, just put your deps in `setup.py` but install using `pip install -r requirements.txt` and everything +should work as you expect. + +#### Building via Gradle +You can also build the connector in Gradle. 
This is typically used in CI and not needed for your development workflow. + +To build using Gradle, from the Airbyte repository root, run: +``` +./gradlew :airbyte-integrations:connectors:source-metabase:build +``` + +#### Create credentials +**If you are a community contributor**, follow the instructions in the [documentation](https://docs.airbyte.io/integrations/sources/metabase) +to generate the necessary credentials. Then create a file `secrets/config.json` conforming to the `source_metabase/spec.yaml` file. +Note that any directory named `secrets` is gitignored across the entire Airbyte repo, so there is no danger of accidentally checking in sensitive information. +See `sample_files/sample_config.json` for a sample config file. + +**If you are an Airbyte core member**, copy the credentials in Lastpass under the secret name `source metabase test creds` +and place them into `secrets/config.json`. + + +### Locally running the connector +``` +python main.py spec +python main.py check --config secrets/config.json +python main.py discover --config secrets/config.json +python main.py read --config secrets/config.json --catalog sample_files/configured_catalog.json +``` + +### Unit Tests +To run unit tests locally, from the connector directory run: +``` +python -m pytest unit_tests +``` + +#### Acceptance Tests +Customize `acceptance-test-config.yml` file to configure tests. See [Source Acceptance Tests](https://docs.airbyte.io/connector-development/testing-connectors/source-acceptance-tests-reference) for more information. +If your connector requires to create or destroy resources for use during acceptance tests create fixtures for it and place them inside integration_tests/acceptance.py. +To run your integration tests with acceptance tests, from the connector root, run +``` +python -m pytest integration_tests -p integration_tests.acceptance +``` + +### Using gradle to run tests +All commands should be run from airbyte project root. 
+To run unit tests: +``` +./gradlew :airbyte-integrations:connectors:source-metabase:unitTest +``` + +To run acceptance and custom integration tests: +``` +./gradlew :airbyte-integrations:connectors:source-metabase:integrationTest +``` + +#### Build +To run your integration tests with docker localy + +First, make sure you build the latest Docker image: +``` +docker build --no-cache . -t airbyte/source-metabase:dev +``` + +You can also build the connector image via Gradle: +``` +./gradlew clean :airbyte-integrations:connectors:source-metabase:airbyteDocker +``` +When building via Gradle, the docker image name and tag, respectively, are the values of the `io.airbyte.name` and `io.airbyte.version` `LABEL`s in +the Dockerfile. + +#### Run +Then run any of the connector commands as follows: +``` +docker run --rm airbyte/source-metabase:dev spec +docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-metabase:dev check --config /secrets/config.json +docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-metabase:dev discover --config /secrets/config.json +docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/sample_files:/sample_files airbyte/source-metabase:dev read --config /secrets/config.json --catalog /sample_files/configured_catalog.json +``` + +### Integration Tests +1. From the airbyte project root, run `./gradlew :airbyte-integrations:connectors:source-metabase:integrationTest` to run the standard integration test suite. +1. To run additional integration tests, place your integration tests in a new directory `integration_tests` and run them with `python -m pytest -s integration_tests`. + Make sure to familiarize yourself with [pytest test discovery](https://docs.pytest.org/en/latest/goodpractices.html#test-discovery) to know how your test files and methods should be named. + +## Dependency Management +All of your dependencies should go in `setup.py`, NOT `requirements.txt`. 
The requirements file is only used to connect internal Airbyte dependencies in the monorepo for local development. + +### Publishing a new version of the connector +You've checked out the repo, implemented a million dollar feature, and you're ready to share your changes with the world. Now what? +1. Make sure your changes are passing unit and integration tests +2. Bump the connector version in `Dockerfile` -- just increment the value of the `LABEL io.airbyte.version` appropriately (we use SemVer). +3. Create a Pull Request +4. Pat yourself on the back for being an awesome contributor +5. Someone from Airbyte will take a look at your PR and iterate with you to merge it into master + + +### additional connector/streams properties of note + +Some metabase streams are mutable, meaning that after an incremental update, new data items could appear *before* +the latest update date. To work around that, define the lookback_window_days to define a window in days to fetch results +before the latest state date, in order to capture "delayed" data items. 
diff --git a/airbyte-integrations/connectors/source-metabase/acceptance-test-config.yml b/airbyte-integrations/connectors/source-metabase/acceptance-test-config.yml new file mode 100644 index 000000000000..9efa11184e37 --- /dev/null +++ b/airbyte-integrations/connectors/source-metabase/acceptance-test-config.yml @@ -0,0 +1,14 @@ +connector_image: airbyte/source-metabase:dev +tests: + spec: + - spec_path: "source_metabase/spec.yaml" + connection: + - config_path: "secrets/config.json" + status: "succeed" + - config_path: "integration_tests/invalid_config.json" + status: "failed" + discovery: + - config_path: "secrets/config.json" + basic_read: + - config_path: "secrets/config.json" + configured_catalog_path: "integration_tests/configured_catalog.json" diff --git a/airbyte-integrations/connectors/source-metabase/acceptance-test-docker.sh b/airbyte-integrations/connectors/source-metabase/acceptance-test-docker.sh new file mode 100644 index 000000000000..e4d8b1cef896 --- /dev/null +++ b/airbyte-integrations/connectors/source-metabase/acceptance-test-docker.sh @@ -0,0 +1,16 @@ +#!/usr/bin/env sh + +# Build latest connector image +docker build . -t $(cat acceptance-test-config.yml | grep "connector_image" | head -n 1 | cut -d: -f2) + +# Pull latest acctest image +docker pull airbyte/source-acceptance-test:latest + +# Run +docker run --rm -it \ + -v /var/run/docker.sock:/var/run/docker.sock \ + -v /tmp:/tmp \ + -v $(pwd):/test_input \ + airbyte/source-acceptance-test \ + --acceptance-test-config /test_input + diff --git a/airbyte-integrations/connectors/source-metabase/bootstrap.md b/airbyte-integrations/connectors/source-metabase/bootstrap.md new file mode 100644 index 000000000000..fb9ebdced69f --- /dev/null +++ b/airbyte-integrations/connectors/source-metabase/bootstrap.md @@ -0,0 +1,41 @@ +# Metabase + +## Overview + +Metabase is an open-source Data Visualization tool popular for business intelligence applications. 
+It also offers embeddable charts and interactive dashboards, GUI and SQL editors to create questions or cards +that queries data from major data warehouses and databases with auditing and data sandboxing features, and more. + +Just like Airbyte, it offers the options for deployment: +- self-hosted through their Open-Source or licensed (paid) versions which unlock more features. +- cloud managed by Metabase for their paying customers. + +## Endpoints + +This source connector uses Metabase API which can be both from a self-hosted or cloud-managed instance and uses HTTP as protocol. + +## Quick Notes + +Following the [introduction document to Metabase's API](https://www.metabase.com/learn/administration/metabase-api.html), there is currently +only one authentication method using a session token to authenticate requests. + +To get a session token, one needs to submit a request to the /api/session endpoint with a username and password: +By default, such sessions are good for 14 days and the credentials tokens should be cached to be reused until they expire, +because logins are rate-limited for security. Invalid and expired session tokens return a 401 (Unauthorized) status code. + +Because of this, the connector configuration needs to be supplied with the session_token id as the connector is not able to +edit its own configuration with the new value everytime it runs. + +A consequence of this limitation is that the configuration of the connector will have to be updated when the credential token expires +(every 14 days). Unless, the airbyte-server is able to refresh this token and persist the value of the new token. + +If the connector is supplied with only username and password, a session_token will be generated everytime an +authenticated query is running, which might trigger security alerts on the user's account. + +All the API from metabase don't seem to support incremental sync modes as they don't expose cursor field values or pagination. 
+So all streams only support full refresh sync modes for the moment. + +## API Reference + +The Metabase reference documents: [Metabase API documentation](https://www.metabase.com/docs/latest/api-documentation.html) + diff --git a/airbyte-integrations/connectors/source-metabase/build.gradle b/airbyte-integrations/connectors/source-metabase/build.gradle new file mode 100644 index 000000000000..27011735b5c5 --- /dev/null +++ b/airbyte-integrations/connectors/source-metabase/build.gradle @@ -0,0 +1,9 @@ +plugins { + id 'airbyte-python' + id 'airbyte-docker' + id 'airbyte-source-acceptance-test' +} + +airbytePython { + moduleDirectory 'source_metabase' +} diff --git a/airbyte-integrations/connectors/source-metabase/integration_tests/__init__.py b/airbyte-integrations/connectors/source-metabase/integration_tests/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/airbyte-integrations/connectors/source-metabase/integration_tests/acceptance.py b/airbyte-integrations/connectors/source-metabase/integration_tests/acceptance.py new file mode 100644 index 000000000000..950b53b59d41 --- /dev/null +++ b/airbyte-integrations/connectors/source-metabase/integration_tests/acceptance.py @@ -0,0 +1,14 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+# + + +import pytest + +pytest_plugins = ("source_acceptance_test.plugin",) + + +@pytest.fixture(scope="session", autouse=True) +def connector_setup(): + """This fixture is a placeholder for external resources that acceptance test might require.""" + yield diff --git a/airbyte-integrations/connectors/source-metabase/integration_tests/configured_catalog.json b/airbyte-integrations/connectors/source-metabase/integration_tests/configured_catalog.json new file mode 100644 index 000000000000..867b58c9afc3 --- /dev/null +++ b/airbyte-integrations/connectors/source-metabase/integration_tests/configured_catalog.json @@ -0,0 +1,84 @@ +{ + "streams": [ + { + "stream": { + "name": "activity", + "json_schema": {}, + "supported_sync_modes": [ + "full_refresh" + ], + "source_defined_primary_key": [ + [ + "id" + ] + ] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "cards", + "json_schema": {}, + "supported_sync_modes": [ + "full_refresh" + ], + "source_defined_primary_key": [ + [ + "id" + ] + ] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "collections", + "json_schema": {}, + "supported_sync_modes": [ + "full_refresh" + ], + "source_defined_primary_key": [ + [ + "id" + ] + ] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "dashboards", + "json_schema": {}, + "supported_sync_modes": [ + "full_refresh" + ], + "source_defined_primary_key": [ + [ + "id" + ] + ] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "users", + "json_schema": {}, + "supported_sync_modes": [ + "full_refresh" + ], + "source_defined_primary_key": [ + [ + "id" + ] + ] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + } + ] +} \ No newline at end of file diff --git 
a/airbyte-integrations/connectors/source-metabase/integration_tests/invalid_config.json b/airbyte-integrations/connectors/source-metabase/integration_tests/invalid_config.json new file mode 100644 index 000000000000..474f00af911d --- /dev/null +++ b/airbyte-integrations/connectors/source-metabase/integration_tests/invalid_config.json @@ -0,0 +1,6 @@ +{ + "instance_api_url": "localhost:3000", + "username": "wrong-account-id", + "password": "2020-05-01T00:00:00Z", + "session_token": "invalid" +} diff --git a/airbyte-integrations/connectors/source-metabase/main.py b/airbyte-integrations/connectors/source-metabase/main.py new file mode 100644 index 000000000000..dfddde2d56f9 --- /dev/null +++ b/airbyte-integrations/connectors/source-metabase/main.py @@ -0,0 +1,13 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +import sys + +from airbyte_cdk.entrypoint import launch +from source_metabase import SourceMetabase + +if __name__ == "__main__": + source = SourceMetabase() + launch(source, sys.argv[1:]) diff --git a/airbyte-integrations/connectors/source-metabase/requirements.txt b/airbyte-integrations/connectors/source-metabase/requirements.txt new file mode 100644 index 000000000000..0411042aa091 --- /dev/null +++ b/airbyte-integrations/connectors/source-metabase/requirements.txt @@ -0,0 +1,2 @@ +-e ../../bases/source-acceptance-test +-e . 
diff --git a/airbyte-integrations/connectors/source-metabase/sample_files/config.json b/airbyte-integrations/connectors/source-metabase/sample_files/config.json new file mode 100644 index 000000000000..7f711807ec05 --- /dev/null +++ b/airbyte-integrations/connectors/source-metabase/sample_files/config.json @@ -0,0 +1,6 @@ +{ + "instance_api_url": "https://localhost:3000/api/", + "username": "username", + "password": "", + "session_token": "" +} diff --git a/airbyte-integrations/connectors/source-metabase/sample_files/configured_catalog.json b/airbyte-integrations/connectors/source-metabase/sample_files/configured_catalog.json new file mode 100644 index 000000000000..f6a2cd9b6730 --- /dev/null +++ b/airbyte-integrations/connectors/source-metabase/sample_files/configured_catalog.json @@ -0,0 +1,1172 @@ +{ + "streams": [ + { + "stream": { + "name": "activity", + "json_schema": { + "type": [ + "null", + "object" + ], + "properties": { + "table_id": { + "type": [ + "null", + "integer" + ] + }, + "table": { + "type": [ + "null", + "string" + ] + }, + "database_id": { + "type": [ + "null", + "integer" + ] + }, + "model_exists": { + "type": [ + "null", + "boolean" + ] + }, + "topic": { + "type": [ + "null", + "string" + ] + }, + "custom_id": { + "type": [ + "null", + "string" + ] + }, + "details": { + "type": [ + "null", + "object" + ], + "properties": { + "description": { + "type": [ + "null", + "string" + ] + }, + "name": { + "type": [ + "null", + "string" + ] + }, + "dashcards": { + "type": [ + "null", + "array" + ], + "items": { + "properties": { + "id": { + "type": [ + "null", + "integer" + ] + }, + "card_id": { + "type": [ + "null", + "integer" + ] + }, + "exists": { + "type": [ + "null", + "boolean" + ] + } + } + } + } + } + }, + "model_id": { + "type": [ + "null", + "integer" + ] + }, + "id": { + "type": [ + "null", + "integer" + ] + }, + "database": { + "type": [ + "null", + "object" + ], + "properties": { + "description": { + "type": [ + "null", + "string" + ] + 
}, + "features": { + "type": [ + "null", + "array" + ] + }, + "cache_field_values_schedule": { + "type": [ + "null", + "string" + ] + }, + "timezone": { + "type": [ + "null", + "string" + ] + }, + "auto_run_queries": { + "type": [ + "null", + "boolean" + ] + }, + "metadata_sync_schedule": { + "type": [ + "null", + "string" + ] + }, + "name": { + "type": [ + "null", + "string" + ] + }, + "caveats": { + "type": [ + "null", + "string" + ] + }, + "creator_id": { + "type": [ + "null", + "integer" + ] + }, + "is_full_sync": { + "type": [ + "null", + "boolean" + ] + }, + "updated_at": { + "type": [ + "null", + "string" + ] + }, + "cache_ttl": { + "type": [ + "null", + "integer" + ] + }, + "is_sample": { + "type": [ + "null", + "boolean" + ] + }, + "id": { + "type": [ + "null", + "integer" + ] + }, + "is_on_demand": { + "type": [ + "null", + "boolean" + ] + }, + "options": { + "type": [ + "null", + "string" + ] + }, + "engine": { + "type": [ + "null", + "string" + ] + }, + "initial_sync_status": { + "type": [ + "null", + "string" + ] + }, + "refingerprint": { + "type": [ + "null", + "boolean" + ] + }, + "created_at": { + "type": [ + "null", + "string" + ] + }, + "points_of_interest": { + "type": [ + "null", + "string" + ] + } + } + }, + "user_id": { + "type": [ + "null", + "integer" + ] + }, + "timestamp": { + "type": [ + "null", + "string" + ] + }, + "user": { + "type": [ + "null", + "object" + ], + "properties": { + "email": { + "type": [ + "null", + "string" + ] + }, + "first_name": { + "type": [ + "null", + "string" + ] + }, + "last_login": { + "type": [ + "null", + "string" + ] + }, + "is_qbnewb": { + "type": [ + "null", + "boolean" + ] + }, + "is_superuser": { + "type": [ + "null", + "boolean" + ] + }, + "id": { + "type": [ + "null", + "integer" + ] + }, + "last_name": { + "type": [ + "null", + "string" + ] + }, + "date_joined": { + "type": [ + "null", + "string" + ] + }, + "common_name": { + "type": [ + "null", + "string" + ] + } + } + }, + "model": { + "type": [ + 
"null", + "string" + ] + } + } + }, + "supported_sync_modes": [ + "full_refresh" + ], + "source_defined_primary_key": [ + [ + "id" + ] + ] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "cards", + "json_schema": { + "type": [ + "null", + "object" + ], + "properties": { + "description": { + "type": [ + "null", + "string" + ] + }, + "archived": { + "type": [ + "null", + "boolean" + ] + }, + "collection_position": { + "type": [ + "null", + "integer" + ] + }, + "table_id": { + "type": [ + "null", + "integer" + ] + }, + "result_metadata": { + "type": [ + "null", + "array" + ], + "items": { + "properties": { + "display_name": { + "type": [ + "null", + "string" + ] + }, + "field_ref": { + "type": [ + "null", + "string", + "array" + ] + }, + "name": { + "type": [ + "null", + "string" + ] + }, + "base_type": { + "type": [ + "null", + "string" + ] + }, + "effective_type": { + "type": [ + "null", + "string" + ] + }, + "semantic_type": { + "type": [ + "null", + "string" + ] + }, + "fingerprint": { + "type": [ + "null", + "object" + ] + } + } + } + }, + "creator": { + "type": [ + "null", + "object" + ], + "properties": { + "email": { + "type": [ + "null", + "string" + ] + }, + "first_name": { + "type": [ + "null", + "string" + ] + }, + "last_login": { + "type": [ + "null", + "string" + ] + }, + "is_qbnewb": { + "type": [ + "null", + "boolean" + ] + }, + "is_superuser": { + "type": [ + "null", + "boolean" + ] + }, + "id": { + "type": [ + "null", + "integer" + ] + }, + "last_name": { + "type": [ + "null", + "string" + ] + }, + "date_joined": { + "type": [ + "null", + "string" + ] + }, + "common_name": { + "type": [ + "null", + "string" + ] + } + } + }, + "database_id": { + "type": [ + "null", + "integer" + ] + }, + "enable_embedding": { + "type": [ + "null", + "boolean" + ] + }, + "collection_id": { + "type": [ + "null", + "integer" + ] + }, + "query_type": { + "type": [ + "null", + "string" + ] + }, + "name": { + 
"type": [ + "null", + "string" + ] + }, + "creator_id": { + "type": [ + "null", + "integer" + ] + }, + "updated_at": { + "type": [ + "null", + "string" + ] + }, + "made_public_by_id": { + "type": [ + "null", + "integer" + ] + }, + "embedding_params": { + "type": [ + "null", + "string" + ] + }, + "cache_ttl": { + "type": [ + "null", + "integer" + ] + }, + "dataset_query": { + "type": [ + "null", + "object" + ], + "properties": { + "type": { + "type": [ + "null", + "string" + ] + }, + "native": { + "type": [ + "null", + "object" + ], + "properties": { + "query": { + "type": [ + "null", + "string" + ] + }, + "template-tags": { + "type": [ + "null", + "object" + ] + } + } + }, + "database": { + "type": [ + "null", + "integer" + ] + } + } + }, + "id": { + "type": [ + "null", + "integer" + ] + }, + "display": { + "type": [ + "null", + "string" + ] + }, + "last-edit-info": { + "type": [ + "null", + "object" + ], + "properties": { + "id": { + "type": [ + "null", + "integer" + ] + }, + "email": { + "type": [ + "null", + "string" + ] + }, + "first_name": { + "type": [ + "null", + "string" + ] + }, + "last_name": { + "type": [ + "null", + "string" + ] + }, + "timestamp": { + "type": [ + "null", + "string" + ] + } + } + }, + "visualization_settings": { + "type": [ + "null", + "object" + ] + }, + "collection": { + "type": [ + "null", + "object" + ], + "properties": { + "authority_level": { + "type": [ + "null", + "string" + ] + }, + "description": { + "type": [ + "null", + "string" + ] + }, + "archived": { + "type": [ + "null", + "boolean" + ] + }, + "slug": { + "type": [ + "null", + "string" + ] + }, + "color": { + "type": [ + "null", + "string" + ] + }, + "name": { + "type": [ + "null", + "string" + ] + }, + "personal_owner_id": { + "type": [ + "null", + "integer" + ] + }, + "id": { + "type": [ + "null", + "integer" + ] + }, + "location": { + "type": [ + "null", + "string" + ] + }, + "namespace": { + "type": [ + "null", + "string" + ] + } + } + }, + "dataset": { + "type": [ + 
"null", + "boolean" + ] + }, + "created_at": { + "type": [ + "null", + "string" + ] + }, + "public_uuid": { + "type": [ + "null", + "string" + ] + } + } + }, + "supported_sync_modes": [ + "full_refresh" + ], + "source_defined_primary_key": [ + [ + "id" + ] + ] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "collections", + "json_schema": { + "type": [ + "null", + "object" + ], + "properties": { + "authority_level": { + "type": [ + "null", + "string" + ] + }, + "description": { + "type": [ + "null", + "string" + ] + }, + "archived": { + "type": [ + "null", + "boolean" + ] + }, + "slug": { + "type": [ + "null", + "string" + ] + }, + "color": { + "type": [ + "null", + "string" + ] + }, + "can_write": { + "type": [ + "null", + "boolean" + ] + }, + "name": { + "type": [ + "null", + "string" + ] + }, + "personal_owner_id": { + "type": [ + "null", + "integer" + ] + }, + "id": { + "type": [ + "null", + "integer", + "string" + ] + }, + "location": { + "type": [ + "null", + "string" + ] + }, + "namespace": { + "type": [ + "null", + "string" + ] + } + } + }, + "supported_sync_modes": [ + "full_refresh" + ], + "source_defined_primary_key": [ + [ + "id" + ] + ] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "dashboards", + "json_schema": { + "type": [ + "null", + "object" + ], + "properties": { + "description": { + "type": [ + "null", + "string" + ] + }, + "archived": { + "type": [ + "null", + "boolean" + ] + }, + "collection_position": { + "type": [ + "null", + "integer" + ] + }, + "creator": { + "type": [ + "null", + "object" + ], + "properties": { + "email": { + "type": [ + "null", + "string" + ] + }, + "first_name": { + "type": [ + "null", + "string" + ] + }, + "last_login": { + "type": [ + "null", + "string" + ] + }, + "is_qbnewb": { + "type": [ + "null", + "boolean" + ] + }, + "is_superuser": { + "type": [ + "null", + "boolean" + ] + }, + "id": { + 
"type": [ + "null", + "integer" + ] + }, + "last_name": { + "type": [ + "null", + "string" + ] + }, + "date_joined": { + "type": [ + "null", + "string" + ] + }, + "common_name": { + "type": [ + "null", + "string" + ] + } + } + }, + "enable_embedding": { + "type": [ + "null", + "boolean" + ] + }, + "collection_id": { + "type": [ + "null", + "integer" + ] + }, + "show_in_getting_started": { + "type": [ + "null", + "boolean" + ] + }, + "name": { + "type": [ + "null", + "string" + ] + }, + "caveats": { + "type": [ + "null", + "string" + ] + }, + "creator_id": { + "type": [ + "null", + "integer" + ] + }, + "updated_at": { + "type": [ + "null", + "string" + ] + }, + "made_public_by_id": { + "type": [ + "null", + "integer" + ] + }, + "embedding_params": { + "type": [ + "null", + "object" + ] + }, + "cache_ttl": { + "type": [ + "null", + "integer" + ] + }, + "id": { + "type": [ + "null", + "integer" + ] + }, + "position": { + "type": [ + "null", + "string" + ] + }, + "last-edit-info": { + "type": [ + "null", + "object" + ], + "properties": { + "id": { + "type": [ + "null", + "integer" + ] + }, + "email": { + "type": [ + "null", + "string" + ] + }, + "first_name": { + "type": [ + "null", + "string" + ] + }, + "last_name": { + "type": [ + "null", + "string" + ] + }, + "timestamp": { + "type": [ + "null", + "string" + ] + } + } + }, + "parameters": { + "type": [ + "null", + "array" + ], + "items": { + "properties": { + "name": { + "type": [ + "null", + "string", + "array" + ] + }, + "slug": { + "type": [ + "null", + "string" + ] + }, + "id": { + "type": [ + "null", + "string" + ] + }, + "type": { + "type": [ + "null", + "string" + ] + }, + "sectionId": { + "type": [ + "null", + "string" + ] + }, + "default": { + "type": [ + "null", + "array", + "string" + ], + "items": { + "type": [ + "null", + "array", + "boolean", + "integer", + "string" + ] + } + } + } + } + }, + "created_at": { + "type": [ + "null", + "string" + ] + }, + "public_uuid": { + "type": [ + "null", + "string" + 
] + }, + "points_of_interest": { + "type": [ + "null", + "string" + ] + } + } + }, + "supported_sync_modes": [ + "full_refresh" + ], + "source_defined_primary_key": [ + [ + "id" + ] + ] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "users", + "json_schema": { + "type": [ + "null", + "object" + ], + "properties": { + "id": { + "type": [ + "null", + "integer" + ] + }, + "email": { + "type": [ + "null", + "string" + ] + }, + "first_name": { + "type": [ + "null", + "string" + ] + }, + "last_name": { + "type": [ + "null", + "string" + ] + }, + "common_name": { + "type": [ + "null", + "string" + ] + } + } + }, + "supported_sync_modes": [ + "full_refresh" + ], + "source_defined_primary_key": [ + [ + "id" + ] + ] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + } + ] +} \ No newline at end of file diff --git a/airbyte-integrations/connectors/source-metabase/sample_files/state.json b/airbyte-integrations/connectors/source-metabase/sample_files/state.json new file mode 100644 index 000000000000..0967ef424bce --- /dev/null +++ b/airbyte-integrations/connectors/source-metabase/sample_files/state.json @@ -0,0 +1 @@ +{} diff --git a/airbyte-integrations/connectors/source-metabase/setup.py b/airbyte-integrations/connectors/source-metabase/setup.py new file mode 100644 index 000000000000..4ad3f0543eac --- /dev/null +++ b/airbyte-integrations/connectors/source-metabase/setup.py @@ -0,0 +1,27 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+# + + +from setuptools import find_packages, setup + +MAIN_REQUIREMENTS = ["airbyte-cdk~=0.1", "requests>=2.28.0", "types-requests>=2.27.30"] + +TEST_REQUIREMENTS = [ + "pytest~=6.1", + "requests-mock", + "requests_mock~=1.8", +] + +setup( + name="source_metabase", + description="Source implementation for Metabase.", + author="Airbyte", + author_email="contact@airbyte.io", + packages=find_packages(), + install_requires=MAIN_REQUIREMENTS, + package_data={"": ["*.json", "*.yaml", "schemas/*.json", "schemas/shared/*.json"]}, + extras_require={ + "tests": TEST_REQUIREMENTS, + }, +) diff --git a/airbyte-integrations/connectors/source-metabase/source_metabase/__init__.py b/airbyte-integrations/connectors/source-metabase/source_metabase/__init__.py new file mode 100644 index 000000000000..dedee206ecba --- /dev/null +++ b/airbyte-integrations/connectors/source-metabase/source_metabase/__init__.py @@ -0,0 +1,3 @@ +from .source import SourceMetabase + +__all__ = ["SourceMetabase"] diff --git a/airbyte-integrations/connectors/source-metabase/source_metabase/schemas/activity.json b/airbyte-integrations/connectors/source-metabase/source_metabase/schemas/activity.json new file mode 100644 index 000000000000..f8c300a90e5c --- /dev/null +++ b/airbyte-integrations/connectors/source-metabase/source_metabase/schemas/activity.json @@ -0,0 +1,318 @@ +{ + "type": [ + "null", + "object" + ], + "properties": { + "table_id": { + "type": [ + "null", + "integer" + ] + }, + "table": { + "type": [ + "null", + "string" + ] + }, + "database_id": { + "type": [ + "null", + "integer" + ] + }, + "model_exists": { + "type": [ + "null", + "boolean" + ] + }, + "topic": { + "type": [ + "null", + "string" + ] + }, + "custom_id": { + "type": [ + "null", + "string" + ] + }, + "details": { + "type": [ + "null", + "object" + ], + "properties": { + "description": { + "type": [ + "null", + "string" + ] + }, + "name": { + "type": [ + "null", + "string" + ] + }, + "dashcards": { + "type": [ + "null", + "array" 
+ ], + "items": { + "properties": { + "id": { + "type": [ + "null", + "integer" + ] + }, + "card_id": { + "type": [ + "null", + "integer" + ] + }, + "exists": { + "type": [ + "null", + "boolean" + ] + } + } + } + } + } + }, + "model_id": { + "type": [ + "null", + "integer" + ] + }, + "id": { + "type": [ + "null", + "integer" + ] + }, + "database": { + "type": [ + "null", + "object" + ], + "properties": { + "description": { + "type": [ + "null", + "string" + ] + }, + "features": { + "type": [ + "null", + "array" + ] + }, + "cache_field_values_schedule": { + "type": [ + "null", + "string" + ] + }, + "timezone": { + "type": [ + "null", + "string" + ] + }, + "auto_run_queries": { + "type": [ + "null", + "boolean" + ] + }, + "metadata_sync_schedule": { + "type": [ + "null", + "string" + ] + }, + "name": { + "type": [ + "null", + "string" + ] + }, + "caveats": { + "type": [ + "null", + "string" + ] + }, + "creator_id": { + "type": [ + "null", + "integer" + ] + }, + "is_full_sync": { + "type": [ + "null", + "boolean" + ] + }, + "updated_at": { + "type": [ + "null", + "string" + ] + }, + "cache_ttl": { + "type": [ + "null", + "integer" + ] + }, + "is_sample": { + "type": [ + "null", + "boolean" + ] + }, + "id": { + "type": [ + "null", + "integer" + ] + }, + "is_on_demand": { + "type": [ + "null", + "boolean" + ] + }, + "options": { + "type": [ + "null", + "string" + ] + }, + "engine": { + "type": [ + "null", + "string" + ] + }, + "initial_sync_status": { + "type": [ + "null", + "string" + ] + }, + "refingerprint": { + "type": [ + "null", + "boolean" + ] + }, + "created_at": { + "type": [ + "null", + "string" + ] + }, + "points_of_interest": { + "type": [ + "null", + "string" + ] + } + } + }, + "user_id": { + "type": [ + "null", + "integer" + ] + }, + "timestamp": { + "type": [ + "null", + "string" + ] + }, + "user": { + "type": [ + "null", + "object" + ], + "properties": { + "email": { + "type": [ + "null", + "string" + ] + }, + "first_name": { + "type": [ + "null", + 
"string" + ] + }, + "last_login": { + "type": [ + "null", + "string" + ] + }, + "is_qbnewb": { + "type": [ + "null", + "boolean" + ] + }, + "is_superuser": { + "type": [ + "null", + "boolean" + ] + }, + "id": { + "type": [ + "null", + "integer" + ] + }, + "last_name": { + "type": [ + "null", + "string" + ] + }, + "date_joined": { + "type": [ + "null", + "string" + ] + }, + "common_name": { + "type": [ + "null", + "string" + ] + } + } + }, + "model": { + "type": [ + "null", + "string" + ] + } + } +} \ No newline at end of file diff --git a/airbyte-integrations/connectors/source-metabase/source_metabase/schemas/cards.json b/airbyte-integrations/connectors/source-metabase/source_metabase/schemas/cards.json new file mode 100644 index 000000000000..3f8dc54d6605 --- /dev/null +++ b/airbyte-integrations/connectors/source-metabase/source_metabase/schemas/cards.json @@ -0,0 +1,389 @@ +{ + "type": [ + "null", + "object" + ], + "properties": { + "description": { + "type": [ + "null", + "string" + ] + }, + "archived": { + "type": [ + "null", + "boolean" + ] + }, + "collection_position": { + "type": [ + "null", + "integer" + ] + }, + "table_id": { + "type": [ + "null", + "integer" + ] + }, + "result_metadata": { + "type": [ + "null", + "array" + ], + "items": { + "properties": { + "display_name": { + "type": [ + "null", + "string" + ] + }, + "field_ref": { + "type": [ + "null", + "string", + "array" + ] + }, + "name": { + "type": [ + "null", + "string" + ] + }, + "base_type": { + "type": [ + "null", + "string" + ] + }, + "effective_type": { + "type": [ + "null", + "string" + ] + }, + "semantic_type": { + "type": [ + "null", + "string" + ] + }, + "fingerprint": { + "type": [ + "null", + "object" + ] + } + } + } + }, + "creator": { + "type": [ + "null", + "object" + ], + "properties": { + "email": { + "type": [ + "null", + "string" + ] + }, + "first_name": { + "type": [ + "null", + "string" + ] + }, + "last_login": { + "type": [ + "null", + "string" + ] + }, + "is_qbnewb": { + 
"type": [ + "null", + "boolean" + ] + }, + "is_superuser": { + "type": [ + "null", + "boolean" + ] + }, + "id": { + "type": [ + "null", + "integer" + ] + }, + "last_name": { + "type": [ + "null", + "string" + ] + }, + "date_joined": { + "type": [ + "null", + "string" + ] + }, + "common_name": { + "type": [ + "null", + "string" + ] + } + } + }, + "database_id": { + "type": [ + "null", + "integer" + ] + }, + "enable_embedding": { + "type": [ + "null", + "boolean" + ] + }, + "collection_id": { + "type": [ + "null", + "integer" + ] + }, + "query_type": { + "type": [ + "null", + "string" + ] + }, + "name": { + "type": [ + "null", + "string" + ] + }, + "creator_id": { + "type": [ + "null", + "integer" + ] + }, + "updated_at": { + "type": [ + "null", + "string" + ] + }, + "made_public_by_id": { + "type": [ + "null", + "integer" + ] + }, + "embedding_params": { + "type": [ + "null", + "string" + ] + }, + "cache_ttl": { + "type": [ + "null", + "integer" + ] + }, + "dataset_query": { + "type": [ + "null", + "object" + ], + "properties": { + "type": { + "type": [ + "null", + "string" + ] + }, + "native": { + "type": [ + "null", + "object" + ], + "properties": { + "query": { + "type": [ + "null", + "string" + ] + }, + "template-tags": { + "type": [ + "null", + "object" + ] + } + } + }, + "database": { + "type": [ + "null", + "integer" + ] + } + } + }, + "id": { + "type": [ + "null", + "integer" + ] + }, + "display": { + "type": [ + "null", + "string" + ] + }, + "last-edit-info": { + "type": [ + "null", + "object" + ], + "properties": { + "id": { + "type": [ + "null", + "integer" + ] + }, + "email": { + "type": [ + "null", + "string" + ] + }, + "first_name": { + "type": [ + "null", + "string" + ] + }, + "last_name": { + "type": [ + "null", + "string" + ] + }, + "timestamp": { + "type": [ + "null", + "string" + ] + } + } + }, + "visualization_settings": { + "type": [ + "null", + "object" + ] + }, + "collection": { + "type": [ + "null", + "object" + ], + "properties": { + 
"authority_level": { + "type": [ + "null", + "string" + ] + }, + "description": { + "type": [ + "null", + "string" + ] + }, + "archived": { + "type": [ + "null", + "boolean" + ] + }, + "slug": { + "type": [ + "null", + "string" + ] + }, + "color": { + "type": [ + "null", + "string" + ] + }, + "name": { + "type": [ + "null", + "string" + ] + }, + "personal_owner_id": { + "type": [ + "null", + "integer" + ] + }, + "id": { + "type": [ + "null", + "integer" + ] + }, + "location": { + "type": [ + "null", + "string" + ] + }, + "namespace": { + "type": [ + "null", + "string" + ] + } + } + }, + "dataset": { + "type": [ + "null", + "boolean" + ] + }, + "created_at": { + "type": [ + "null", + "string" + ] + }, + "public_uuid": { + "type": [ + "null", + "string" + ] + } + } +} \ No newline at end of file diff --git a/airbyte-integrations/connectors/source-metabase/source_metabase/schemas/collections.json b/airbyte-integrations/connectors/source-metabase/source_metabase/schemas/collections.json new file mode 100644 index 000000000000..e9d1c06162a3 --- /dev/null +++ b/airbyte-integrations/connectors/source-metabase/source_metabase/schemas/collections.json @@ -0,0 +1,75 @@ +{ + "type": [ + "null", + "object" + ], + "properties": { + "authority_level": { + "type": [ + "null", + "string" + ] + }, + "description": { + "type": [ + "null", + "string" + ] + }, + "archived": { + "type": [ + "null", + "boolean" + ] + }, + "slug": { + "type": [ + "null", + "string" + ] + }, + "color": { + "type": [ + "null", + "string" + ] + }, + "can_write": { + "type": [ + "null", + "boolean" + ] + }, + "name": { + "type": [ + "null", + "string" + ] + }, + "personal_owner_id": { + "type": [ + "null", + "integer" + ] + }, + "id": { + "type": [ + "null", + "integer", + "string" + ] + }, + "location": { + "type": [ + "null", + "string" + ] + }, + "namespace": { + "type": [ + "null", + "string" + ] + } + } +} diff --git 
a/airbyte-integrations/connectors/source-metabase/source_metabase/schemas/dashboards.json b/airbyte-integrations/connectors/source-metabase/source_metabase/schemas/dashboards.json new file mode 100644 index 000000000000..d1a6192c3d16 --- /dev/null +++ b/airbyte-integrations/connectors/source-metabase/source_metabase/schemas/dashboards.json @@ -0,0 +1,272 @@ +{ + "type": [ + "null", + "object" + ], + "properties": { + "description": { + "type": [ + "null", + "string" + ] + }, + "archived": { + "type": [ + "null", + "boolean" + ] + }, + "collection_position": { + "type": [ + "null", + "integer" + ] + }, + "creator": { + "type": [ + "null", + "object" + ], + "properties": { + "email": { + "type": [ + "null", + "string" + ] + }, + "first_name": { + "type": [ + "null", + "string" + ] + }, + "last_login": { + "type": [ + "null", + "string" + ] + }, + "is_qbnewb": { + "type": [ + "null", + "boolean" + ] + }, + "is_superuser": { + "type": [ + "null", + "boolean" + ] + }, + "id": { + "type": [ + "null", + "integer" + ] + }, + "last_name": { + "type": [ + "null", + "string" + ] + }, + "date_joined": { + "type": [ + "null", + "string" + ] + }, + "common_name": { + "type": [ + "null", + "string" + ] + } + } + }, + "enable_embedding": { + "type": [ + "null", + "boolean" + ] + }, + "collection_id": { + "type": [ + "null", + "integer" + ] + }, + "show_in_getting_started": { + "type": [ + "null", + "boolean" + ] + }, + "name": { + "type": [ + "null", + "string" + ] + }, + "caveats": { + "type": [ + "null", + "string" + ] + }, + "creator_id": { + "type": [ + "null", + "integer" + ] + }, + "updated_at": { + "type": [ + "null", + "string" + ] + }, + "made_public_by_id": { + "type": [ + "null", + "integer" + ] + }, + "embedding_params": { + "type": [ + "null", + "object" + ] + }, + "cache_ttl": { + "type": [ + "null", + "integer" + ] + }, + "id": { + "type": [ + "null", + "integer" + ] + }, + "position": { + "type": [ + "null", + "string" + ] + }, + "last-edit-info": { + "type": [ + 
"null", + "object" + ], + "properties": { + "id": { + "type": [ + "null", + "integer" + ] + }, + "email": { + "type": [ + "null", + "string" + ] + }, + "first_name": { + "type": [ + "null", + "string" + ] + }, + "last_name": { + "type": [ + "null", + "string" + ] + }, + "timestamp": { + "type": [ + "null", + "string" + ] + } + } + }, + "parameters": { + "type": [ + "null", + "array" + ], + "items": { + "properties": { + "name": { + "type": [ + "null", + "string" + ] + }, + "slug": { + "type": [ + "null", + "string" + ] + }, + "id": { + "type": [ + "null", + "string" + ] + }, + "type": { + "type": [ + "null", + "string" + ] + }, + "sectionId": { + "type": [ + "null", + "string" + ] + }, + "default": { + "type": [ + "null", + "array", + "string" + ], + "items": { + "type": [ + "null", + "array", + "boolean", + "integer", + "string" + ] + } + } + } + } + }, + "created_at": { + "type": [ + "null", + "string" + ] + }, + "public_uuid": { + "type": [ + "null", + "string" + ] + }, + "points_of_interest": { + "type": [ + "null", + "string" + ] + } + } +} diff --git a/airbyte-integrations/connectors/source-metabase/source_metabase/schemas/users.json b/airbyte-integrations/connectors/source-metabase/source_metabase/schemas/users.json new file mode 100644 index 000000000000..9c354a961308 --- /dev/null +++ b/airbyte-integrations/connectors/source-metabase/source_metabase/schemas/users.json @@ -0,0 +1,38 @@ +{ + "type": [ + "null", + "object" + ], + "properties": { + "id": { + "type": [ + "null", + "integer" + ] + }, + "email": { + "type": [ + "null", + "string" + ] + }, + "first_name": { + "type": [ + "null", + "string" + ] + }, + "last_name": { + "type": [ + "null", + "string" + ] + }, + "common_name": { + "type": [ + "null", + "string" + ] + } + } +} \ No newline at end of file diff --git a/airbyte-integrations/connectors/source-metabase/source_metabase/source.py b/airbyte-integrations/connectors/source-metabase/source_metabase/source.py new file mode 100644 index 
000000000000..1e62c9636782 --- /dev/null +++ b/airbyte-integrations/connectors/source-metabase/source_metabase/source.py @@ -0,0 +1,133 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + +import logging +from typing import Any, Iterator, List, Mapping, MutableMapping, Tuple + +import requests +from airbyte_cdk.models import AirbyteMessage, ConfiguredAirbyteCatalog +from airbyte_cdk.sources import AbstractSource +from airbyte_cdk.sources.streams import Stream +from airbyte_cdk.sources.streams.http.auth import HttpAuthenticator +from source_metabase.streams import Activity, Cards, Collections, Dashboards, Users + +API_URL = "instance_api_url" +USERNAME = "username" +PASSWORD = "password" +SESSION_TOKEN = "session_token" + + +class MetabaseAuth(HttpAuthenticator): + def __init__(self, logger: logging.Logger, config: Mapping[str, Any]): + self.need_session_close = False + self.session_token = "" + self.logger = logger + self.api_url = config[API_URL] + if USERNAME in config and PASSWORD in config: + self.username = config[USERNAME] + self.password = config[PASSWORD] + if SESSION_TOKEN in config: + self.session_token = config[SESSION_TOKEN] + elif USERNAME in config and PASSWORD in config: + self.session_token = self.get_new_session_token(config[USERNAME], config[PASSWORD]) + else: + raise KeyError("Required parameters (username/password pair or session_token) not found") + # TODO: Try to retrieve latest session_token stored in some state message? 
+ + def get_new_session_token(self, username: str, password: str) -> str: + response = requests.post( + f"{self.api_url}session", headers={"Content-Type": "application/json"}, json={"username": username, "password": password} + ) + response.raise_for_status() + if response.ok: + self.session_token = response.json()["id"] + self.need_session_close = True + self.logger.info(f"New session token generated for {username}") + else: + raise ConnectionError(f"Failed to retrieve new session token, response code {response.status_code} because {response.reason}") + return self.session_token + + def has_valid_token(self) -> bool: + try: + response = requests.get(f"{self.api_url}user/current", headers=self.get_auth_header()) + response.raise_for_status() + except requests.exceptions.HTTPError as e: + if e.response.status_code == 401: + self.logger.warn(f"Unable to connect to Metabase source due to {str(e)}, retrying with a new session_token...") + self.get_new_session_token(self.username, self.password) + response = requests.get(f"{self.api_url}user/current", headers=self.get_auth_header()) + response.raise_for_status() + else: + raise ConnectionError(f"Error while checking connection: {e}") + if response.ok: + json_response = response.json() + self.logger.info( + f"Connection check for Metabase successful for {json_response['common_name']} login at {json_response['last_login']}" + ) + return True + else: + raise ConnectionError(f"Failed to retrieve new session token, response code {response.status_code} because {response.reason}") + + def get_auth_header(self) -> Mapping[str, Any]: + return {"X-Metabase-Session": self.session_token} + + def close_session(self): + if self.need_session_close: + response = requests.delete( + f"{self.api_url}session", headers=self.get_auth_header(), json={"metabase-session-id": self.session_token} + ) + response.raise_for_status() + if response.ok: + self.logger.info("Session successfully closed") + else: + self.logger.info(f"Unable to close 
session {response.status_code}: {response.reason}") + else: + self.logger.info("Session was not opened by this connector.") + + +class SourceMetabase(AbstractSource): + def __init__(self): + self.session = None + + def check_connection(self, logger: logging.Logger, config: Mapping[str, Any]) -> Tuple[bool, Any]: + session = None + try: + session = MetabaseAuth(logger, config) + return session.has_valid_token(), None + except Exception as e: + return False, e + finally: + if session: + session.close_session() + + def streams(self, config: Mapping[str, Any]) -> List[Stream]: + self.session = MetabaseAuth(logging.getLogger("airbyte"), config) + if not self.session.has_valid_token(): + raise ConnectionError("Failed to connect to source") + args = {"authenticator": self.session, API_URL: config[API_URL]} + return [ + Activity(**args), + Cards(**args), + Collections(**args), + Dashboards(**args), + Users(**args), + ] + + # We override the read method to make sure we close the metabase session and logout + # so we don't keep too many session_tokens active.
+ def read( + self, + logger: logging.Logger, + config: Mapping[str, Any], + catalog: ConfiguredAirbyteCatalog, + state: MutableMapping[str, Any] = None, + ) -> Iterator[AirbyteMessage]: + try: + yield from super().read(logger, config, catalog, state) + finally: + self.close_session() + + def close_session(self): + if self.session: + self.session.close_session() diff --git a/airbyte-integrations/connectors/source-metabase/source_metabase/spec.yaml b/airbyte-integrations/connectors/source-metabase/source_metabase/spec.yaml new file mode 100644 index 000000000000..5e4a40f80547 --- /dev/null +++ b/airbyte-integrations/connectors/source-metabase/source_metabase/spec.yaml @@ -0,0 +1,39 @@ +documentationUrl: https://docs.airbyte.io/integrations/sources/metabase +connectionSpecification: + $schema: http://json-schema.org/draft-07/schema# + title: Metabase Source Spec + type: object + required: + - instance_api_url + additionalProperties: true + properties: + instance_api_url: + type: string + title: Metabase Instance API URL + description: >- + URL to your metabase instance API + examples: + - "http://localhost:3000/api/" + order: 0 + username: + type: string + order: 1 + password: + type: string + airbyte_secret: true + order: 2 + session_token: + type: string + description: >- + To generate your session token, you need to run the following command: + ``` + curl -X POST \ + -H "Content-Type: application/json" \ + -d '{"username": "person@metabase.com", "password": "fakepassword"}' \ + http://localhost:3000/api/session + ``` + Then copy the value of the `id` field returned by a successful call to that API. + + Note that by default, sessions are good for 14 days and need to be regenerated. 
+ airbyte_secret: true + order: 3 diff --git a/airbyte-integrations/connectors/source-metabase/source_metabase/streams.py b/airbyte-integrations/connectors/source-metabase/source_metabase/streams.py new file mode 100644 index 000000000000..8d79aca4bc93 --- /dev/null +++ b/airbyte-integrations/connectors/source-metabase/source_metabase/streams.py @@ -0,0 +1,61 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + +from abc import ABC +from typing import Any, Iterable, Mapping, Optional + +import requests +from airbyte_cdk.sources.streams.http import HttpStream + + +class MetabaseStream(HttpStream, ABC): + def __init__(self, instance_api_url: str, **kwargs): + super().__init__(**kwargs) + self.instance_api_url = instance_api_url + + primary_key = "id" + response_entity = None + + def next_page_token(self, response: requests.Response) -> Optional[Mapping[str, Any]]: + return None + + @property + def url_base(self) -> str: + return self.instance_api_url + + def parse_response(self, response: requests.Response, **kwargs) -> Iterable[Mapping]: + response_json = response.json() + if self.response_entity: + result = response_json.get(self.response_entity, []) + else: + result = response_json + yield from result + + +class Activity(MetabaseStream): + def path(self, **kwargs) -> str: + return "activity" + + +class Cards(MetabaseStream): + def path(self, **kwargs) -> str: + return "card" + + +class Collections(MetabaseStream): + def path(self, **kwargs) -> str: + return "collection" + + +class Dashboards(MetabaseStream): + def path(self, **kwargs) -> str: + return "dashboard" + + +class Users(MetabaseStream): + + response_entity = "data" + + def path(self, **kwargs) -> str: + return "user" diff --git a/airbyte-integrations/connectors/source-metabase/unit_tests/test_dummy.py b/airbyte-integrations/connectors/source-metabase/unit_tests/test_dummy.py new file mode 100644 index 000000000000..f1f977513d63 --- /dev/null +++ 
b/airbyte-integrations/connectors/source-metabase/unit_tests/test_dummy.py @@ -0,0 +1,10 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +def test_dummy(): + """ + Dummy test to prevent gradle from failing test for this connector + """ + assert True diff --git a/airbyte-integrations/connectors/source-mongodb-v2/Dockerfile b/airbyte-integrations/connectors/source-mongodb-v2/Dockerfile index a84c6bcbd380..22d74754a13c 100644 --- a/airbyte-integrations/connectors/source-mongodb-v2/Dockerfile +++ b/airbyte-integrations/connectors/source-mongodb-v2/Dockerfile @@ -16,5 +16,5 @@ ENV APPLICATION source-mongodb-v2 COPY --from=build /airbyte /airbyte -LABEL io.airbyte.version=0.1.14 +LABEL io.airbyte.version=0.1.15 LABEL io.airbyte.name=airbyte/source-mongodb-v2 diff --git a/airbyte-integrations/connectors/source-mongodb-v2/build.gradle b/airbyte-integrations/connectors/source-mongodb-v2/build.gradle index 0541cfa770e5..6b776c6192dc 100644 --- a/airbyte-integrations/connectors/source-mongodb-v2/build.gradle +++ b/airbyte-integrations/connectors/source-mongodb-v2/build.gradle @@ -18,7 +18,7 @@ dependencies { implementation 'org.mongodb:mongodb-driver-sync:4.4.0' - testImplementation libs.testcontainers.mongodb + testImplementation libs.connectors.testcontainers.mongodb integrationTestJavaImplementation project(':airbyte-integrations:bases:standard-source-test') integrationTestJavaImplementation project(':airbyte-integrations:connectors:source-mongodb-v2') diff --git a/airbyte-integrations/connectors/source-mssql-strict-encrypt/Dockerfile b/airbyte-integrations/connectors/source-mssql-strict-encrypt/Dockerfile index a4672e9759fd..3aec07d0ea9a 100644 --- a/airbyte-integrations/connectors/source-mssql-strict-encrypt/Dockerfile +++ b/airbyte-integrations/connectors/source-mssql-strict-encrypt/Dockerfile @@ -16,5 +16,5 @@ ENV APPLICATION source-mssql-strict-encrypt COPY --from=build /airbyte /airbyte -LABEL io.airbyte.version=0.4.2 +LABEL 
io.airbyte.version=0.4.5 LABEL io.airbyte.name=airbyte/source-mssql-strict-encrypt diff --git a/airbyte-integrations/connectors/source-mssql-strict-encrypt/build.gradle b/airbyte-integrations/connectors/source-mssql-strict-encrypt/build.gradle index 149a3742b9d8..eb2077c7d275 100644 --- a/airbyte-integrations/connectors/source-mssql-strict-encrypt/build.gradle +++ b/airbyte-integrations/connectors/source-mssql-strict-encrypt/build.gradle @@ -21,7 +21,7 @@ dependencies { testImplementation testFixtures(project(':airbyte-integrations:connectors:source-jdbc')) testImplementation 'org.apache.commons:commons-lang3:3.11' - testImplementation libs.testcontainers.mssqlserver + testImplementation libs.connectors.testcontainers.mssqlserver integrationTestJavaImplementation project(':airbyte-integrations:bases:standard-source-test') integrationTestJavaImplementation project(':airbyte-integrations:connectors:source-mssql-strict-encrypt') diff --git a/airbyte-integrations/connectors/source-mssql/Dockerfile b/airbyte-integrations/connectors/source-mssql/Dockerfile index eadfd5211f0a..be16fbac91c2 100644 --- a/airbyte-integrations/connectors/source-mssql/Dockerfile +++ b/airbyte-integrations/connectors/source-mssql/Dockerfile @@ -16,5 +16,5 @@ ENV APPLICATION source-mssql COPY --from=build /airbyte /airbyte -LABEL io.airbyte.version=0.4.2 +LABEL io.airbyte.version=0.4.5 LABEL io.airbyte.name=airbyte/source-mssql diff --git a/airbyte-integrations/connectors/source-mssql/build.gradle b/airbyte-integrations/connectors/source-mssql/build.gradle index c7060a7fe620..3fec401df68a 100644 --- a/airbyte-integrations/connectors/source-mssql/build.gradle +++ b/airbyte-integrations/connectors/source-mssql/build.gradle @@ -15,7 +15,7 @@ dependencies { implementation project(':airbyte-db:db-lib') implementation project(':airbyte-integrations:bases:base-java') - implementation project(':airbyte-integrations:bases:debezium') + implementation project(':airbyte-integrations:bases:debezium-v1-4-2') 
implementation project(':airbyte-protocol:protocol-models') implementation project(':airbyte-integrations:connectors:source-jdbc') implementation project(':airbyte-integrations:connectors:source-relational-db') @@ -23,11 +23,11 @@ dependencies { implementation 'io.debezium:debezium-connector-sqlserver:1.4.2.Final' implementation 'com.microsoft.sqlserver:mssql-jdbc:8.4.1.jre14' - testImplementation testFixtures(project(':airbyte-integrations:bases:debezium')) + testImplementation testFixtures(project(':airbyte-integrations:bases:debezium-v1-4-2')) testImplementation testFixtures(project(':airbyte-integrations:connectors:source-jdbc')) testImplementation 'org.apache.commons:commons-lang3:3.11' - testImplementation libs.testcontainers.mssqlserver + testImplementation libs.connectors.testcontainers.mssqlserver integrationTestJavaImplementation project(':airbyte-integrations:bases:standard-source-test') performanceTestJavaImplementation project(':airbyte-integrations:bases:standard-source-test') diff --git a/airbyte-integrations/connectors/source-mssql/src/main/java/io/airbyte/integrations/source/mssql/MssqlCdcStateHandler.java b/airbyte-integrations/connectors/source-mssql/src/main/java/io/airbyte/integrations/source/mssql/MssqlCdcStateHandler.java index 63f92f7977c4..ad275bda45c2 100644 --- a/airbyte-integrations/connectors/source-mssql/src/main/java/io/airbyte/integrations/source/mssql/MssqlCdcStateHandler.java +++ b/airbyte-integrations/connectors/source-mssql/src/main/java/io/airbyte/integrations/source/mssql/MssqlCdcStateHandler.java @@ -10,13 +10,14 @@ import com.fasterxml.jackson.databind.JsonNode; import io.airbyte.commons.json.Jsons; import io.airbyte.integrations.debezium.CdcStateHandler; -import io.airbyte.integrations.source.relationaldb.StateManager; import io.airbyte.integrations.source.relationaldb.models.CdcState; +import io.airbyte.integrations.source.relationaldb.state.StateManager; import io.airbyte.protocol.models.AirbyteMessage; import 
io.airbyte.protocol.models.AirbyteMessage.Type; import io.airbyte.protocol.models.AirbyteStateMessage; import java.util.HashMap; import java.util.Map; +import java.util.Optional; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -41,7 +42,11 @@ public AirbyteMessage saveState(final Map offset, final String d final CdcState cdcState = new CdcState().withState(asJson); stateManager.getCdcStateManager().setCdcState(cdcState); - final AirbyteStateMessage stateMessage = stateManager.emit(); + /* + * Namespace pair is ignored by global state manager, but is needed for satisfy the API contract. + * Therefore, provide an empty optional. + */ + final AirbyteStateMessage stateMessage = stateManager.emit(Optional.empty()); return new AirbyteMessage().withType(Type.STATE).withState(stateMessage); } diff --git a/airbyte-integrations/connectors/source-mssql/src/main/java/io/airbyte/integrations/source/mssql/MssqlSource.java b/airbyte-integrations/connectors/source-mssql/src/main/java/io/airbyte/integrations/source/mssql/MssqlSource.java index 2a770d8e1ddd..3cf1b62600ac 100644 --- a/airbyte-integrations/connectors/source-mssql/src/main/java/io/airbyte/integrations/source/mssql/MssqlSource.java +++ b/airbyte-integrations/connectors/source-mssql/src/main/java/io/airbyte/integrations/source/mssql/MssqlSource.java @@ -4,6 +4,7 @@ package io.airbyte.integrations.source.mssql; +import static io.airbyte.integrations.debezium.AirbyteDebeziumHandler.shouldUseCDC; import static io.airbyte.integrations.debezium.internals.DebeziumEventUtils.CDC_DELETED_AT; import static io.airbyte.integrations.debezium.internals.DebeziumEventUtils.CDC_UPDATED_AT; import static java.util.stream.Collectors.toList; @@ -25,8 +26,8 @@ import io.airbyte.integrations.debezium.AirbyteDebeziumHandler; import io.airbyte.integrations.source.jdbc.AbstractJdbcSource; import io.airbyte.integrations.source.mssql.MssqlCdcHelper.SnapshotIsolation; -import io.airbyte.integrations.source.relationaldb.StateManager; 
import io.airbyte.integrations.source.relationaldb.TableInfo; +import io.airbyte.integrations.source.relationaldb.state.StateManager; import io.airbyte.protocol.models.AirbyteCatalog; import io.airbyte.protocol.models.AirbyteMessage; import io.airbyte.protocol.models.AirbyteStream; @@ -370,13 +371,6 @@ public List> getIncrementalIterators( } } - private static boolean shouldUseCDC(final ConfiguredAirbyteCatalog catalog) { - final Optional any = catalog.getStreams().stream() - .map(ConfiguredAirbyteStream::getSyncMode) - .filter(syncMode -> syncMode == SyncMode.INCREMENTAL).findAny(); - return any.isPresent(); - } - // Note: in place mutation. private static AirbyteStream removeIncrementalWithoutPk(final AirbyteStream stream) { if (stream.getSourceDefinedPrimaryKey().isEmpty()) { diff --git a/airbyte-integrations/connectors/source-mssql/src/test-integration/java/io/airbyte/integrations/source/mssql/AbstractSshMssqlSourceAcceptanceTest.java b/airbyte-integrations/connectors/source-mssql/src/test-integration/java/io/airbyte/integrations/source/mssql/AbstractSshMssqlSourceAcceptanceTest.java index 2d7ab44a9cd4..c52d9f081aa4 100644 --- a/airbyte-integrations/connectors/source-mssql/src/test-integration/java/io/airbyte/integrations/source/mssql/AbstractSshMssqlSourceAcceptanceTest.java +++ b/airbyte-integrations/connectors/source-mssql/src/test-integration/java/io/airbyte/integrations/source/mssql/AbstractSshMssqlSourceAcceptanceTest.java @@ -37,10 +37,11 @@ public abstract class AbstractSshMssqlSourceAcceptanceTest extends SourceAccepta private static final String STREAM_NAME = "dbo.id_and_name"; private static final String STREAM_NAME2 = "dbo.starships"; + private static final Network network = Network.newNetwork(); + private static JsonNode config; private String dbName; private MSSQLServerContainer db; private final SshBastionContainer bastion = new SshBastionContainer(); - private static JsonNode config; public abstract SshTunnel.TunnelMethod getTunnelMethod(); @@ 
-56,7 +57,7 @@ public ImmutableMap.Builder getMSSQLDbConfigBuilder(final JdbcDa return ImmutableMap.builder() .put("host", Objects.requireNonNull(db.getContainerInfo().getNetworkSettings() .getNetworks() - .get(((Network.NetworkImpl) bastion.getNetWork()).getName()) + .get(((Network.NetworkImpl) network).getName()) .getIpAddress())) .put("username", db.getUsername()) .put("password", db.getPassword()) @@ -77,13 +78,13 @@ private static Database getDatabaseFromConfig(final JsonNode config) { } private void startTestContainers() { - bastion.initAndStartBastion(); + bastion.initAndStartBastion(network); initAndStartJdbcContainer(); } private void initAndStartJdbcContainer() { db = new MSSQLServerContainer<>("mcr.microsoft.com/mssql/server:2017-latest") - .withNetwork(bastion.getNetWork()) + .withNetwork(network) .acceptLicense(); db.start(); } diff --git a/airbyte-integrations/connectors/source-mysql-strict-encrypt/Dockerfile b/airbyte-integrations/connectors/source-mysql-strict-encrypt/Dockerfile index 4a95c3c9cd04..9c98b4ead299 100644 --- a/airbyte-integrations/connectors/source-mysql-strict-encrypt/Dockerfile +++ b/airbyte-integrations/connectors/source-mysql-strict-encrypt/Dockerfile @@ -16,5 +16,5 @@ ENV APPLICATION source-mysql-strict-encrypt COPY --from=build /airbyte /airbyte -LABEL io.airbyte.version=0.5.10 +LABEL io.airbyte.version=0.5.15 LABEL io.airbyte.name=airbyte/source-mysql-strict-encrypt diff --git a/airbyte-integrations/connectors/source-mysql-strict-encrypt/build.gradle b/airbyte-integrations/connectors/source-mysql-strict-encrypt/build.gradle index f8a2804862fe..845cd0c878f0 100644 --- a/airbyte-integrations/connectors/source-mysql-strict-encrypt/build.gradle +++ b/airbyte-integrations/connectors/source-mysql-strict-encrypt/build.gradle @@ -21,7 +21,7 @@ dependencies { testImplementation testFixtures(project(':airbyte-integrations:connectors:source-jdbc')) testImplementation project(':airbyte-test-utils') - testImplementation 
libs.testcontainers.mysql + testImplementation libs.connectors.testcontainers.mysql integrationTestJavaImplementation project(':airbyte-integrations:bases:standard-source-test') diff --git a/airbyte-integrations/connectors/source-mysql-strict-encrypt/src/test-integration/java/io/airbyte/integrations/source/mysql_strict_encrypt/MySqlStrictEncryptSourceAcceptanceTest.java b/airbyte-integrations/connectors/source-mysql-strict-encrypt/src/test-integration/java/io/airbyte/integrations/source/mysql_strict_encrypt/MySqlStrictEncryptSourceAcceptanceTest.java index a590dc291ff2..b915ebdb1dff 100644 --- a/airbyte-integrations/connectors/source-mysql-strict-encrypt/src/test-integration/java/io/airbyte/integrations/source/mysql_strict_encrypt/MySqlStrictEncryptSourceAcceptanceTest.java +++ b/airbyte-integrations/connectors/source-mysql-strict-encrypt/src/test-integration/java/io/airbyte/integrations/source/mysql_strict_encrypt/MySqlStrictEncryptSourceAcceptanceTest.java @@ -55,7 +55,7 @@ protected void setupEnvironment(final TestDestinationEnv environment) throws Exc try (final DSLContext dslContext = DSLContextFactory.create( config.get("username").asText(), - "", + config.get("password").asText(), DatabaseDriver.MYSQL.getDriverClassName(), String.format("jdbc:mysql://%s:%s/%s?%s", config.get("host").asText(), diff --git a/airbyte-integrations/connectors/source-mysql-strict-encrypt/src/test/java/io/airbyte/integrations/source/mysql_strict_encrypt/MySqlStrictEncryptJdbcSourceAcceptanceTest.java b/airbyte-integrations/connectors/source-mysql-strict-encrypt/src/test/java/io/airbyte/integrations/source/mysql_strict_encrypt/MySqlStrictEncryptJdbcSourceAcceptanceTest.java index 2422fc5f9972..64de8d65db14 100644 --- a/airbyte-integrations/connectors/source-mysql-strict-encrypt/src/test/java/io/airbyte/integrations/source/mysql_strict_encrypt/MySqlStrictEncryptJdbcSourceAcceptanceTest.java +++ 
b/airbyte-integrations/connectors/source-mysql-strict-encrypt/src/test/java/io/airbyte/integrations/source/mysql_strict_encrypt/MySqlStrictEncryptJdbcSourceAcceptanceTest.java @@ -59,13 +59,13 @@ public void setup() throws Exception { .put("host", container.getHost()) .put("port", container.getFirstMappedPort()) .put("database", Strings.addRandomSuffix("db", "_", 10)) - .put("username", TEST_USER) - .put("password", TEST_PASSWORD) + .put("username", container.getUsername()) + .put("password", container.getPassword()) .build()); dslContext = DSLContextFactory.create( config.get("username").asText(), - "", + config.get("password").asText(), DatabaseDriver.MYSQL.getDriverClassName(), String.format("jdbc:mysql://%s:%s?%s", config.get("host").asText(), diff --git a/airbyte-integrations/connectors/source-mysql/Dockerfile b/airbyte-integrations/connectors/source-mysql/Dockerfile index 1d321c00562d..1444cba8665c 100644 --- a/airbyte-integrations/connectors/source-mysql/Dockerfile +++ b/airbyte-integrations/connectors/source-mysql/Dockerfile @@ -16,5 +16,5 @@ ENV APPLICATION source-mysql COPY --from=build /airbyte /airbyte -LABEL io.airbyte.version=0.5.11 +LABEL io.airbyte.version=0.5.15 LABEL io.airbyte.name=airbyte/source-mysql diff --git a/airbyte-integrations/connectors/source-mysql/build.gradle b/airbyte-integrations/connectors/source-mysql/build.gradle index 557b3b4e5e99..e99e34ca0d60 100644 --- a/airbyte-integrations/connectors/source-mysql/build.gradle +++ b/airbyte-integrations/connectors/source-mysql/build.gradle @@ -10,28 +10,10 @@ application { applicationDefaultJvmArgs = ['-XX:+ExitOnOutOfMemoryError', '-XX:MaxRAMPercentage=75.0'] } -configurations { - /* - * For some reason, the MySQL testcontainer does not start properly on - * newer versions of the testcontainers library. Therefore, pin the version - * to the known working version to ensure that the tests continue to work. 
- */ - testRuntimeClasspath { - resolutionStrategy.force 'org.testcontainers:testcontainers:1.15.3' - resolutionStrategy.force 'org.testcontainers:jdbc:1.15.3' - resolutionStrategy.force 'org.testcontainers:mysql:1.15.3' - } - integrationTestRuntimeClasspath { - resolutionStrategy.force 'org.testcontainers:testcontainers:1.15.3' - resolutionStrategy.force 'org.testcontainers:jdbc:1.15.3' - resolutionStrategy.force 'org.testcontainers:mysql:1.15.3' - } -} - dependencies { implementation project(':airbyte-db:db-lib') implementation project(':airbyte-integrations:bases:base-java') - implementation project(':airbyte-integrations:bases:debezium') + implementation project(':airbyte-integrations:bases:debezium-v1-4-2') implementation project(':airbyte-integrations:connectors:source-jdbc') implementation project(':airbyte-protocol:protocol-models') implementation project(':airbyte-integrations:connectors:source-relational-db') @@ -39,10 +21,10 @@ dependencies { implementation 'mysql:mysql-connector-java:8.0.22' implementation 'org.apache.commons:commons-lang3:3.11' - testImplementation testFixtures(project(':airbyte-integrations:bases:debezium')) + testImplementation testFixtures(project(':airbyte-integrations:bases:debezium-v1-4-2')) testImplementation testFixtures(project(':airbyte-integrations:connectors:source-jdbc')) testImplementation 'org.apache.commons:commons-lang3:3.11' - testImplementation 'org.testcontainers:mysql:1.15.3' + testImplementation libs.connectors.testcontainers.mysql integrationTestJavaImplementation project(':airbyte-integrations:bases:standard-source-test') integrationTestJavaImplementation project(':airbyte-integrations:connectors:source-mysql') diff --git a/airbyte-integrations/connectors/source-mysql/src/main/java/io/airbyte/integrations/source/mysql/MySqlCdcStateHandler.java b/airbyte-integrations/connectors/source-mysql/src/main/java/io/airbyte/integrations/source/mysql/MySqlCdcStateHandler.java index d6171c06ff82..e896f3082ce7 100644 --- 
a/airbyte-integrations/connectors/source-mysql/src/main/java/io/airbyte/integrations/source/mysql/MySqlCdcStateHandler.java +++ b/airbyte-integrations/connectors/source-mysql/src/main/java/io/airbyte/integrations/source/mysql/MySqlCdcStateHandler.java @@ -10,13 +10,14 @@ import com.fasterxml.jackson.databind.JsonNode; import io.airbyte.commons.json.Jsons; import io.airbyte.integrations.debezium.CdcStateHandler; -import io.airbyte.integrations.source.relationaldb.StateManager; import io.airbyte.integrations.source.relationaldb.models.CdcState; +import io.airbyte.integrations.source.relationaldb.state.StateManager; import io.airbyte.protocol.models.AirbyteMessage; import io.airbyte.protocol.models.AirbyteMessage.Type; import io.airbyte.protocol.models.AirbyteStateMessage; import java.util.HashMap; import java.util.Map; +import java.util.Optional; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -42,7 +43,11 @@ public AirbyteMessage saveState(final Map offset, final String d final CdcState cdcState = new CdcState().withState(asJson); stateManager.getCdcStateManager().setCdcState(cdcState); - final AirbyteStateMessage stateMessage = stateManager.emit(); + /* + * Namespace pair is ignored by global state manager, but is needed for satisfy the API contract. + * Therefore, provide an empty optional. 
+ */ + final AirbyteStateMessage stateMessage = stateManager.emit(Optional.empty()); return new AirbyteMessage().withType(Type.STATE).withState(stateMessage); } diff --git a/airbyte-integrations/connectors/source-mysql/src/main/java/io/airbyte/integrations/source/mysql/MySqlSource.java b/airbyte-integrations/connectors/source-mysql/src/main/java/io/airbyte/integrations/source/mysql/MySqlSource.java index ea435043efc9..1d3bbb90f899 100644 --- a/airbyte-integrations/connectors/source-mysql/src/main/java/io/airbyte/integrations/source/mysql/MySqlSource.java +++ b/airbyte-integrations/connectors/source-mysql/src/main/java/io/airbyte/integrations/source/mysql/MySqlSource.java @@ -4,6 +4,7 @@ package io.airbyte.integrations.source.mysql; +import static io.airbyte.integrations.debezium.AirbyteDebeziumHandler.shouldUseCDC; import static io.airbyte.integrations.debezium.internals.DebeziumEventUtils.CDC_DELETED_AT; import static io.airbyte.integrations.debezium.internals.DebeziumEventUtils.CDC_UPDATED_AT; import static io.airbyte.integrations.source.mysql.helpers.CdcConfigurationHelper.checkBinlog; @@ -25,9 +26,9 @@ import io.airbyte.integrations.debezium.AirbyteDebeziumHandler; import io.airbyte.integrations.source.jdbc.AbstractJdbcSource; import io.airbyte.integrations.source.mysql.helpers.CdcConfigurationHelper; -import io.airbyte.integrations.source.relationaldb.StateManager; import io.airbyte.integrations.source.relationaldb.TableInfo; import io.airbyte.integrations.source.relationaldb.models.CdcState; +import io.airbyte.integrations.source.relationaldb.state.StateManager; import io.airbyte.protocol.models.AirbyteCatalog; import io.airbyte.protocol.models.AirbyteMessage; import io.airbyte.protocol.models.AirbyteStream; @@ -168,12 +169,6 @@ private static boolean isCdc(final JsonNode config) { .equals(ReplicationMethod.CDC); } - private static boolean shouldUseCDC(final ConfiguredAirbyteCatalog catalog) { - final Optional any = 
catalog.getStreams().stream().map(ConfiguredAirbyteStream::getSyncMode) - .filter(syncMode -> syncMode == SyncMode.INCREMENTAL).findAny(); - return any.isPresent(); - } - @Override public List> getIncrementalIterators(final JdbcDatabase database, final ConfiguredAirbyteCatalog catalog, diff --git a/airbyte-integrations/connectors/source-mysql/src/test-integration/java/io/airbyte/integrations/source/mysql/AbstractMySqlSourceDatatypeTest.java b/airbyte-integrations/connectors/source-mysql/src/test-integration/java/io/airbyte/integrations/source/mysql/AbstractMySqlSourceDatatypeTest.java new file mode 100644 index 000000000000..04b9db39ab3a --- /dev/null +++ b/airbyte-integrations/connectors/source-mysql/src/test-integration/java/io/airbyte/integrations/source/mysql/AbstractMySqlSourceDatatypeTest.java @@ -0,0 +1,434 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.integrations.source.mysql; + +import com.fasterxml.jackson.databind.JsonNode; +import com.mysql.cj.MysqlType; +import io.airbyte.db.Database; +import io.airbyte.integrations.standardtest.source.AbstractSourceDatabaseTypeTest; +import io.airbyte.integrations.standardtest.source.TestDataHolder; +import io.airbyte.protocol.models.JsonSchemaType; +import java.io.File; +import java.io.IOException; +import java.util.Set; +import java.util.stream.Collectors; +import java.util.stream.Stream; +import org.apache.commons.codec.binary.Base64; +import org.apache.commons.io.FileUtils; +import org.apache.commons.lang3.RandomStringUtils; +import org.apache.commons.lang3.StringUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.testcontainers.containers.MySQLContainer; + +public abstract class AbstractMySqlSourceDatatypeTest extends AbstractSourceDatabaseTypeTest { + + protected static final Logger LOGGER = LoggerFactory.getLogger(AbstractMySqlSourceDatatypeTest.class); + + protected MySQLContainer container; + protected JsonNode config; + + @Override + 
protected JsonNode getConfig() { + return config; + } + + @Override + protected String getImageName() { + return "airbyte/source-mysql:dev"; + } + + @Override + protected abstract Database setupDatabase() throws Exception; + + @Override + protected String getNameSpace() { + return container.getDatabaseName(); + } + + @Override + protected void initTests() { + // bit defaults to bit(1), which is equivalent to boolean + addDataTypeTestData( + TestDataHolder.builder() + .sourceType("bit") + .airbyteType(JsonSchemaType.BOOLEAN) + .addInsertValues("null", "1", "0") + .addExpectedValues(null, "true", "false") + .build()); + + // bit(1) is equivalent to boolean + addDataTypeTestData( + TestDataHolder.builder() + .sourceType("bit") + .fullSourceDataType("bit(1)") + .airbyteType(JsonSchemaType.BOOLEAN) + .addInsertValues("null", "1", "0") + .addExpectedValues(null, "true", "false") + .build()); + + // bit(>1) is binary + addDataTypeTestData( + TestDataHolder.builder() + .sourceType("bit") + .fullSourceDataType("bit(7)") + .airbyteType(JsonSchemaType.STRING_BASE_64) + // 1000001 is binary for A + .addInsertValues("null", "b'1000001'") + // QQo= is base64 encoding in charset UTF-8 for A + .addExpectedValues(null, "QQ==") + .build()); + + // tinyint without width + addDataTypeTestData( + TestDataHolder.builder() + .sourceType("tinyint") + .airbyteType(JsonSchemaType.NUMBER) + .addInsertValues("null", "-128", "127") + .addExpectedValues(null, "-128", "127") + .build()); + + // tinyint(1) is equivalent to boolean + addDataTypeTestData( + TestDataHolder.builder() + .sourceType("tinyint") + .fullSourceDataType("tinyint(1)") + .airbyteType(JsonSchemaType.BOOLEAN) + .addInsertValues("null", "1", "0") + .addExpectedValues(null, "true", "false") + .build()); + + addDataTypeTestData( + TestDataHolder.builder() + .sourceType("tinyint") + .fullSourceDataType("tinyint(2)") + .airbyteType(JsonSchemaType.NUMBER) + .addInsertValues("null", "-128", "127") + .addExpectedValues(null, "-128", 
"127") + .build()); + + final Set booleanTypes = Set.of("BOOLEAN", "BOOL"); + for (final String booleanType : booleanTypes) { + addDataTypeTestData( + TestDataHolder.builder() + .sourceType(booleanType) + .airbyteType(JsonSchemaType.BOOLEAN) + // MySql booleans are tinyint(1), and only 1 is true + .addInsertValues("null", "1", "0") + .addExpectedValues(null, "true", "false") + .build()); + } + + addDataTypeTestData( + TestDataHolder.builder() + .sourceType("smallint") + .airbyteType(JsonSchemaType.NUMBER) + .addInsertValues("null", "-32768", "32767") + .addExpectedValues(null, "-32768", "32767") + .build()); + + addDataTypeTestData( + TestDataHolder.builder() + .sourceType("smallint") + .airbyteType(JsonSchemaType.NUMBER) + .fullSourceDataType("smallint zerofill") + .addInsertValues("1") + .addExpectedValues("1") + .build()); + + addDataTypeTestData( + TestDataHolder.builder() + .sourceType("smallint") + .airbyteType(JsonSchemaType.NUMBER) + .fullSourceDataType("smallint unsigned") + .addInsertValues("null", "0", "65535") + .addExpectedValues(null, "0", "65535") + .build()); + + addDataTypeTestData( + TestDataHolder.builder() + .sourceType("mediumint") + .airbyteType(JsonSchemaType.NUMBER) + .addInsertValues("null", "-8388608", "8388607") + .addExpectedValues(null, "-8388608", "8388607") + .build()); + + addDataTypeTestData( + TestDataHolder.builder() + .sourceType("mediumint") + .airbyteType(JsonSchemaType.NUMBER) + .fullSourceDataType("mediumint zerofill") + .addInsertValues("1") + .addExpectedValues("1") + .build()); + + addDataTypeTestData( + TestDataHolder.builder() + .sourceType("int") + .airbyteType(JsonSchemaType.NUMBER) + .addInsertValues("null", "-2147483648", "2147483647") + .addExpectedValues(null, "-2147483648", "2147483647") + .build()); + + addDataTypeTestData( + TestDataHolder.builder() + .sourceType("int") + .airbyteType(JsonSchemaType.NUMBER) + .fullSourceDataType("int unsigned") + .addInsertValues("3428724653") + .addExpectedValues("3428724653") 
+ .build()); + + addDataTypeTestData( + TestDataHolder.builder() + .sourceType("int") + .airbyteType(JsonSchemaType.NUMBER) + .fullSourceDataType("int zerofill") + .addInsertValues("1") + .addExpectedValues("1") + .build()); + + addDataTypeTestData( + TestDataHolder.builder() + .sourceType("bigint") + .airbyteType(JsonSchemaType.NUMBER) + .addInsertValues("null", "9223372036854775807") + .addExpectedValues(null, "9223372036854775807") + .build()); + + addDataTypeTestData( + TestDataHolder.builder() + .sourceType("float") + .airbyteType(JsonSchemaType.NUMBER) + .addInsertValues("null", "10.5") + .addExpectedValues(null, "10.5") + .build()); + + addDataTypeTestData( + TestDataHolder.builder() + .sourceType("double") + .airbyteType(JsonSchemaType.NUMBER) + .addInsertValues("null", "power(10, 308)", "1/power(10, 45)", "10.5") + .addExpectedValues(null, String.valueOf(Math.pow(10, 308)), String.valueOf(1 / Math.pow(10, 45)), "10.5") + .build()); + + addDataTypeTestData( + TestDataHolder.builder() + .sourceType("decimal") + .airbyteType(JsonSchemaType.NUMBER) + .fullSourceDataType("decimal(10,3)") + .addInsertValues("0.188", "null") + .addExpectedValues("0.188", null) + .build()); + + addDataTypeTestData( + TestDataHolder.builder() + .sourceType("decimal") + .airbyteType(JsonSchemaType.NUMBER) + .fullSourceDataType("decimal(19,2)") + .addInsertValues("1700000.01") + .addExpectedValues("1700000.01") + .build()); + + addDataTypeTestData( + TestDataHolder.builder() + .sourceType("date") + .airbyteType(JsonSchemaType.STRING) + .addInsertValues("null", "'2021-01-01'") + .addExpectedValues(null, "2021-01-01T00:00:00Z") + .build()); + + addDataTypeTestData( + TestDataHolder.builder() + .sourceType("datetime") + .airbyteType(JsonSchemaType.STRING) + .addInsertValues("null", "'2005-10-10 23:22:21'") + .addExpectedValues(null, "2005-10-10T23:22:21.000000Z") + .build()); + + addDataTypeTestData( + TestDataHolder.builder() + .sourceType("timestamp") + 
.airbyteType(JsonSchemaType.STRING) + .addInsertValues("null", "'2021-01-00'", "'2021-00-00'", "'0000-00-00'") + .addExpectedValues(null, null, null, null) + .build()); + + addDataTypeTestData( + TestDataHolder.builder() + .sourceType("time") + .airbyteType(JsonSchemaType.STRING) + // JDBC driver can process only "clock"(00:00:00-23:59:59) values. + .addInsertValues("null", "'-23:59:59'", "'00:00:00'") + .addExpectedValues(null, "1970-01-01T23:59:59Z", "1970-01-01T00:00:00Z") + .build()); + + addDataTypeTestData( + TestDataHolder.builder() + .sourceType("year") + .airbyteType(JsonSchemaType.STRING) + // MySQL converts values in the ranges '0' - '69' to YEAR value in the range 2000 - 2069 + // and '70' - '99' to 1970 - 1999. + .addInsertValues("null", "'1997'", "'0'", "'50'", "'70'", "'80'", "'99'") + .addExpectedValues(null, "1997", "2000", "2050", "1970", "1980", "1999") + .build()); + + // char types can be string or binary, so they are tested separately + final Set charTypes = Stream.of(MysqlType.CHAR, MysqlType.VARCHAR) + .map(Enum::name) + .collect(Collectors.toSet()); + for (final String charType : charTypes) { + addDataTypeTestData( + TestDataHolder.builder() + .sourceType(charType) + .airbyteType(JsonSchemaType.STRING) + .fullSourceDataType(charType + "(63)") + .addInsertValues("null", "'Airbyte'", "'!\"#$%&\\'()*+,-./:;<=>?\\@[\\]^_\\`{|}~'") + .addExpectedValues(null, "Airbyte", "!\"#$%&'()*+,-./:;<=>?@[]^_`{|}~") + .build()); + + addDataTypeTestData( + TestDataHolder.builder() + .sourceType(charType) + .airbyteType(JsonSchemaType.STRING) + .fullSourceDataType(charType + "(63) character set utf16") + .addInsertValues("0xfffd") + .addExpectedValues("ļæ½") + .build()); + + addDataTypeTestData( + TestDataHolder.builder() + .sourceType(charType) + .airbyteType(JsonSchemaType.STRING) + .fullSourceDataType(charType + "(63) character set cp1251") + .addInsertValues("'тŠµŃŃ‚'") + .addExpectedValues("тŠµŃŃ‚") + .build()); + + // when charset is binary, return 
binary in base64 encoding in charset UTF-8 + addDataTypeTestData( + TestDataHolder.builder() + .sourceType(charType) + .airbyteType(JsonSchemaType.STRING_BASE_64) + .fullSourceDataType(charType + "(7) character set binary") + .addInsertValues("null", "'Airbyte'") + .addExpectedValues(null, "QWlyYnl0ZQ==") + .build()); + } + + final Set blobTypes = Stream + .of(MysqlType.TINYBLOB, MysqlType.BLOB, MysqlType.MEDIUMBLOB, MysqlType.LONGBLOB) + .map(Enum::name) + .collect(Collectors.toSet()); + for (final String blobType : blobTypes) { + addDataTypeTestData( + TestDataHolder.builder() + .sourceType(blobType) + .airbyteType(JsonSchemaType.STRING_BASE_64) + .addInsertValues("null", "'Airbyte'") + .addExpectedValues(null, "QWlyYnl0ZQ==") + .build()); + } + + // binary appends '\0' to the end of the string + addDataTypeTestData( + TestDataHolder.builder() + .sourceType(MysqlType.BINARY.name()) + .fullSourceDataType(MysqlType.BINARY.name() + "(10)") + .airbyteType(JsonSchemaType.STRING_BASE_64) + .addInsertValues("null", "'Airbyte'") + .addExpectedValues(null, "QWlyYnl0ZQAAAA==") + .build()); + + // varbinary does not append '\0' to the end of the string + addDataTypeTestData( + TestDataHolder.builder() + .sourceType(MysqlType.VARBINARY.name()) + .fullSourceDataType(MysqlType.VARBINARY.name() + "(10)") + .airbyteType(JsonSchemaType.STRING_BASE_64) + .addInsertValues("null", "'Airbyte'") + .addExpectedValues(null, "QWlyYnl0ZQ==") + .build()); + + addDataTypeTestData( + TestDataHolder.builder() + .sourceType(MysqlType.VARBINARY.name()) + .airbyteType(JsonSchemaType.STRING_BASE_64) + .fullSourceDataType(MysqlType.VARBINARY.name() + "(20000)") // size should be enough to save test.png + .addInsertValues("null", "'test'", "'тŠµŃŃ‚'", String.format("FROM_BASE64('%s')", getFileDataInBase64())) + .addExpectedValues(null, "dGVzdA==", "0YLQtdGB0YI=", getFileDataInBase64()) + .build()); + + final Set textTypes = Stream + .of(MysqlType.TINYTEXT, MysqlType.TEXT, MysqlType.MEDIUMTEXT, 
MysqlType.LONGTEXT) + .map(Enum::name) + .collect(Collectors.toSet()); + final String randomText = RandomStringUtils.random(50, true, true); + for (final String textType : textTypes) { + addDataTypeTestData( + TestDataHolder.builder() + .sourceType(textType) + .airbyteType(JsonSchemaType.STRING) + .addInsertValues("null", "'Airbyte'", String.format("'%s'", randomText)) + .addExpectedValues(null, "Airbyte", randomText) + .build()); + } + + addDataTypeTestData( + TestDataHolder.builder() + .sourceType("mediumtext") + .airbyteType(JsonSchemaType.STRING) + .addInsertValues(getLogString(1048000), "'test'") + .addExpectedValues(StringUtils.leftPad("0", 1048000, "0"), "test") + .build()); + + addDataTypeTestData( + TestDataHolder.builder() + .sourceType("json") + .airbyteType(JsonSchemaType.STRING) + .addInsertValues("null", "'{\"a\": 10, \"b\": 15}'") + .addExpectedValues(null, "{\"a\": 10, \"b\": 15}") + .build()); + + addDataTypeTestData( + TestDataHolder.builder() + .sourceType("enum") + .fullSourceDataType("ENUM('xs', 's', 'm', 'l', 'xl')") + .airbyteType(JsonSchemaType.STRING) + .addInsertValues("null", "'xs'", "'m'") + .addExpectedValues(null, "xs", "m") + .build()); + + addDataTypeTestData( + TestDataHolder.builder() + .sourceType("set") + .fullSourceDataType("SET('xs', 's', 'm', 'l', 'xl')") + .airbyteType(JsonSchemaType.STRING) + .addInsertValues("null", "'xs,s'", "'m,xl'") + .addExpectedValues(null, "xs,s", "m,xl") + .build()); + + } + + private String getLogString(final int length) { + final int maxLpadLength = 262144; + final StringBuilder stringBuilder = new StringBuilder("concat("); + final int fullChunks = length / maxLpadLength; + stringBuilder.append("lpad('0', 262144, '0'),".repeat(fullChunks)); + stringBuilder.append("lpad('0', ").append(length % maxLpadLength).append(", '0'))"); + return stringBuilder.toString(); + } + + private String getFileDataInBase64() { + final File file = new File(getClass().getClassLoader().getResource("test.png").getFile()); 
+ try { + return Base64.encodeBase64String(FileUtils.readFileToByteArray(file)); + } catch (final IOException e) { + LOGGER.error(String.format("Fail to read the file: %s. Error: %s", file.getAbsoluteFile(), e.getMessage())); + } + return null; + } + +} diff --git a/airbyte-integrations/connectors/source-mysql/src/test-integration/java/io/airbyte/integrations/source/mysql/CdcMySqlSourceAcceptanceTest.java b/airbyte-integrations/connectors/source-mysql/src/test-integration/java/io/airbyte/integrations/source/mysql/CdcMySqlSourceAcceptanceTest.java index 9903e6f018af..b23b8953fc82 100644 --- a/airbyte-integrations/connectors/source-mysql/src/test-integration/java/io/airbyte/integrations/source/mysql/CdcMySqlSourceAcceptanceTest.java +++ b/airbyte-integrations/connectors/source-mysql/src/test-integration/java/io/airbyte/integrations/source/mysql/CdcMySqlSourceAcceptanceTest.java @@ -5,11 +5,12 @@ package io.airbyte.integrations.source.mysql; import static io.airbyte.protocol.models.SyncMode.INCREMENTAL; -import static org.junit.jupiter.api.Assertions.assertDoesNotThrow; import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertThrows; import com.fasterxml.jackson.databind.JsonNode; import com.google.common.collect.ImmutableMap; +import com.google.common.collect.Iterables; import com.google.common.collect.Lists; import io.airbyte.commons.json.Jsons; import io.airbyte.db.Database; @@ -174,14 +175,12 @@ public void testIncrementalSyncFailedIfBinlogIsDeleted() throws Exception { // when we run incremental sync again there should be no new records. Run a sync with the latest // state message and assert no records were emitted. - final JsonNode latestState = stateMessages.get(stateMessages.size() - 1).getData(); + final JsonNode latestState = Jsons.jsonNode(supportsPerStream() ? 
stateMessages : List.of(Iterables.getLast(stateMessages))); // RESET MASTER removes all binary log files that are listed in the index file, // leaving only a single, empty binary log file with a numeric suffix of .000001 executeQuery("RESET MASTER;"); - // Uncaught exceptions are now handled by the AirbyteExceptionHandler, so - // it will not be thrown outside the connector execution. - assertDoesNotThrow(() -> filterRecords(runRead(configuredCatalog, latestState))); + assertThrows(Exception.class, () -> filterRecords(runRead(configuredCatalog, latestState))); } } diff --git a/airbyte-integrations/connectors/source-mysql/src/test-integration/java/io/airbyte/integrations/source/mysql/CdcMySqlSourceDatatypeTest.java b/airbyte-integrations/connectors/source-mysql/src/test-integration/java/io/airbyte/integrations/source/mysql/CdcMySqlSourceDatatypeTest.java index 781b156b6526..9fb7be7a2664 100644 --- a/airbyte-integrations/connectors/source-mysql/src/test-integration/java/io/airbyte/integrations/source/mysql/CdcMySqlSourceDatatypeTest.java +++ b/airbyte-integrations/connectors/source-mysql/src/test-integration/java/io/airbyte/integrations/source/mysql/CdcMySqlSourceDatatypeTest.java @@ -4,51 +4,26 @@ package io.airbyte.integrations.source.mysql; -import com.fasterxml.jackson.databind.JsonNode; import com.google.common.collect.ImmutableMap; import io.airbyte.commons.json.Jsons; import io.airbyte.db.Database; import io.airbyte.db.factory.DSLContextFactory; import io.airbyte.db.factory.DatabaseDriver; -import io.airbyte.integrations.standardtest.source.AbstractSourceDatabaseTypeTest; -import io.airbyte.integrations.standardtest.source.TestDataHolder; import io.airbyte.integrations.standardtest.source.TestDestinationEnv; -import io.airbyte.protocol.models.JsonSchemaType; -import java.io.File; -import java.io.IOException; -import org.apache.commons.codec.binary.Base64; -import org.apache.commons.io.FileUtils; -import org.apache.commons.lang3.StringUtils; import 
org.jooq.DSLContext; import org.jooq.SQLDialect; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; import org.testcontainers.containers.MySQLContainer; -public class CdcMySqlSourceDatatypeTest extends AbstractSourceDatabaseTypeTest { +public class CdcMySqlSourceDatatypeTest extends AbstractMySqlSourceDatatypeTest { - private static final Logger LOGGER = LoggerFactory.getLogger(CdcMySqlSourceDatatypeTest.class); - - private MySQLContainer container; - private JsonNode config; private DSLContext dslContext; - @Override - protected JsonNode getConfig() { - return config; - } - @Override protected void tearDown(final TestDestinationEnv testEnv) { dslContext.close(); container.close(); } - @Override - protected String getImageName() { - return "airbyte/source-mysql:dev"; - } - @Override protected Database setupDatabase() throws Exception { container = new MySQLContainer<>("mysql:8.0"); @@ -84,11 +59,6 @@ protected Database setupDatabase() throws Exception { return database; } - @Override - protected String getNameSpace() { - return container.getDatabaseName(); - } - private void revokeAllPermissions() { executeQuery("REVOKE ALL PRIVILEGES, GRANT OPTION FROM " + container.getUsername() + "@'%';"); } @@ -118,310 +88,4 @@ private void executeQuery(final String query) { } } - @Override - protected void initTests() { - addDataTypeTestData( - TestDataHolder.builder() - .sourceType("tinyint") - .airbyteType(JsonSchemaType.NUMBER) - .addInsertValues("null", "-128", "127") - .addExpectedValues(null, "-128", "127") - .build()); - - addDataTypeTestData( - TestDataHolder.builder() - .sourceType("smallint") - .airbyteType(JsonSchemaType.NUMBER) - .addInsertValues("null", "-32768", "32767") - .addExpectedValues(null, "-32768", "32767") - .build()); - - addDataTypeTestData( - TestDataHolder.builder() - .sourceType("smallint") - .airbyteType(JsonSchemaType.NUMBER) - .fullSourceDataType("smallint zerofill") - .addInsertValues("1") - .addExpectedValues("1") - .build()); - - 
addDataTypeTestData( - TestDataHolder.builder() - .sourceType("mediumint") - .airbyteType(JsonSchemaType.NUMBER) - .addInsertValues("null", "-8388608", "8388607") - .addExpectedValues(null, "-8388608", "8388607") - .build()); - - addDataTypeTestData( - TestDataHolder.builder() - .sourceType("mediumint") - .airbyteType(JsonSchemaType.NUMBER) - .fullSourceDataType("mediumint zerofill") - .addInsertValues("1") - .addExpectedValues("1") - .build()); - - addDataTypeTestData( - TestDataHolder.builder() - .sourceType("int") - .airbyteType(JsonSchemaType.NUMBER) - .addInsertValues("null", "-2147483648", "2147483647") - .addExpectedValues(null, "-2147483648", "2147483647") - .build()); - - addDataTypeTestData( - TestDataHolder.builder() - .sourceType("int") - .airbyteType(JsonSchemaType.NUMBER) - .fullSourceDataType("int unsigned") - .addInsertValues("3428724653") - .addExpectedValues("3428724653") - .build()); - - addDataTypeTestData( - TestDataHolder.builder() - .sourceType("int") - .airbyteType(JsonSchemaType.NUMBER) - .fullSourceDataType("int zerofill") - .addInsertValues("1") - .addExpectedValues("1") - .build()); - - addDataTypeTestData( - TestDataHolder.builder() - .sourceType("bigint") - .airbyteType(JsonSchemaType.NUMBER) - .addInsertValues("null", "9223372036854775807") - .addExpectedValues(null, "9223372036854775807") - .build()); - - addDataTypeTestData( - TestDataHolder.builder() - .sourceType("float") - .airbyteType(JsonSchemaType.NUMBER) - .addInsertValues("null", "10.5") - .addExpectedValues(null, "10.5") - .build()); - - addDataTypeTestData( - TestDataHolder.builder() - .sourceType("double") - .airbyteType(JsonSchemaType.NUMBER) - .addInsertValues("null", "power(10, 308)", "1/power(10, 45)", "10.5") - .addExpectedValues(null, String.valueOf(Math.pow(10, 308)), String.valueOf(1 / Math.pow(10, 45)), "10.5") - .build()); - - addDataTypeTestData( - TestDataHolder.builder() - .sourceType("decimal") - .airbyteType(JsonSchemaType.NUMBER) - 
.fullSourceDataType("decimal(10,4)") - .addInsertValues("0.188", "null") - .addExpectedValues("0.1880", null) - .build()); - - addDataTypeTestData( - TestDataHolder.builder() - .sourceType("decimal") - .airbyteType(JsonSchemaType.NUMBER) - .fullSourceDataType("decimal(19,2)") - .addInsertValues("1700000.00") - .addInsertValues("1700000.00") - .build()); - - addDataTypeTestData( - TestDataHolder.builder() - .sourceType("bit") - .airbyteType(JsonSchemaType.NUMBER) - .addInsertValues("null", "1", "0") - .addExpectedValues(null, "true", "false") - .build()); - - addDataTypeTestData( - TestDataHolder.builder() - .sourceType("date") - .airbyteType(JsonSchemaType.STRING) - .addInsertValues("null", "'2021-01-01'") - .addExpectedValues(null, "2021-01-01T00:00:00Z") - .build()); - - // Check Zero-date value for mandatory field - addDataTypeTestData( - TestDataHolder.builder() - .sourceType("date") - .fullSourceDataType("date not null") - .airbyteType(JsonSchemaType.STRING) - .addInsertValues("'0000-00-00'") - .addExpectedValues("1970-01-01T00:00:00Z") - .build()); - - addDataTypeTestData( - TestDataHolder.builder() - .sourceType("datetime") - .airbyteType(JsonSchemaType.STRING) - .addInsertValues("null", "'2005-10-10 23:22:21'") - .addExpectedValues(null, "2005-10-10T23:22:21.000000Z") - .build()); - - // Check Zero-date value for mandatory field - addDataTypeTestData( - TestDataHolder.builder() - .sourceType("datetime") - .fullSourceDataType("datetime not null") - .airbyteType(JsonSchemaType.STRING) - .addInsertValues("'0000-00-00 00:00:00'") - .addExpectedValues("1970-01-01T00:00:00Z") - .build()); - - addDataTypeTestData( - TestDataHolder.builder() - .sourceType("timestamp") - .airbyteType(JsonSchemaType.STRING) - .addInsertValues("null") - .addNullExpectedValue() - .build()); - - // Check Zero-date value for mandatory field - addDataTypeTestData( - TestDataHolder.builder() - .sourceType("timestamp") - .fullSourceDataType("timestamp not null") - 
.airbyteType(JsonSchemaType.STRING) - .addInsertValues("'0000-00-00 00:00:00.000000'") - .addExpectedValues("1970-01-01T00:00:00Z") - .build()); - - addDataTypeTestData( - TestDataHolder.builder() - .sourceType("time") - .airbyteType(JsonSchemaType.STRING) - // JDBC driver can process only "clock"(00:00:00-23:59:59) values. - // https://debezium.io/documentation/reference/connectors/mysql.html#mysql-temporal-types - .addInsertValues("null", "'-23:59:59'", "'00:00:00'") - .addExpectedValues(null, "1970-01-01T23:59:59Z", "1970-01-01T00:00:00Z") - .build()); - - addDataTypeTestData( - TestDataHolder.builder() - .sourceType("varchar") - .airbyteType(JsonSchemaType.STRING) - .fullSourceDataType("varchar(256) character set cp1251") - .addInsertValues("null", "'тŠµŃŃ‚'") - .addExpectedValues(null, "тŠµŃŃ‚") - .build()); - - addDataTypeTestData( - TestDataHolder.builder() - .sourceType("varchar") - .airbyteType(JsonSchemaType.STRING) - .fullSourceDataType("varchar(256) character set utf16") - .addInsertValues("null", "0xfffd") - .addExpectedValues(null, "ļæ½") - .build()); - - addDataTypeTestData( - TestDataHolder.builder() - .sourceType("varchar") - .airbyteType(JsonSchemaType.STRING) - .fullSourceDataType("varchar(256)") - .addInsertValues("null", "'!\"#$%&\\'()*+,-./:;<=>?\\@[\\]^_\\`{|}~'") - .addExpectedValues(null, "!\"#$%&'()*+,-./:;<=>?@[]^_`{|}~") - .build()); - - addDataTypeTestData( - TestDataHolder.builder() - .sourceType("varbinary") - .airbyteType(JsonSchemaType.STRING_BASE_64) - .fullSourceDataType("varbinary(20000)") //// size should be enough to save test.png - .addInsertValues("null", "'test'", "'тŠµŃŃ‚'", String.format("FROM_BASE64('%s')", getFileDataInBase64())) - .addExpectedValues(null, "dGVzdA==", "0YLQtdGB0YI=", getFileDataInBase64()) - .build()); - - addDataTypeTestData( - TestDataHolder.builder() - .sourceType("blob") - .airbyteType(JsonSchemaType.STRING_BASE_64) - .addInsertValues("null", "'test'", "'тŠµŃŃ‚'", 
String.format("FROM_BASE64('%s')", getFileDataInBase64())) - .addExpectedValues(null, "dGVzdA==", "0YLQtdGB0YI=", getFileDataInBase64()) - .build()); - - addDataTypeTestData( - TestDataHolder.builder() - .sourceType("mediumtext") - .airbyteType(JsonSchemaType.STRING) - .addInsertValues(getLogString(1048000), "'test'", "'тŠµŃŃ‚'") - .addExpectedValues(StringUtils.leftPad("0", 1048000, "0"), "test", "тŠµŃŃ‚") - .build()); - - addDataTypeTestData( - TestDataHolder.builder() - .sourceType("tinytext") - .airbyteType(JsonSchemaType.STRING) - .addInsertValues("null", "'test'", "'тŠµŃŃ‚'") - .addExpectedValues(null, "test", "тŠµŃŃ‚") - .build()); - - addDataTypeTestData( - TestDataHolder.builder() - .sourceType("longtext") - .airbyteType(JsonSchemaType.STRING) - .addInsertValues("null", "'test'", "'тŠµŃŃ‚'") - .addExpectedValues(null, "test", "тŠµŃŃ‚") - .build()); - - addDataTypeTestData( - TestDataHolder.builder() - .sourceType("text") - .airbyteType(JsonSchemaType.STRING) - .addInsertValues("null", "'test'", "'тŠµŃŃ‚'") - .addExpectedValues(null, "test", "тŠµŃŃ‚") - .build()); - - addDataTypeTestData( - TestDataHolder.builder() - .sourceType("json") - .airbyteType(JsonSchemaType.STRING) - .addInsertValues("null", "'{\"a\": 10, \"b\": 15}'") - .addExpectedValues(null, "{\"a\": 10, \"b\": 15}") - .build()); - - addDataTypeTestData( - TestDataHolder.builder() - .sourceType("point") - .airbyteType(JsonSchemaType.OBJECT) - .addInsertValues("null", "(ST_GeomFromText('POINT(1 1)'))") - .build()); - - addDataTypeTestData( - TestDataHolder.builder() - .sourceType("bool") - .airbyteType(JsonSchemaType.STRING) - .addInsertValues("null", "1", "0") - .addExpectedValues(null, "true", "false") - .build()); - - } - - private String getLogString(final int length) { - final int maxLpadLength = 262144; - final StringBuilder stringBuilder = new StringBuilder("concat("); - final int fullChunks = length / maxLpadLength; - for (int i = 1; i <= fullChunks; i++) { - 
stringBuilder.append("lpad('0', 262144, '0'),"); - } - stringBuilder.append("lpad('0', ").append(length % maxLpadLength).append(", '0'))"); - return stringBuilder.toString(); - } - - private String getFileDataInBase64() { - final File file = new File(getClass().getClassLoader().getResource("test.png").getFile()); - try { - return Base64.encodeBase64String(FileUtils.readFileToByteArray(file)); - } catch (final IOException e) { - LOGGER.error(String.format("Fail to read the file: %s. Error: %s", file.getAbsoluteFile(), e.getMessage())); - } - return null; - } - } diff --git a/airbyte-integrations/connectors/source-mysql/src/test-integration/java/io/airbyte/integrations/source/mysql/MySqlSourceDatatypeTest.java b/airbyte-integrations/connectors/source-mysql/src/test-integration/java/io/airbyte/integrations/source/mysql/MySqlSourceDatatypeTest.java index c3267de852f9..839c55ebbda7 100644 --- a/airbyte-integrations/connectors/source-mysql/src/test-integration/java/io/airbyte/integrations/source/mysql/MySqlSourceDatatypeTest.java +++ b/airbyte-integrations/connectors/source-mysql/src/test-integration/java/io/airbyte/integrations/source/mysql/MySqlSourceDatatypeTest.java @@ -4,55 +4,24 @@ package io.airbyte.integrations.source.mysql; -import com.fasterxml.jackson.databind.JsonNode; import com.google.common.collect.ImmutableMap; -import com.mysql.cj.MysqlType; import io.airbyte.commons.json.Jsons; import io.airbyte.db.Database; import io.airbyte.db.factory.DSLContextFactory; import io.airbyte.db.factory.DatabaseDriver; import io.airbyte.integrations.source.mysql.MySqlSource.ReplicationMethod; -import io.airbyte.integrations.standardtest.source.AbstractSourceDatabaseTypeTest; -import io.airbyte.integrations.standardtest.source.TestDataHolder; import io.airbyte.integrations.standardtest.source.TestDestinationEnv; -import io.airbyte.protocol.models.JsonSchemaType; -import java.io.File; -import java.io.IOException; import java.util.Map; -import java.util.Set; -import 
java.util.stream.Collectors; -import java.util.stream.Stream; -import org.apache.commons.codec.binary.Base64; -import org.apache.commons.io.FileUtils; -import org.apache.commons.lang3.RandomStringUtils; -import org.apache.commons.lang3.StringUtils; import org.jooq.SQLDialect; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; import org.testcontainers.containers.MySQLContainer; -public class MySqlSourceDatatypeTest extends AbstractSourceDatabaseTypeTest { - - private static final Logger LOGGER = LoggerFactory.getLogger(MySqlSourceDatatypeTest.class); - - private MySQLContainer container; - private JsonNode config; - - @Override - protected JsonNode getConfig() { - return config; - } +public class MySqlSourceDatatypeTest extends AbstractMySqlSourceDatatypeTest { @Override protected void tearDown(final TestDestinationEnv testEnv) { container.close(); } - @Override - protected String getImageName() { - return "airbyte/source-mysql:dev"; - } - @Override protected Database setupDatabase() throws Exception { container = new MySQLContainer<>("mysql:8.0"); @@ -86,405 +55,9 @@ protected Database setupDatabase() throws Exception { return database; } - @Override - protected String getNameSpace() { - return container.getDatabaseName(); - } - @Override public boolean testCatalog() { return true; } - @Override - protected void initTests() { - // bit defaults to bit(1), which is equivalent to boolean - addDataTypeTestData( - TestDataHolder.builder() - .sourceType("bit") - .airbyteType(JsonSchemaType.BOOLEAN) - .addInsertValues("null", "1", "0") - .addExpectedValues(null, "true", "false") - .build()); - - // bit(1) is equivalent to boolean - addDataTypeTestData( - TestDataHolder.builder() - .sourceType("bit") - .fullSourceDataType("bit(1)") - .airbyteType(JsonSchemaType.BOOLEAN) - .addInsertValues("null", "1", "0") - .addExpectedValues(null, "true", "false") - .build()); - - // bit(>1) is binary - addDataTypeTestData( - TestDataHolder.builder() - .sourceType("bit") - 
.fullSourceDataType("bit(7)") - .airbyteType(JsonSchemaType.STRING_BASE_64) - // 1000001 is binary for A - .addInsertValues("null", "b'1000001'") - // QQo= is base64 encoding in charset UTF-8 for A - .addExpectedValues(null, "QQ==") - .build()); - - // tinyint without width - addDataTypeTestData( - TestDataHolder.builder() - .sourceType("tinyint") - .airbyteType(JsonSchemaType.NUMBER) - .addInsertValues("null", "-128", "127") - .addExpectedValues(null, "-128", "127") - .build()); - - // tinyint(1) is equivalent to boolean - addDataTypeTestData( - TestDataHolder.builder() - .sourceType("tinyint") - .fullSourceDataType("tinyint(1)") - .airbyteType(JsonSchemaType.BOOLEAN) - .addInsertValues("null", "1", "0") - .addExpectedValues(null, "true", "false") - .build()); - - addDataTypeTestData( - TestDataHolder.builder() - .sourceType("tinyint") - .fullSourceDataType("tinyint(2)") - .airbyteType(JsonSchemaType.NUMBER) - .addInsertValues("null", "-128", "127") - .addExpectedValues(null, "-128", "127") - .build()); - - final Set booleanTypes = Set.of("BOOLEAN", "BOOL"); - for (final String booleanType : booleanTypes) { - addDataTypeTestData( - TestDataHolder.builder() - .sourceType(booleanType) - .airbyteType(JsonSchemaType.BOOLEAN) - // MySql booleans are tinyint(1), and only 1 is true - .addInsertValues("null", "1", "0", "127", "-128") - .addExpectedValues(null, "true", "false", "false", "false") - .build()); - } - - addDataTypeTestData( - TestDataHolder.builder() - .sourceType("smallint") - .airbyteType(JsonSchemaType.NUMBER) - .addInsertValues("null", "-32768", "32767") - .addExpectedValues(null, "-32768", "32767") - .build()); - - addDataTypeTestData( - TestDataHolder.builder() - .sourceType("smallint") - .airbyteType(JsonSchemaType.NUMBER) - .fullSourceDataType("smallint zerofill") - .addInsertValues("1") - .addExpectedValues("1") - .build()); - - addDataTypeTestData( - TestDataHolder.builder() - .sourceType("smallint") - .airbyteType(JsonSchemaType.NUMBER) - 
.fullSourceDataType("smallint unsigned") - .addInsertValues("null", "0", "65535") - .addExpectedValues(null, "0", "65535") - .build()); - - addDataTypeTestData( - TestDataHolder.builder() - .sourceType("mediumint") - .airbyteType(JsonSchemaType.NUMBER) - .addInsertValues("null", "-8388608", "8388607") - .addExpectedValues(null, "-8388608", "8388607") - .build()); - - addDataTypeTestData( - TestDataHolder.builder() - .sourceType("mediumint") - .airbyteType(JsonSchemaType.NUMBER) - .fullSourceDataType("mediumint zerofill") - .addInsertValues("1") - .addExpectedValues("1") - .build()); - - addDataTypeTestData( - TestDataHolder.builder() - .sourceType("int") - .airbyteType(JsonSchemaType.NUMBER) - .addInsertValues("null", "-2147483648", "2147483647") - .addExpectedValues(null, "-2147483648", "2147483647") - .build()); - - addDataTypeTestData( - TestDataHolder.builder() - .sourceType("int") - .airbyteType(JsonSchemaType.NUMBER) - .fullSourceDataType("int unsigned") - .addInsertValues("3428724653") - .addExpectedValues("3428724653") - .build()); - - addDataTypeTestData( - TestDataHolder.builder() - .sourceType("int") - .airbyteType(JsonSchemaType.NUMBER) - .fullSourceDataType("int zerofill") - .addInsertValues("1") - .addExpectedValues("1") - .build()); - - addDataTypeTestData( - TestDataHolder.builder() - .sourceType("bigint") - .airbyteType(JsonSchemaType.NUMBER) - .addInsertValues("null", "9223372036854775807") - .addExpectedValues(null, "9223372036854775807") - .build()); - - addDataTypeTestData( - TestDataHolder.builder() - .sourceType("float") - .airbyteType(JsonSchemaType.NUMBER) - .addInsertValues("null", "10.5") - .addExpectedValues(null, "10.5") - .build()); - - addDataTypeTestData( - TestDataHolder.builder() - .sourceType("double") - .airbyteType(JsonSchemaType.NUMBER) - .addInsertValues("null", "power(10, 308)", "1/power(10, 45)", "10.5") - .addExpectedValues(null, String.valueOf(Math.pow(10, 308)), String.valueOf(1 / Math.pow(10, 45)), "10.5") - .build()); - 
- addDataTypeTestData( - TestDataHolder.builder() - .sourceType("decimal") - .airbyteType(JsonSchemaType.NUMBER) - .fullSourceDataType("decimal(10,4)") - .addInsertValues("0.188", "null") - .addExpectedValues("0.188", null) - .build()); - - addDataTypeTestData( - TestDataHolder.builder() - .sourceType("decimal") - .airbyteType(JsonSchemaType.NUMBER) - .fullSourceDataType("decimal(19,2)") - .addInsertValues("1700000.01") - .addExpectedValues("1700000.01") - .build()); - - addDataTypeTestData( - TestDataHolder.builder() - .sourceType("date") - .airbyteType(JsonSchemaType.STRING) - .addInsertValues("null", "'2021-01-01'") - .addExpectedValues(null, "2021-01-01T00:00:00Z") - .build()); - - addDataTypeTestData( - TestDataHolder.builder() - .sourceType("datetime") - .airbyteType(JsonSchemaType.STRING) - .addInsertValues("null", "'2005-10-10 23:22:21'") - .addExpectedValues(null, "2005-10-10T23:22:21.000000Z") - .build()); - - addDataTypeTestData( - TestDataHolder.builder() - .sourceType("timestamp") - .airbyteType(JsonSchemaType.STRING) - .addInsertValues("null", "'2021-01-00'", "'2021-00-00'", "'0000-00-00'") - .addExpectedValues(null, null, null, null) - .build()); - - addDataTypeTestData( - TestDataHolder.builder() - .sourceType("time") - .airbyteType(JsonSchemaType.STRING) - // JDBC driver can process only "clock"(00:00:00-23:59:59) values. - .addInsertValues("null", "'-23:59:59'", "'00:00:00'") - .addExpectedValues(null, "1970-01-01T23:59:59Z", "1970-01-01T00:00:00Z") - .build()); - - addDataTypeTestData( - TestDataHolder.builder() - .sourceType("year") - .airbyteType(JsonSchemaType.STRING) - // MySQL converts values in the ranges '0' - '69' to YEAR value in the range 2000 - 2069 - // and '70' - '99' to 1970 - 1999. 
- .addInsertValues("null", "'1997'", "'0'", "'50'", "'70'", "'80'", "'99'") - .addExpectedValues(null, "1997", "2000", "2050", "1970", "1980", "1999") - .build()); - - // char types can be string or binary, so they are tested separately - final Set charTypes = Stream.of(MysqlType.CHAR, MysqlType.VARCHAR) - .map(Enum::name) - .collect(Collectors.toSet()); - for (final String charType : charTypes) { - addDataTypeTestData( - TestDataHolder.builder() - .sourceType(charType) - .airbyteType(JsonSchemaType.STRING) - .fullSourceDataType(charType + "(63)") - .addInsertValues("null", "'Airbyte'", "'!\"#$%&\\'()*+,-./:;<=>?\\@[\\]^_\\`{|}~'") - .addExpectedValues(null, "Airbyte", "!\"#$%&'()*+,-./:;<=>?@[]^_`{|}~") - .build()); - - addDataTypeTestData( - TestDataHolder.builder() - .sourceType(charType) - .airbyteType(JsonSchemaType.STRING) - .fullSourceDataType(charType + "(63) character set utf16") - .addInsertValues("0xfffd") - .addExpectedValues("ļæ½") - .build()); - - addDataTypeTestData( - TestDataHolder.builder() - .sourceType(charType) - .airbyteType(JsonSchemaType.STRING) - .fullSourceDataType(charType + "(63) character set cp1251") - .addInsertValues("'тŠµŃŃ‚'") - .addExpectedValues("тŠµŃŃ‚") - .build()); - - // when charset is binary, return binary in base64 encoding in charset UTF-8 - addDataTypeTestData( - TestDataHolder.builder() - .sourceType(charType) - .airbyteType(JsonSchemaType.STRING_BASE_64) - .fullSourceDataType(charType + "(7) character set binary") - .addInsertValues("null", "'Airbyte'") - .addExpectedValues(null, "QWlyYnl0ZQ==") - .build()); - } - - final Set blobTypes = Stream - .of(MysqlType.TINYBLOB, MysqlType.BLOB, MysqlType.MEDIUMBLOB, MysqlType.LONGBLOB) - .map(Enum::name) - .collect(Collectors.toSet()); - for (final String blobType : blobTypes) { - addDataTypeTestData( - TestDataHolder.builder() - .sourceType(blobType) - .airbyteType(JsonSchemaType.STRING_BASE_64) - .addInsertValues("null", "'Airbyte'") - .addExpectedValues(null, 
"QWlyYnl0ZQ==") - .build()); - } - - // binary appends '\0' to the end of the string - addDataTypeTestData( - TestDataHolder.builder() - .sourceType(MysqlType.BINARY.name()) - .fullSourceDataType(MysqlType.BINARY.name() + "(10)") - .airbyteType(JsonSchemaType.STRING_BASE_64) - .addInsertValues("null", "'Airbyte'") - .addExpectedValues(null, "QWlyYnl0ZQAAAA==") - .build()); - - // varbinary does not append '\0' to the end of the string - addDataTypeTestData( - TestDataHolder.builder() - .sourceType(MysqlType.VARBINARY.name()) - .fullSourceDataType(MysqlType.VARBINARY.name() + "(10)") - .airbyteType(JsonSchemaType.STRING_BASE_64) - .addInsertValues("null", "'Airbyte'") - .addExpectedValues(null, "QWlyYnl0ZQ==") - .build()); - - addDataTypeTestData( - TestDataHolder.builder() - .sourceType(MysqlType.VARBINARY.name()) - .airbyteType(JsonSchemaType.STRING_BASE_64) - .fullSourceDataType(MysqlType.VARBINARY.name() + "(20000)") // size should be enough to save test.png - .addInsertValues("null", "'test'", "'тŠµŃŃ‚'", String.format("FROM_BASE64('%s')", getFileDataInBase64())) - .addExpectedValues(null, "dGVzdA==", "0YLQtdGB0YI=", getFileDataInBase64()) - .build()); - - final Set textTypes = Stream - .of(MysqlType.TINYTEXT, MysqlType.TEXT, MysqlType.MEDIUMTEXT, MysqlType.LONGTEXT) - .map(Enum::name) - .collect(Collectors.toSet()); - final String randomText = RandomStringUtils.random(50, true, true); - for (final String textType : textTypes) { - addDataTypeTestData( - TestDataHolder.builder() - .sourceType(textType) - .airbyteType(JsonSchemaType.STRING) - .addInsertValues("null", "'Airbyte'", String.format("'%s'", randomText)) - .addExpectedValues(null, "Airbyte", randomText) - .build()); - } - - addDataTypeTestData( - TestDataHolder.builder() - .sourceType("mediumtext") - .airbyteType(JsonSchemaType.STRING) - .addInsertValues(getLogString(1048000), "'test'") - .addExpectedValues(StringUtils.leftPad("0", 1048000, "0"), "test") - .build()); - - addDataTypeTestData( - 
TestDataHolder.builder() - .sourceType("json") - .airbyteType(JsonSchemaType.STRING) - .addInsertValues("null", "'{\"a\": 10, \"b\": 15}'") - .addExpectedValues(null, "{\"a\": 10, \"b\": 15}") - .build()); - - addDataTypeTestData( - TestDataHolder.builder() - .sourceType("enum") - .fullSourceDataType("ENUM('xs', 's', 'm', 'l', 'xl')") - .airbyteType(JsonSchemaType.STRING) - .addInsertValues("null", "'xs'", "'m'") - .addExpectedValues(null, "xs", "m") - .build()); - - addDataTypeTestData( - TestDataHolder.builder() - .sourceType("set") - .fullSourceDataType("SET('xs', 's', 'm', 'l', 'xl')") - .airbyteType(JsonSchemaType.STRING) - .addInsertValues("null", "'xs,s'", "'m,xl'") - .addExpectedValues(null, "xs,s", "m,xl") - .build()); - - addDataTypeTestData( - TestDataHolder.builder() - .sourceType("point") - .airbyteType(JsonSchemaType.STRING_BASE_64) - .addInsertValues("null", "(ST_GeomFromText('POINT(19 43)'))") - .addExpectedValues(null, "AAAAAAEBAAAAAAAAAAAAM0AAAAAAAIBFQA==") - .build()); - - } - - private String getLogString(final int length) { - final int maxLpadLength = 262144; - final StringBuilder stringBuilder = new StringBuilder("concat("); - final int fullChunks = length / maxLpadLength; - stringBuilder.append("lpad('0', 262144, '0'),".repeat(fullChunks)); - stringBuilder.append("lpad('0', ").append(length % maxLpadLength).append(", '0'))"); - return stringBuilder.toString(); - } - - private String getFileDataInBase64() { - final File file = new File(getClass().getClassLoader().getResource("test.png").getFile()); - try { - return Base64.encodeBase64String(FileUtils.readFileToByteArray(file)); - } catch (final IOException e) { - LOGGER.error(String.format("Fail to read the file: %s. 
Error: %s", file.getAbsoluteFile(), e.getMessage())); - } - return null; - } - } diff --git a/airbyte-integrations/connectors/source-openweather/Dockerfile b/airbyte-integrations/connectors/source-openweather/Dockerfile index b344b066bd47..264f36fd53b7 100644 --- a/airbyte-integrations/connectors/source-openweather/Dockerfile +++ b/airbyte-integrations/connectors/source-openweather/Dockerfile @@ -34,5 +34,5 @@ COPY source_openweather ./source_openweather ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] -LABEL io.airbyte.version=0.1.4 +LABEL io.airbyte.version=0.1.5 LABEL io.airbyte.name=airbyte/source-openweather diff --git a/airbyte-integrations/connectors/source-oracle-strict-encrypt/build.gradle b/airbyte-integrations/connectors/source-oracle-strict-encrypt/build.gradle index d110d186ae93..ed3602b5cf9a 100644 --- a/airbyte-integrations/connectors/source-oracle-strict-encrypt/build.gradle +++ b/airbyte-integrations/connectors/source-oracle-strict-encrypt/build.gradle @@ -27,7 +27,7 @@ dependencies { testImplementation project(':airbyte-test-utils') testImplementation 'org.apache.commons:commons-lang3:3.11' - testImplementation libs.testcontainers.oracle.xe + testImplementation libs.connectors.source.testcontainers.oracle.xe integrationTestJavaImplementation project(':airbyte-integrations:bases:standard-source-test') diff --git a/airbyte-integrations/connectors/source-oracle-strict-encrypt/src/test/java/io/airbyte/integrations/source/oracle_strict_encrypt/OracleStrictEncryptJdbcSourceAcceptanceTest.java b/airbyte-integrations/connectors/source-oracle-strict-encrypt/src/test/java/io/airbyte/integrations/source/oracle_strict_encrypt/OracleStrictEncryptJdbcSourceAcceptanceTest.java index 4451aa0e6b4a..e82d20cc358d 100644 --- 
a/airbyte-integrations/connectors/source-oracle-strict-encrypt/src/test/java/io/airbyte/integrations/source/oracle_strict_encrypt/OracleStrictEncryptJdbcSourceAcceptanceTest.java +++ b/airbyte-integrations/connectors/source-oracle-strict-encrypt/src/test/java/io/airbyte/integrations/source/oracle_strict_encrypt/OracleStrictEncryptJdbcSourceAcceptanceTest.java @@ -77,7 +77,8 @@ static void init() { ID_VALUE_5 = new BigDecimal(5); ORACLE_DB = new OracleContainer("epiclabs/docker-oracle-xe-11g") - .withEnv("NLS_DATE_FORMAT", "YYYY-MM-DD"); + .withEnv("NLS_DATE_FORMAT", "YYYY-MM-DD") + .withEnv("RELAX_SECURITY", "1"); ORACLE_DB.start(); } @@ -115,6 +116,17 @@ public void tearDownOracle() throws Exception { Thread.sleep(1000); } + protected void incrementalDateCheck() throws Exception { + // https://stackoverflow.com/questions/47712930/resultset-meta-data-return-timestamp-instead-of-date-oracle-jdbc + // Oracle DATE is a java.sql.Timestamp (java.sql.Types.TIMESTAMP) as far as JDBC (and the SQL + // standard) is concerned as it has both a date and time component. 
+ incrementalCursorCheck( + COL_UPDATED_AT, + "2005-10-18T00:00:00.000000Z", + "2006-10-19T00:00:00.000000Z", + Lists.newArrayList(getTestMessages().get(1), getTestMessages().get(2))); + } + void cleanUpTables() throws SQLException { final Connection conn = DriverManager.getConnection( ORACLE_DB.getJdbcUrl(), diff --git a/airbyte-integrations/connectors/source-oracle/Dockerfile b/airbyte-integrations/connectors/source-oracle/Dockerfile index cfc0c7aeaa39..f543c204501b 100644 --- a/airbyte-integrations/connectors/source-oracle/Dockerfile +++ b/airbyte-integrations/connectors/source-oracle/Dockerfile @@ -8,5 +8,5 @@ ENV TZ UTC COPY build/distributions/${APPLICATION}*.tar ${APPLICATION}.tar RUN tar xf ${APPLICATION}.tar --strip-components=1 -LABEL io.airbyte.version=0.3.15 +LABEL io.airbyte.version=0.3.17 LABEL io.airbyte.name=airbyte/source-oracle diff --git a/airbyte-integrations/connectors/source-oracle/build.gradle b/airbyte-integrations/connectors/source-oracle/build.gradle index d9167eabe309..1b961a1f51b6 100644 --- a/airbyte-integrations/connectors/source-oracle/build.gradle +++ b/airbyte-integrations/connectors/source-oracle/build.gradle @@ -23,10 +23,11 @@ dependencies { implementation "com.oracle.database.jdbc:ojdbc8-production:19.7.0.0" testImplementation testFixtures(project(':airbyte-integrations:connectors:source-jdbc')) + testImplementation project(":airbyte-json-validation") testImplementation project(':airbyte-test-utils') testImplementation 'org.apache.commons:commons-lang3:3.11' - testImplementation 'org.testcontainers:oracle-xe:1.16.0' + testImplementation libs.connectors.source.testcontainers.oracle.xe integrationTestJavaImplementation project(':airbyte-integrations:bases:standard-source-test') diff --git a/airbyte-integrations/connectors/source-oracle/src/main/java/io/airbyte/integrations/source/oracle/OracleSource.java b/airbyte-integrations/connectors/source-oracle/src/main/java/io/airbyte/integrations/source/oracle/OracleSource.java index 
ebac714f5b27..2ef6e39d1b45 100644 --- a/airbyte-integrations/connectors/source-oracle/src/main/java/io/airbyte/integrations/source/oracle/OracleSource.java +++ b/airbyte-integrations/connectors/source-oracle/src/main/java/io/airbyte/integrations/source/oracle/OracleSource.java @@ -93,6 +93,11 @@ public JsonNode toDatabaseConfig(final JsonNode config) { schemas.add(schema.asText()); } } + + if (config.get("jdbc_url_params") != null && !config.get("jdbc_url_params").asText().isEmpty()) { + additionalParameters.addAll(List.of(config.get("jdbc_url_params").asText().split("&"))); + } + if (!additionalParameters.isEmpty()) { final String connectionParams = String.join(getJdbcParameterDelimiter(), additionalParameters); configBuilder.put("connection_properties", connectionParams); diff --git a/airbyte-integrations/connectors/source-oracle/src/main/resources/spec.json b/airbyte-integrations/connectors/source-oracle/src/main/resources/spec.json index 31e822b91c16..4ad4d6aecc65 100644 --- a/airbyte-integrations/connectors/source-oracle/src/main/resources/spec.json +++ b/airbyte-integrations/connectors/source-oracle/src/main/resources/spec.json @@ -10,7 +10,8 @@ "host": { "title": "Host", "description": "Hostname of the database.", - "type": "string" + "type": "string", + "order": 1 }, "port": { "title": "Port", @@ -18,22 +19,26 @@ "type": "integer", "minimum": 0, "maximum": 65536, - "default": 1521 + "default": 1521, + "order": 2 }, "sid": { "title": "SID (Oracle System Identifier)", - "type": "string" + "type": "string", + "order": 3 }, "username": { "title": "User", "description": "The username which is used to access the database.", - "type": "string" + "type": "string", + "order": 4 }, "password": { "title": "Password", "description": "The password associated with the username.", "type": "string", - "airbyte_secret": true + "airbyte_secret": true, + "order": 5 }, "schemas": { "title": "Schemas", @@ -43,13 +48,20 @@ "type": "string" }, "minItems": 1, - "uniqueItems": true 
+ "uniqueItems": true, + "order": 6 + }, + "jdbc_url_params": { + "title": "JDBC URL Params", + "description": "Additional properties to pass to the JDBC URL string when connecting to the database formatted as 'key=value' pairs separated by the symbol '&'. (example: key1=value1&key2=value2&key3=value3).", + "type": "string", + "order": 7 }, "encryption": { "title": "Encryption", "type": "object", "description": "The encryption method with is used when communicating with the database.", - "order": 6, + "order": 8, "oneOf": [ { "title": "Unencrypted", diff --git a/airbyte-integrations/connectors/source-oracle/src/test-integration/java/io/airbyte/integrations/source/oracle/AbstractSshOracleSourceAcceptanceTest.java b/airbyte-integrations/connectors/source-oracle/src/test-integration/java/io/airbyte/integrations/source/oracle/AbstractSshOracleSourceAcceptanceTest.java index 14b201f69af1..dc96e6b1f3ef 100644 --- a/airbyte-integrations/connectors/source-oracle/src/test-integration/java/io/airbyte/integrations/source/oracle/AbstractSshOracleSourceAcceptanceTest.java +++ b/airbyte-integrations/connectors/source-oracle/src/test-integration/java/io/airbyte/integrations/source/oracle/AbstractSshOracleSourceAcceptanceTest.java @@ -34,6 +34,7 @@ public abstract class AbstractSshOracleSourceAcceptanceTest extends SourceAccept private static final String STREAM_NAME = "JDBC_SPACE.ID_AND_NAME"; private static final String STREAM_NAME2 = "JDBC_SPACE.STARSHIPS"; + private static final Network network = Network.newNetwork(); private final SshBastionContainer sshBastionContainer = new SshBastionContainer(); private OracleContainer db; @@ -84,7 +85,7 @@ protected void tearDown(final TestDestinationEnv testEnv) { } private void startTestContainers() { - sshBastionContainer.initAndStartBastion(); + sshBastionContainer.initAndStartBastion(network); initAndStartJdbcContainer(); } @@ -93,7 +94,7 @@ private void initAndStartJdbcContainer() { .withUsername("test") .withPassword("oracle") 
.usingSid() - .withNetwork(sshBastionContainer.getNetWork());; + .withNetwork(network); db.start(); } @@ -111,7 +112,7 @@ public ImmutableMap.Builder getBasicOracleDbConfigBuider(final O return ImmutableMap.builder() .put("host", Objects.requireNonNull(db.getContainerInfo().getNetworkSettings() .getNetworks() - .get(((Network.NetworkImpl) sshBastionContainer.getNetWork()).getName()) + .get(((Network.NetworkImpl) network).getName()) .getIpAddress())) .put("username", db.getUsername()) .put("password", db.getPassword()) diff --git a/airbyte-integrations/connectors/source-oracle/src/test-integration/java/io/airbyte/integrations/source/oracle/OracleSourceDatatypeTest.java b/airbyte-integrations/connectors/source-oracle/src/test-integration/java/io/airbyte/integrations/source/oracle/OracleSourceDatatypeTest.java index d91292df0300..af2e4262575f 100644 --- a/airbyte-integrations/connectors/source-oracle/src/test-integration/java/io/airbyte/integrations/source/oracle/OracleSourceDatatypeTest.java +++ b/airbyte-integrations/connectors/source-oracle/src/test-integration/java/io/airbyte/integrations/source/oracle/OracleSourceDatatypeTest.java @@ -37,7 +37,8 @@ public class OracleSourceDatatypeTest extends AbstractSourceDatabaseTypeTest { @Override protected Database setupDatabase() throws Exception { - container = new OracleContainer("epiclabs/docker-oracle-xe-11g"); + container = new OracleContainer("epiclabs/docker-oracle-xe-11g") + .withEnv("RELAX_SECURITY", "1"); container.start(); config = Jsons.jsonNode(ImmutableMap.builder() diff --git a/airbyte-integrations/connectors/source-oracle/src/test/java/io/airbyte/integrations/source/oracle/OracleJdbcSourceAcceptanceTest.java b/airbyte-integrations/connectors/source-oracle/src/test/java/io/airbyte/integrations/source/oracle/OracleJdbcSourceAcceptanceTest.java index 8ee02fc72054..c50f7679bb69 100644 --- 
a/airbyte-integrations/connectors/source-oracle/src/test/java/io/airbyte/integrations/source/oracle/OracleJdbcSourceAcceptanceTest.java +++ b/airbyte-integrations/connectors/source-oracle/src/test/java/io/airbyte/integrations/source/oracle/OracleJdbcSourceAcceptanceTest.java @@ -5,6 +5,7 @@ package io.airbyte.integrations.source.oracle; import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertArrayEquals; import static org.junit.jupiter.api.Assertions.assertTrue; import com.fasterxml.jackson.databind.JsonNode; @@ -23,6 +24,7 @@ import io.airbyte.protocol.models.AirbyteMessage.Type; import io.airbyte.protocol.models.AirbyteRecordMessage; import io.airbyte.protocol.models.AirbyteStateMessage; +import io.airbyte.protocol.models.AirbyteStateMessage.AirbyteStateType; import io.airbyte.protocol.models.ConfiguredAirbyteCatalog; import io.airbyte.protocol.models.ConnectorSpecification; import io.airbyte.protocol.models.DestinationSyncMode; @@ -76,7 +78,8 @@ static void init() { ID_VALUE_5 = new BigDecimal(5); ORACLE_DB = new OracleContainer("epiclabs/docker-oracle-xe-11g") - .withEnv("NLS_DATE_FORMAT", "YYYY-MM-DD"); + .withEnv("NLS_DATE_FORMAT", "YYYY-MM-DD") + .withEnv("RELAX_SECURITY", "1"); ORACLE_DB.start(); } @@ -97,6 +100,17 @@ public void setup() throws Exception { super.setup(); } + protected void incrementalDateCheck() throws Exception { + // https://stackoverflow.com/questions/47712930/resultset-meta-data-return-timestamp-instead-of-date-oracle-jdbc + // Oracle DATE is a java.sql.Timestamp (java.sql.Types.TIMESTAMP) as far as JDBC (and the SQL + // standard) is concerned as it has both a date and time component. 
+ incrementalCursorCheck( + COL_UPDATED_AT, + "2005-10-18T00:00:00.000000Z", + "2006-10-19T00:00:00.000000Z", + Lists.newArrayList(getTestMessages().get(1), getTestMessages().get(2))); + } + @AfterEach public void tearDownOracle() throws Exception { // ORA-12519 @@ -215,6 +229,7 @@ void testReadOneTableIncrementallyTwice() throws Exception { expectedMessages.add(new AirbyteMessage() .withType(Type.STATE) .withState(new AirbyteStateMessage() + .withType(AirbyteStateType.LEGACY) .withData(Jsons.jsonNode(new DbState() .withCdc(false) .withStreams(Lists.newArrayList(new DbStreamState() @@ -225,7 +240,7 @@ void testReadOneTableIncrementallyTwice() throws Exception { setEmittedAtToNull(actualMessagesSecondSync); - assertTrue(expectedMessages.size() == actualMessagesSecondSync.size()); + assertArrayEquals(expectedMessages.toArray(), actualMessagesSecondSync.toArray()); assertTrue(expectedMessages.containsAll(actualMessagesSecondSync)); assertTrue(actualMessagesSecondSync.containsAll(expectedMessages)); } diff --git a/airbyte-integrations/connectors/source-oracle/src/test/java/io/airbyte/integrations/source/oracle/OracleSourceTest.java b/airbyte-integrations/connectors/source-oracle/src/test/java/io/airbyte/integrations/source/oracle/OracleSourceTest.java index 20459999edce..4ef627107197 100644 --- a/airbyte-integrations/connectors/source-oracle/src/test/java/io/airbyte/integrations/source/oracle/OracleSourceTest.java +++ b/airbyte-integrations/connectors/source-oracle/src/test/java/io/airbyte/integrations/source/oracle/OracleSourceTest.java @@ -58,7 +58,8 @@ class OracleSourceTest { @BeforeAll static void init() { - ORACLE_DB = new OracleContainer("epiclabs/docker-oracle-xe-11g"); + ORACLE_DB = new OracleContainer("epiclabs/docker-oracle-xe-11g") + .withEnv("RELAX_SECURITY", "1"); ORACLE_DB.start(); } diff --git a/airbyte-integrations/connectors/source-oracle/src/test/java/io/airbyte/integrations/source/oracle/OracleSpecTest.java 
b/airbyte-integrations/connectors/source-oracle/src/test/java/io/airbyte/integrations/source/oracle/OracleSpecTest.java new file mode 100644 index 000000000000..90facba583d0 --- /dev/null +++ b/airbyte-integrations/connectors/source-oracle/src/test/java/io/airbyte/integrations/source/oracle/OracleSpecTest.java @@ -0,0 +1,117 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.integrations.source.oracle; + +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertTrue; + +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.node.ObjectNode; +import io.airbyte.commons.io.IOs; +import io.airbyte.commons.json.Jsons; +import io.airbyte.commons.resources.MoreResources; +import io.airbyte.protocol.models.ConnectorSpecification; +import io.airbyte.validation.json.JsonSchemaValidator; +import java.io.File; +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Path; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Test; + +/** + * Tests that the Oracle spec passes JsonSchema validation. While this may seem like overkill, we + * are doing it because there are some gotchas in correctly configuring the oneOf. 
+ */ +public class OracleSpecTest { + + private static final String CONFIGURATION = """ + { + "host": "localhost", + "port": 1521, + "sid": "ora_db", + "username": "ora", + "password": "pwd", + "schemas": [ + "public" + ], + "jdbc_url_params": "property1=pValue1&property2=pValue2" + } + """; + + private static JsonNode schema; + private static JsonSchemaValidator validator; + + @BeforeAll + static void init() throws IOException { + final String spec = MoreResources.readResource("spec.json"); + final File schemaFile = IOs.writeFile(Files.createTempDirectory(Path.of("/tmp"), "pg-spec-test"), "schema.json", spec).toFile(); + schema = JsonSchemaValidator.getSchema(schemaFile).get("connectionSpecification"); + validator = new JsonSchemaValidator(); + } + + @Test + void testHostMissing() { + final JsonNode config = Jsons.deserialize(CONFIGURATION); + ((ObjectNode) config).remove("host"); + assertFalse(validator.test(schema, config)); + } + + @Test + void testPortMissing() { + final JsonNode config = Jsons.deserialize(CONFIGURATION); + ((ObjectNode) config).remove("port"); + assertFalse(validator.test(schema, config)); + } + + @Test + void testSsidMissing() { + final JsonNode config = Jsons.deserialize(CONFIGURATION); + ((ObjectNode) config).remove("sid"); + assertFalse(validator.test(schema, config)); + } + + @Test + void testUsernameMissing() { + final JsonNode config = Jsons.deserialize(CONFIGURATION); + ((ObjectNode) config).remove("username"); + assertFalse(validator.test(schema, config)); + } + + @Test + void testPasswordMissing() { + final JsonNode config = Jsons.deserialize(CONFIGURATION); + ((ObjectNode) config).remove("password"); + assertTrue(validator.test(schema, config)); + } + + @Test + void testSchemaMissing() { + final JsonNode config = Jsons.deserialize(CONFIGURATION); + ((ObjectNode) config).remove("schemas"); + assertTrue(validator.test(schema, config)); + } + + @Test + void testAdditionalJdbcParamMissing() { + final JsonNode config = 
Jsons.deserialize(CONFIGURATION); + ((ObjectNode) config).remove("jdbc_url_params"); + assertTrue(validator.test(schema, config)); + } + + @Test + void testWithJdbcAdditionalProperty() { + final JsonNode config = Jsons.deserialize(CONFIGURATION); + assertTrue(validator.test(schema, config)); + } + + @Test + void testJdbcAdditionalProperty() throws Exception { + final ConnectorSpecification spec = new OracleSource().spec(); + assertNotNull(spec.getConnectionSpecification().get("properties").get("jdbc_url_params")); + } + +} diff --git a/airbyte-integrations/connectors/source-oracle/src/test/java/io/airbyte/integrations/source/oracle/OracleStressTest.java b/airbyte-integrations/connectors/source-oracle/src/test/java/io/airbyte/integrations/source/oracle/OracleStressTest.java index ce74183ffea8..4fa824958e41 100644 --- a/airbyte-integrations/connectors/source-oracle/src/test/java/io/airbyte/integrations/source/oracle/OracleStressTest.java +++ b/airbyte-integrations/connectors/source-oracle/src/test/java/io/airbyte/integrations/source/oracle/OracleStressTest.java @@ -45,7 +45,8 @@ static void init() { COL_ID_TYPE = "NUMBER(38,0)"; INSERT_STATEMENT = "INTO id_and_name (id, name) VALUES (%s,'picard-%s')"; - ORACLE_DB = new OracleContainer("epiclabs/docker-oracle-xe-11g"); + ORACLE_DB = new OracleContainer("epiclabs/docker-oracle-xe-11g") + .withEnv("RELAX_SECURITY", "1"); ORACLE_DB.start(); } diff --git a/airbyte-integrations/connectors/source-orbit/.dockerignore b/airbyte-integrations/connectors/source-orbit/.dockerignore new file mode 100644 index 000000000000..694d552c2125 --- /dev/null +++ b/airbyte-integrations/connectors/source-orbit/.dockerignore @@ -0,0 +1,6 @@ +* +!Dockerfile +!main.py +!source_orbit +!setup.py +!secrets diff --git a/airbyte-integrations/connectors/source-orbit/Dockerfile b/airbyte-integrations/connectors/source-orbit/Dockerfile new file mode 100644 index 000000000000..d20a9c53aa90 --- /dev/null +++ 
b/airbyte-integrations/connectors/source-orbit/Dockerfile @@ -0,0 +1,38 @@ +FROM python:3.9.11-alpine3.15 as base + +# build and load all requirements +FROM base as builder +WORKDIR /airbyte/integration_code + +# upgrade pip to the latest version +RUN apk --no-cache upgrade \ + && pip install --upgrade pip \ + && apk --no-cache add tzdata build-base + + +COPY setup.py ./ +# install necessary packages to a temporary folder +RUN pip install --prefix=/install . + +# build a clean environment +FROM base +WORKDIR /airbyte/integration_code + +# copy all loaded and built libraries to a pure basic image +COPY --from=builder /install /usr/local +# add default timezone settings +COPY --from=builder /usr/share/zoneinfo/Etc/UTC /etc/localtime +RUN echo "Etc/UTC" > /etc/timezone + +# bash is installed for more convenient debugging. +RUN apk --no-cache add bash + +# copy payload code only +COPY main.py ./ +COPY source_orbit ./source_orbit + +ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" +ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] + +LABEL io.airbyte.version=0.1.1 +LABEL io.airbyte.name=airbyte/source-orbit diff --git a/airbyte-integrations/connectors/source-orbit/README.md b/airbyte-integrations/connectors/source-orbit/README.md new file mode 100644 index 000000000000..bb71b2d2e912 --- /dev/null +++ b/airbyte-integrations/connectors/source-orbit/README.md @@ -0,0 +1,132 @@ +# Orbit Source + +This is the repository for the Orbit source connector, written in Python. +For information about how to use this connector within Airbyte, see [the documentation](https://docs.airbyte.io/integrations/sources/orbit). 
+ +## Local development + +### Prerequisites +**To iterate on this connector, make sure to complete this prerequisites section.** + +#### Minimum Python version required `= 3.9.0` + +#### Build & Activate Virtual Environment and install dependencies +From this connector directory, create a virtual environment: +``` +python -m venv .venv +``` + +This will generate a virtualenv for this module in `.venv/`. Make sure this venv is active in your +development environment of choice. To activate it from the terminal, run: +``` +source .venv/bin/activate +pip install -r requirements.txt +pip install '.[tests]' +``` +If you are in an IDE, follow your IDE's instructions to activate the virtualenv. + +Note that while we are installing dependencies from `requirements.txt`, you should only edit `setup.py` for your dependencies. `requirements.txt` is +used for editable installs (`pip install -e`) to pull in Python dependencies from the monorepo and will call `setup.py`. +If this is mumbo jumbo to you, don't worry about it, just put your deps in `setup.py` but install using `pip install -r requirements.txt` and everything +should work as you expect. + +#### Building via Gradle +You can also build the connector in Gradle. This is typically used in CI and not needed for your development workflow. + +To build using Gradle, from the Airbyte repository root, run: +``` +./gradlew :airbyte-integrations:connectors:source-orbit:build +``` + +#### Create credentials +**If you are a community contributor**, follow the instructions in the [documentation](https://docs.airbyte.io/integrations/sources/orbit) +to generate the necessary credentials. Then create a file `secrets/config.json` conforming to the `source_orbit/spec.yaml` file. +Note that any directory named `secrets` is gitignored across the entire Airbyte repo, so there is no danger of accidentally checking in sensitive information. +See `integration_tests/sample_config.json` for a sample config file. 
+ +**If you are an Airbyte core member**, copy the credentials in Lastpass under the secret name `source orbit test creds` +and place them into `secrets/config.json`. + +### Locally running the connector +``` +python main.py spec +python main.py check --config secrets/config.json +python main.py discover --config secrets/config.json +python main.py read --config secrets/config.json --catalog integration_tests/configured_catalog.json +``` + +### Locally running the connector docker image + +#### Build +First, make sure you build the latest Docker image: +``` +docker build . -t airbyte/source-orbit:dev +``` + +You can also build the connector image via Gradle: +``` +./gradlew :airbyte-integrations:connectors:source-orbit:airbyteDocker +``` +When building via Gradle, the docker image name and tag, respectively, are the values of the `io.airbyte.name` and `io.airbyte.version` `LABEL`s in +the Dockerfile. + +#### Run +Then run any of the connector commands as follows: +``` +docker run --rm airbyte/source-orbit:dev spec +docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-orbit:dev check --config /secrets/config.json +docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-orbit:dev discover --config /secrets/config.json +docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/source-orbit:dev read --config /secrets/config.json --catalog /integration_tests/configured_catalog.json +``` +## Testing +Make sure to familiarize yourself with [pytest test discovery](https://docs.pytest.org/en/latest/goodpractices.html#test-discovery) to know how your test files and methods should be named. 
+First install test dependencies into your virtual environment: +``` +pip install .[tests] +``` +### Unit Tests +To run unit tests locally, from the connector directory run: +``` +python -m pytest unit_tests +``` + +### Integration Tests +There are two types of integration tests: Acceptance Tests (Airbyte's test suite for all source connectors) and custom integration tests (which are specific to this connector). +#### Custom Integration tests +Place custom tests inside `integration_tests/` folder, then, from the connector root, run +``` +python -m pytest integration_tests +``` +#### Acceptance Tests +Customize `acceptance-test-config.yml` file to configure tests. See [Source Acceptance Tests](https://docs.airbyte.io/connector-development/testing-connectors/source-acceptance-tests-reference) for more information. +If your connector requires to create or destroy resources for use during acceptance tests create fixtures for it and place them inside integration_tests/acceptance.py. +To run your integration tests with acceptance tests, from the connector root, run +``` +python -m pytest integration_tests -p integration_tests.acceptance +``` +To run your integration tests with docker + +### Using gradle to run tests +All commands should be run from airbyte project root. +To run unit tests: +``` +./gradlew :airbyte-integrations:connectors:source-orbit:unitTest +``` +To run acceptance and custom integration tests: +``` +./gradlew :airbyte-integrations:connectors:source-orbit:integrationTest +``` + +## Dependency Management +All of your dependencies should go in `setup.py`, NOT `requirements.txt`. The requirements file is only used to connect internal Airbyte dependencies in the monorepo for local development. +We split dependencies between two groups, dependencies that are: +* required for your connector to work need to go to `MAIN_REQUIREMENTS` list. 
+* required for the testing need to go to `TEST_REQUIREMENTS` list + +### Publishing a new version of the connector +You've checked out the repo, implemented a million dollar feature, and you're ready to share your changes with the world. Now what? +1. Make sure your changes are passing unit and integration tests. +1. Bump the connector version in `Dockerfile` -- just increment the value of the `LABEL io.airbyte.version` appropriately (we use [SemVer](https://semver.org/)). +1. Create a Pull Request. +1. Pat yourself on the back for being an awesome contributor. +1. Someone from Airbyte will take a look at your PR and iterate with you to merge it into master. diff --git a/airbyte-integrations/connectors/source-orbit/acceptance-test-config.yml b/airbyte-integrations/connectors/source-orbit/acceptance-test-config.yml new file mode 100644 index 000000000000..a00444b24a6d --- /dev/null +++ b/airbyte-integrations/connectors/source-orbit/acceptance-test-config.yml @@ -0,0 +1,20 @@ +# See [Source Acceptance Tests](https://docs.airbyte.io/connector-development/testing-connectors/source-acceptance-tests-reference) +# for more information about how to configure these tests +connector_image: airbyte/source-orbit:dev +tests: + spec: + - spec_path: "source_orbit/spec.yaml" + connection: + - config_path: "secrets/config.json" + status: "succeed" + - config_path: "integration_tests/invalid_config.json" + status: "failed" + discovery: + - config_path: "secrets/config.json" + basic_read: + - config_path: "secrets/config.json" + configured_catalog_path: "integration_tests/configured_catalog.json" + empty_streams: [] + full_refresh: + - config_path: "secrets/config.json" + configured_catalog_path: "integration_tests/configured_catalog.json" diff --git a/airbyte-integrations/connectors/source-orbit/acceptance-test-docker.sh b/airbyte-integrations/connectors/source-orbit/acceptance-test-docker.sh new file mode 100644 index 000000000000..c51577d10690 --- /dev/null +++ 
b/airbyte-integrations/connectors/source-orbit/acceptance-test-docker.sh @@ -0,0 +1,16 @@ +#!/usr/bin/env sh + +# Build latest connector image +docker build . -t $(cat acceptance-test-config.yml | grep "connector_image" | head -n 1 | cut -d: -f2-) + +# Pull latest acctest image +docker pull airbyte/source-acceptance-test:latest + +# Run +docker run --rm -it \ + -v /var/run/docker.sock:/var/run/docker.sock \ + -v /tmp:/tmp \ + -v $(pwd):/test_input \ + airbyte/source-acceptance-test \ + --acceptance-test-config /test_input + diff --git a/airbyte-integrations/connectors/source-orbit/build.gradle b/airbyte-integrations/connectors/source-orbit/build.gradle new file mode 100644 index 000000000000..198305c2ab13 --- /dev/null +++ b/airbyte-integrations/connectors/source-orbit/build.gradle @@ -0,0 +1,9 @@ +plugins { + id 'airbyte-python' + id 'airbyte-docker' + id 'airbyte-source-acceptance-test' +} + +airbytePython { + moduleDirectory 'source_orbit' +} diff --git a/airbyte-integrations/connectors/source-orbit/integration_tests/__init__.py b/airbyte-integrations/connectors/source-orbit/integration_tests/__init__.py new file mode 100644 index 000000000000..1100c1c58cf5 --- /dev/null +++ b/airbyte-integrations/connectors/source-orbit/integration_tests/__init__.py @@ -0,0 +1,3 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+# diff --git a/airbyte-integrations/connectors/source-orbit/integration_tests/abnormal_state.json b/airbyte-integrations/connectors/source-orbit/integration_tests/abnormal_state.json new file mode 100644 index 000000000000..52b0f2c2118f --- /dev/null +++ b/airbyte-integrations/connectors/source-orbit/integration_tests/abnormal_state.json @@ -0,0 +1,5 @@ +{ + "todo-stream-name": { + "todo-field-name": "todo-abnormal-value" + } +} diff --git a/airbyte-integrations/connectors/source-orbit/integration_tests/acceptance.py b/airbyte-integrations/connectors/source-orbit/integration_tests/acceptance.py new file mode 100644 index 000000000000..1302b2f57e10 --- /dev/null +++ b/airbyte-integrations/connectors/source-orbit/integration_tests/acceptance.py @@ -0,0 +1,16 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +import pytest + +pytest_plugins = ("source_acceptance_test.plugin",) + + +@pytest.fixture(scope="session", autouse=True) +def connector_setup(): + """This fixture is a placeholder for external resources that acceptance test might require.""" + # TODO: setup test dependencies if needed. 
otherwise remove the TODO comments + yield + # TODO: clean up test dependencies diff --git a/airbyte-integrations/connectors/source-orbit/integration_tests/configured_catalog.json b/airbyte-integrations/connectors/source-orbit/integration_tests/configured_catalog.json new file mode 100644 index 000000000000..a0d0091af9f8 --- /dev/null +++ b/airbyte-integrations/connectors/source-orbit/integration_tests/configured_catalog.json @@ -0,0 +1,44 @@ +{ + "streams": [ + { + "stream": { + "name": "members", + "json_schema": { + "$schema": "http://json-schema.org/draft-04/schema#", + "type": "object", + "properties": { + "api_token": { + "type": "string" + }, + "workspace": { + "type": "string" + } + } + }, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "workspace", + "json_schema": { + "$schema": "http://json-schema.org/draft-04/schema#", + "type": "object", + "properties": { + "api_token": { + "type": "string" + }, + "workspace": { + "type": "string" + } + } + }, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + } + ] +} diff --git a/airbyte-integrations/connectors/source-orbit/integration_tests/invalid_config.json b/airbyte-integrations/connectors/source-orbit/integration_tests/invalid_config.json new file mode 100644 index 000000000000..7e719f4a39a1 --- /dev/null +++ b/airbyte-integrations/connectors/source-orbit/integration_tests/invalid_config.json @@ -0,0 +1 @@ +{ "api_token": "obw_token", "workspace": "airbyte" } diff --git a/airbyte-integrations/connectors/source-orbit/main.py b/airbyte-integrations/connectors/source-orbit/main.py new file mode 100644 index 000000000000..a7b3c933efac --- /dev/null +++ b/airbyte-integrations/connectors/source-orbit/main.py @@ -0,0 +1,13 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+# + + +import sys + +from airbyte_cdk.entrypoint import launch +from source_orbit import SourceOrbit + +if __name__ == "__main__": + source = SourceOrbit() + launch(source, sys.argv[1:]) diff --git a/airbyte-integrations/connectors/source-orbit/requirements.txt b/airbyte-integrations/connectors/source-orbit/requirements.txt new file mode 100644 index 000000000000..0411042aa091 --- /dev/null +++ b/airbyte-integrations/connectors/source-orbit/requirements.txt @@ -0,0 +1,2 @@ +-e ../../bases/source-acceptance-test +-e . diff --git a/airbyte-integrations/connectors/source-orbit/setup.py b/airbyte-integrations/connectors/source-orbit/setup.py new file mode 100644 index 000000000000..00a30d789955 --- /dev/null +++ b/airbyte-integrations/connectors/source-orbit/setup.py @@ -0,0 +1,29 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +from setuptools import find_packages, setup + +MAIN_REQUIREMENTS = [ + "airbyte-cdk~=0.1.56", +] + +TEST_REQUIREMENTS = [ + "pytest~=6.1", + "pytest-mock~=3.6.1", + "source-acceptance-test", +] + +setup( + name="source_orbit", + description="Source implementation for Orbit.", + author="Airbyte", + author_email="contact@airbyte.io", + packages=find_packages(), + install_requires=MAIN_REQUIREMENTS, + package_data={"": ["*.json", "*.yaml", "schemas/*.json", "schemas/shared/*.json"]}, + extras_require={ + "tests": TEST_REQUIREMENTS, + }, +) diff --git a/airbyte-integrations/connectors/source-orbit/source_orbit/__init__.py b/airbyte-integrations/connectors/source-orbit/source_orbit/__init__.py new file mode 100644 index 000000000000..4888354eaa19 --- /dev/null +++ b/airbyte-integrations/connectors/source-orbit/source_orbit/__init__.py @@ -0,0 +1,8 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+# + + +from .source import SourceOrbit + +__all__ = ["SourceOrbit"] diff --git a/airbyte-integrations/connectors/source-orbit/source_orbit/schemas/members.json b/airbyte-integrations/connectors/source-orbit/source_orbit/schemas/members.json new file mode 100644 index 000000000000..eac6de53806e --- /dev/null +++ b/airbyte-integrations/connectors/source-orbit/source_orbit/schemas/members.json @@ -0,0 +1,155 @@ +{ + "$schema": "http://json-schema.org/draft-04/schema#", + "type": "object", + "properties": { + "id": { + "type": "string" + }, + "fake": { + "type": "string" + }, + "type": { + "type": "string" + }, + "attributes": { + "type": "object", + "properties": { + "activities_count": { + "type": "integer" + }, + "activities_score": { + "type": "integer" + }, + "avatar_url": { + "type": ["null", "string"] + }, + "bio": { + "type": ["null", "string"] + }, + "birthday": { + "type": ["null", "string"] + }, + "company": { + "type": ["null", "string"] + }, + "title": { + "type": ["null", "string"] + }, + "created_at": { + "type": ["null", "string"] + }, + "deleted_at": { + "type": ["null", "string"] + }, + "first_activity_occurred_at": { + "type": ["null", "string"] + }, + "last_activity_occurred_at": { + "type": ["null", "string"] + }, + "location": { + "type": ["null", "string"] + }, + "name": { + "type": ["null", "string"] + }, + "pronouns": { + "type": ["null", "string"] + }, + "reach": { + "type": ["null", "integer"] + }, + "shipping_address": { + "type": ["null", "string"] + }, + "slug": { + "type": ["null", "string"] + }, + "source": { + "type": ["null", "string"] + }, + "tag_list": { + "type": ["null", "array"], + "items": { + "type": "string" + } + }, + "tags": { + "type": ["null", "array"], + "items": { + "type": "string" + } + }, + "teammate": { + "type": "boolean" + }, + "tshirt": { + "type": ["null", "string"] + }, + "updated_at": { + "type": ["null", "string"] + }, + "merged_at": { + "type": ["null", "string"] + }, + "url": { + "type": ["null", "string"] + 
}, + "orbit_url": { + "type": ["null", "string"] + }, + "created": { + "type": "boolean" + }, + "id": { + "type": "string" + }, + "orbit_level": { + "type": ["null", "integer"] + }, + "love": { + "type": ["null", "string"] + }, + "twitter": { + "type": ["null", "string"] + }, + "github": { + "type": ["null", "string"] + }, + "discourse": { + "type": ["null", "string"] + }, + "email": { + "type": ["null", "string"] + }, + "devto": { + "type": ["null", "string"] + }, + "linkedin": { + "type": ["null", "string"] + }, + "discord": { + "type": ["null", "string"] + }, + "github_followers": { + "type": ["null", "integer"] + }, + "twitter_followers": { + "type": ["null", "integer"] + }, + "topics": { + "type": ["null", "array"], + "items": { + "type": "string" + } + }, + "languages": { + "type": ["null", "array"], + "items": { + "type": "string" + } + } + } + } + } +} diff --git a/airbyte-integrations/connectors/source-orbit/source_orbit/schemas/workspace.json b/airbyte-integrations/connectors/source-orbit/source_orbit/schemas/workspace.json new file mode 100644 index 000000000000..79a2ed3d7982 --- /dev/null +++ b/airbyte-integrations/connectors/source-orbit/source_orbit/schemas/workspace.json @@ -0,0 +1,38 @@ +{ + "$schema": "http://json-schema.org/draft-04/schema#", + "type": "object", + "properties": { + "id": { + "type": "string" + }, + "type": { + "type": "string" + }, + "attributes": { + "type": "object", + "properties": { + "name": { + "type": "string" + }, + "slug": { + "type": "string" + }, + "updated_at": { + "type": "string" + }, + "created_at": { + "type": "string" + }, + "members_count": { + "type": "integer" + }, + "activities_count": { + "type": "integer" + }, + "tags": { + "type": "object" + } + } + } + } +} diff --git a/airbyte-integrations/connectors/source-orbit/source_orbit/source.py b/airbyte-integrations/connectors/source-orbit/source_orbit/source.py new file mode 100644 index 000000000000..280fff7d9067 --- /dev/null +++ 
b/airbyte-integrations/connectors/source-orbit/source_orbit/source.py @@ -0,0 +1,37 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +from typing import Any, List, Mapping, Tuple + +from airbyte_cdk.models import SyncMode +from airbyte_cdk.sources import AbstractSource +from airbyte_cdk.sources.streams import Stream +from airbyte_cdk.sources.streams.http.requests_native_auth import TokenAuthenticator + +from .streams import Members, Workspace + + +# Source +class SourceOrbit(AbstractSource): + def check_connection(self, logger, config) -> Tuple[bool, any]: + try: + workspace_stream = Workspace( + authenticator=TokenAuthenticator(token=config["api_token"]), + workspace=config["workspace"], + ) + next(workspace_stream.read_records(sync_mode=SyncMode.full_refresh)) + return True, None + except Exception as e: + return False, f"Please check that your API key and workspace name are entered correctly: {repr(e)}" + + def streams(self, config: Mapping[str, Any]) -> List[Stream]: + + stream_kwargs = { + "authenticator": TokenAuthenticator(config["api_token"]), + "workspace": config["workspace"], + "start_date": config["start_date"], + } + + return [Members(**stream_kwargs), Workspace(**stream_kwargs)] diff --git a/airbyte-integrations/connectors/source-orbit/source_orbit/spec.yaml b/airbyte-integrations/connectors/source-orbit/source_orbit/spec.yaml new file mode 100644 index 000000000000..8277b6d61539 --- /dev/null +++ b/airbyte-integrations/connectors/source-orbit/source_orbit/spec.yaml @@ -0,0 +1,29 @@ +documentationUrl: https://docs.airbyte.com/integrations/sources/orbit +connectionSpecification: + $schema: http://json-schema.org/draft-07/schema# + title: Orbit Source Spec + type: object + required: + - api_token + - workspace + additionalProperties: false + properties: + api_token: + type: string + airbyte_secret: true + title: API Token + description: Authorizes you to work with Orbit workspaces associated with the token. 
+ order: 0 + workspace: + type: string + title: Workspace + description: The unique name of the workspace that your API token is associated with. + order: 1 + start_date: + type: string + title: Start Date + description: >- + Date in the format 2022-06-26. Only load members whose last activities are after this date. + pattern: >- + ^[0-9]{4}-[0-9]{2}-[0-9]{2}$ + order: 2 diff --git a/airbyte-integrations/connectors/source-orbit/source_orbit/streams.py b/airbyte-integrations/connectors/source-orbit/source_orbit/streams.py new file mode 100644 index 000000000000..5645af953600 --- /dev/null +++ b/airbyte-integrations/connectors/source-orbit/source_orbit/streams.py @@ -0,0 +1,96 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + +import urllib.parse +from abc import ABC +from typing import Any, Iterable, Mapping, MutableMapping, Optional + +import requests +from airbyte_cdk.sources.streams.http import HttpStream + + +class OrbitStream(HttpStream, ABC): + url_base = "https://app.orbit.love/api/v1/" + + def __init__(self, workspace: str, start_date: Optional[str] = None, **kwargs): + super().__init__(**kwargs) + self.workspace = workspace + self.start_date = start_date + + def next_page_token(self, response: requests.Response) -> Optional[Mapping[str, Any]]: + return None + + def parse_response( + self, + response: requests.Response, + stream_state: Mapping[str, Any] = None, + stream_slice: Mapping[str, Any] = None, + next_page_token: Mapping[str, Any] = None, + ) -> Iterable[Mapping]: + data = response.json() + records = data["data"] + yield from records + + +class OrbitStreamPaginated(OrbitStream): + def next_page_token(self, response: requests.Response) -> Optional[Mapping[str, str]]: + decoded_response = response.json() + links = decoded_response.get("links") + if not links: + return None + + next = links.get("next") + if not next: + return None + + next_url = urllib.parse.urlparse(next) + return {str(k): str(v) for (k, v) in 
urllib.parse.parse_qsl(next_url.query)} + + def request_params( + self, stream_state: Mapping[str, Any], stream_slice: Mapping[str, any] = None, next_page_token: Mapping[str, Any] = None + ) -> MutableMapping[str, Any]: + + params = super().request_params(stream_state, stream_slice, next_page_token) + return {**params, **next_page_token} if next_page_token else params + + +class Members(OrbitStreamPaginated): + # Docs: https://docs.orbit.love/reference/members-overview + primary_key = "id" + + def path( + self, stream_state: Mapping[str, Any] = None, stream_slice: Mapping[str, Any] = None, next_page_token: Mapping[str, Any] = None + ) -> str: + return f"{self.workspace}/members" + + def request_params( + self, stream_state: Mapping[str, Any], stream_slice: Mapping[str, any] = None, next_page_token: Mapping[str, Any] = None + ) -> MutableMapping[str, Any]: + + params = super().request_params(stream_state, stream_slice, next_page_token) + params["sort"] = "created_at" + if self.start_date is not None: + params["start_date"] = self.start_date # The start_date parameter is filtering the last_activity_occurred_at field + return params + + +class Workspace(OrbitStream): + # Docs: https://docs.orbit.love/reference/get_workspaces-workspace-slug + # This stream is primarily used for connection checking. 
+ primary_key = "id" + + def path( + self, stream_state: Mapping[str, Any] = None, stream_slice: Mapping[str, Any] = None, next_page_token: Mapping[str, Any] = None + ) -> str: + return f"workspaces/{self.workspace}" + + def parse_response( + self, + response: requests.Response, + stream_state: Mapping[str, Any] = None, + stream_slice: Mapping[str, Any] = None, + next_page_token: Mapping[str, Any] = None, + ) -> Iterable[Mapping]: + data = response.json() + yield data["data"] diff --git a/airbyte-integrations/connectors/source-orbit/unit_tests/__init__.py b/airbyte-integrations/connectors/source-orbit/unit_tests/__init__.py new file mode 100644 index 000000000000..1100c1c58cf5 --- /dev/null +++ b/airbyte-integrations/connectors/source-orbit/unit_tests/__init__.py @@ -0,0 +1,3 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# diff --git a/airbyte-integrations/connectors/source-orbit/unit_tests/test_source.py b/airbyte-integrations/connectors/source-orbit/unit_tests/test_source.py new file mode 100644 index 000000000000..1cdcf36126d4 --- /dev/null +++ b/airbyte-integrations/connectors/source-orbit/unit_tests/test_source.py @@ -0,0 +1,38 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+# + +from unittest.mock import MagicMock + +import pytest +from source_orbit.source import SourceOrbit, Workspace + + +@pytest.mark.parametrize( + "read_records_side_effect, expected_return_value, expected_error_message", + [ + (iter(["foo", "bar"]), True, None), + ( + Exception("connection error"), + False, + "Please check that your API key and workspace name are entered correctly: Exception('connection error')", + ), + ], +) +def test_check_connection(mocker, read_records_side_effect, expected_return_value, expected_error_message): + source = SourceOrbit() + if expected_error_message: + read_records_mock = mocker.Mock(side_effect=read_records_side_effect) + else: + read_records_mock = mocker.Mock(return_value=read_records_side_effect) + mocker.patch.object(Workspace, "read_records", read_records_mock) + logger_mock, config_mock = MagicMock(), MagicMock() + assert source.check_connection(logger_mock, config_mock) == (expected_return_value, expected_error_message) + + +def test_streams(mocker): + source = SourceOrbit() + config_mock = MagicMock() + streams = source.streams(config_mock) + expected_streams_number = 2 + assert len(streams) == expected_streams_number diff --git a/airbyte-integrations/connectors/source-orbit/unit_tests/test_streams.py b/airbyte-integrations/connectors/source-orbit/unit_tests/test_streams.py new file mode 100644 index 000000000000..4c15591f6d03 --- /dev/null +++ b/airbyte-integrations/connectors/source-orbit/unit_tests/test_streams.py @@ -0,0 +1,98 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+# + +from http import HTTPStatus +from unittest.mock import MagicMock + +import pytest +from source_orbit.streams import Members, OrbitStream, OrbitStreamPaginated + + +@pytest.fixture +def patch_base_class(mocker): + # Mock abstract methods to enable instantiating abstract class + mocker.patch.object(OrbitStream, "path", "v0/example_endpoint") + mocker.patch.object(OrbitStream, "primary_key", "test_primary_key") + mocker.patch.object(OrbitStream, "__abstractmethods__", set()) + mocker.patch.object(OrbitStreamPaginated, "__abstractmethods__", set()) + + +def test_request_params(patch_base_class): + stream = OrbitStream(workspace="workspace") + inputs = {"stream_slice": None, "stream_state": None, "next_page_token": None} + expected_params = {} + assert stream.request_params(**inputs) == expected_params + + +def test_next_page_token(patch_base_class): + stream = OrbitStream(workspace="workspace") + inputs = {"response": MagicMock()} + expected_token = None + assert stream.next_page_token(**inputs) == expected_token + + +def test_parse_response(patch_base_class, mocker): + stream = OrbitStream(workspace="workspace") + inputs = {"response": mocker.Mock(json=mocker.Mock(return_value={"data": ["foo", "bar"]}))} + gen = stream.parse_response(**inputs) + assert next(gen) == "foo" + assert next(gen) == "bar" + + +def test_request_headers(patch_base_class): + stream = OrbitStream(workspace="workspace") + inputs = {"stream_slice": None, "stream_state": None, "next_page_token": None} + expected_headers = {} + assert stream.request_headers(**inputs) == expected_headers + + +def test_http_method(patch_base_class): + stream = OrbitStream(workspace="workspace") + expected_method = "GET" + assert stream.http_method == expected_method + + +@pytest.mark.parametrize( + ("http_status", "should_retry"), + [ + (HTTPStatus.OK, False), + (HTTPStatus.BAD_REQUEST, False), + (HTTPStatus.TOO_MANY_REQUESTS, True), + (HTTPStatus.INTERNAL_SERVER_ERROR, True), + ], +) +def 
test_should_retry(patch_base_class, http_status, should_retry): + response_mock = MagicMock() + response_mock.status_code = http_status + stream = OrbitStream(workspace="workspace") + assert stream.should_retry(response_mock) == should_retry + + +def test_backoff_time(patch_base_class): + response_mock = MagicMock() + stream = OrbitStream(workspace="workspace") + expected_backoff_time = None + assert stream.backoff_time(response_mock) == expected_backoff_time + + +class TestOrbitStreamPaginated: + @pytest.mark.parametrize( + "json_response, expected_token", [({"links": {"next": "http://foo.bar/api?a=b&c=d"}}, {"a": "b", "c": "d"}), ({}, None)] + ) + def test_next_page_token(self, patch_base_class, mocker, json_response, expected_token): + stream = OrbitStreamPaginated(workspace="workspace") + inputs = {"response": mocker.Mock(json=mocker.Mock(return_value=json_response))} + assert stream.next_page_token(**inputs) == expected_token + + +class TestMembers: + @pytest.mark.parametrize("start_date", [None, "2022-06-27"]) + def test_members_request_params(self, patch_base_class, start_date): + stream = Members(workspace="workspace", start_date=start_date) + inputs = {"stream_slice": None, "stream_state": None, "next_page_token": None} + if start_date is not None: + expected_params = {"sort": "created_at", "start_date": start_date} + else: + expected_params = {"sort": "created_at"} + assert stream.request_params(**inputs) == expected_params diff --git a/airbyte-integrations/connectors/source-paypal-transaction/Dockerfile b/airbyte-integrations/connectors/source-paypal-transaction/Dockerfile index 0c89c2ae4650..9b4e721e9204 100644 --- a/airbyte-integrations/connectors/source-paypal-transaction/Dockerfile +++ b/airbyte-integrations/connectors/source-paypal-transaction/Dockerfile @@ -12,5 +12,5 @@ RUN pip install . 
ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] -LABEL io.airbyte.version=0.1.5 +LABEL io.airbyte.version=0.1.6 LABEL io.airbyte.name=airbyte/source-paypal-transaction diff --git a/airbyte-integrations/connectors/source-paypal-transaction/acceptance-test-config.yml b/airbyte-integrations/connectors/source-paypal-transaction/acceptance-test-config.yml index d30a7edcf39e..d9c1343fbb0f 100644 --- a/airbyte-integrations/connectors/source-paypal-transaction/acceptance-test-config.yml +++ b/airbyte-integrations/connectors/source-paypal-transaction/acceptance-test-config.yml @@ -15,6 +15,7 @@ tests: # Sometimes test could fail (on weekends) because transactions could temporary disappear from Paypal Sandbox account - config_path: "secrets/config.json" configured_catalog_path: "integration_tests/configured_catalog.json" + empty_streams: ["transactions"] # Two-sequence read is failing because of "last_refresh_time" property inside of response, # It is enough to have basic_read test for all the records to check. 
# full_refresh: diff --git a/airbyte-integrations/connectors/source-paypal-transaction/source_paypal_transaction/schemas/transactions.json b/airbyte-integrations/connectors/source-paypal-transaction/source_paypal_transaction/schemas/transactions.json index 7443e216f303..af9c89910ac9 100644 --- a/airbyte-integrations/connectors/source-paypal-transaction/source_paypal_transaction/schemas/transactions.json +++ b/airbyte-integrations/connectors/source-paypal-transaction/source_paypal_transaction/schemas/transactions.json @@ -5,29 +5,50 @@ "transaction_info": { "type": ["null", "object"], "properties": { + "paypal_reference_id": { + "type": ["null", "string"], + "maxLength": 24 + }, + "paypal_reference_id_type": { + "type": ["null", "string"], + "maxLength": 3, + "minLength": 3 + }, + "protection_eligibility": { + "type": ["null", "string"], + "maxLength": 2 + }, "paypal_account_id": { - "type": ["null", "string"] + "type": ["null", "string"], + "maxLength": 24 }, "transaction_id": { - "type": ["null", "string"] + "type": ["null", "string"], + "maxLength": 24 }, "transaction_event_code": { - "type": ["null", "string"] + "type": ["null", "string"], + "maxLength": 5 }, "transaction_initiation_date": { - "type": ["null", "string"] + "type": ["null", "string"], + "format": "date-time" }, "transaction_updated_date": { - "type": ["null", "string"] + "type": ["null", "string"], + "format": "date-time" }, "transaction_amount": { "type": ["null", "object"], "properties": { "currency_code": { - "type": ["null", "string"] + "type": "string", + "maxLength": 3, + "minLength": 3 }, "value": { - "type": ["null", "string"] + "type": "string", + "maxLength": 32 } } }, @@ -35,10 +56,13 @@ "type": ["null", "object"], "properties": { "currency_code": { - "type": ["null", "string"] + "type": "string", + "maxLength": 3, + "minLength": 3 }, "value": { - "type": ["null", "string"] + "type": "string", + "maxLength": 32 } } }, @@ -46,10 +70,13 @@ "type": ["null", "object"], "properties": { 
"currency_code": { - "type": ["null", "string"] + "type": "string", + "maxLength": 3, + "minLength": 3 }, "value": { - "type": ["null", "string"] + "type": "string", + "maxLength": 32 } } }, @@ -57,10 +84,13 @@ "type": ["null", "object"], "properties": { "currency_code": { - "type": ["null", "string"] + "type": "string", + "maxLength": 3, + "minLength": 3 }, "value": { - "type": ["null", "string"] + "type": "string", + "maxLength": 32 } } }, @@ -68,30 +98,34 @@ "type": ["null", "object"], "properties": { "currency_code": { - "type": ["null", "string"] + "type": "string", + "maxLength": 3, + "minLength": 3 }, "value": { - "type": ["null", "string"] + "type": "string", + "maxLength": 32 } } }, "transaction_status": { - "type": ["null", "string"] + "type": ["null", "string"], + "maxLength": 1 }, "transaction_subject": { - "type": ["null", "string"] + "type": ["null", "string"], + "maxLength": 256 }, "transaction_note": { "type": ["null", "string"] }, "invoice_id": { - "type": ["null", "string"] + "type": ["null", "string"], + "maxLength": 127 }, "custom_field": { - "type": ["null", "string"] - }, - "protection_eligibility": { - "type": ["null", "string"] + "type": ["null", "string"], + "maxLength": 127 } } }, @@ -99,33 +133,41 @@ "type": ["null", "object"], "properties": { "account_id": { - "type": ["null", "string"] + "type": ["null", "string"], + "maxLength": 13 }, "email_address": { - "type": ["null", "string"] + "type": ["null", "string"], + "maxLength": 256 }, "address_status": { - "type": ["null", "string"] + "type": ["null", "string"], + "maxLength": 1 }, "payer_status": { - "type": ["null", "string"] + "type": ["null", "string"], + "maxLength": 1 }, "payer_name": { "type": ["null", "object"], "properties": { "given_name": { - "type": ["null", "string"] + "type": ["null", "string"], + "maxLength": 256 }, "surname": { - "type": ["null", "string"] + "type": ["null", "string"], + "maxLength": 256 }, "alternate_full_name": { - "type": ["null", "string"] + "type": 
["null", "string"], + "maxLength": 256 } } }, "country_code": { - "type": ["null", "string"] + "type": ["null", "string"], + "maxLength": 3 } } }, @@ -133,7 +175,8 @@ "type": ["null", "object"], "properties": { "name": { - "type": ["null", "string"] + "type": ["null", "string"], + "maxLength": 500 }, "address": { "type": ["null", "object"], @@ -145,13 +188,16 @@ "type": ["null", "string"] }, "city": { - "type": ["null", "string"] + "type": ["null", "string"], + "maxLength": 256 }, "country_code": { - "type": ["null", "string"] + "type": ["null", "string"], + "maxLength": 3 }, "postal_code": { - "type": ["null", "string"] + "type": ["null", "string"], + "maxLength": 256 } } } @@ -166,10 +212,12 @@ "type": ["null", "object"], "properties": { "item_code": { - "type": ["null", "string"] + "type": ["null", "string"], + "maxLength": 1000 }, "item_name": { - "type": ["null", "string"] + "type": ["null", "string"], + "maxLength": 256 }, "item_description": { "type": ["null", "string"] @@ -181,10 +229,13 @@ "type": ["null", "object"], "properties": { "currency_code": { - "type": ["null", "string"] + "type": "string", + "maxLength": 3, + "minLength": 3 }, "value": { - "type": ["null", "string"] + "type": "string", + "maxLength": 32 } } }, @@ -192,10 +243,13 @@ "type": ["null", "object"], "properties": { "currency_code": { - "type": ["null", "string"] + "type": "string", + "maxLength": 3, + "minLength": 3 }, "value": { - "type": ["null", "string"] + "type": "string", + "maxLength": 32 } } }, @@ -208,10 +262,13 @@ "type": ["null", "object"], "properties": { "currency_code": { - "type": ["null", "string"] + "type": "string", + "maxLength": 3, + "minLength": 3 }, "value": { - "type": ["null", "string"] + "type": "string", + "maxLength": 32 } } } @@ -222,15 +279,19 @@ "type": ["null", "object"], "properties": { "currency_code": { - "type": ["null", "string"] + "type": "string", + "maxLength": 3, + "minLength": 3 }, "value": { - "type": ["null", "string"] + "type": "string", + 
"maxLength": 32 } } }, "invoice_number": { - "type": ["null", "string"] + "type": ["null", "string"], + "maxLength": 200 } } } @@ -241,10 +302,12 @@ "type": ["null", "object"], "properties": { "store_id": { - "type": ["null", "string"] + "type": ["null", "string"], + "maxLength": 100 }, "terminal_id": { - "type": ["null", "string"] + "type": ["null", "string"], + "maxLength": 60 } } }, @@ -252,16 +315,19 @@ "type": ["null", "object"], "properties": { "auction_site": { - "type": ["null", "string"] + "type": ["null", "string"], + "maxLength": 200 }, "auction_item_site": { "type": ["null", "string"] }, "auction_buyer_id": { - "type": ["null", "string"] + "type": ["null", "string"], + "maxLength": 500 }, "auction_closing_date": { - "type": ["null", "string"] + "type": ["null", "string"], + "format": "date-time" } } }, @@ -274,24 +340,30 @@ "type": "object", "properties": { "incentive_type": { - "type": ["null", "string"] + "type": ["null", "string"], + "maxLength": 500 }, "incentive_code": { - "type": ["null", "string"] + "type": ["null", "string"], + "maxLength": 200 }, "incentive_amount": { "type": "object", "properties": { "currency_code": { - "type": "string" + "type": "string", + "maxLength": 3, + "minLength": 3 }, "value": { - "type": "string" + "type": "string", + "maxLength": 32 } } }, "incentive_program_code": { - "type": ["null", "string"] + "type": ["null", "string"], + "maxLength": 100 } } } diff --git a/airbyte-integrations/connectors/source-paypal-transaction/source_paypal_transaction/source.py b/airbyte-integrations/connectors/source-paypal-transaction/source_paypal_transaction/source.py index 173cb9af1bcf..14005c7358dd 100644 --- a/airbyte-integrations/connectors/source-paypal-transaction/source_paypal_transaction/source.py +++ b/airbyte-integrations/connectors/source-paypal-transaction/source_paypal_transaction/source.py @@ -7,12 +7,13 @@ import time from abc import ABC from datetime import datetime, timedelta -from typing import Any, Callable, 
Iterable, List, Mapping, MutableMapping, Optional, Tuple, Union +from typing import Any, Callable, Iterable, List, Mapping, MutableMapping, Optional, Tuple, Union, Dict import requests from airbyte_cdk.sources import AbstractSource from airbyte_cdk.sources.streams import Stream from airbyte_cdk.sources.streams.http import HttpStream +from airbyte_cdk.sources.utils.transform import TransformConfig, TypeTransformer from airbyte_cdk.sources.streams.http.auth import HttpAuthenticator, Oauth2Authenticator from dateutil.parser import isoparse @@ -268,6 +269,7 @@ class Transactions(PaypalTransactionStream): data_field = "transaction_details" primary_key = [["transaction_info", "transaction_id"]] cursor_field = ["transaction_info", "transaction_initiation_date"] + transformer = TypeTransformer(TransformConfig.CustomSchemaNormalization) # TODO handle API error when 1 request returns more than 10000 records. # https://github.com/airbytehq/airbyte/issues/4404 @@ -299,6 +301,15 @@ def request_params( "page_size": self.page_size, "page": page_number, } + + @transformer.registerCustomTransform + def transform_function(original_value: Any, field_schema: Dict[str, Any]) -> Any: + if isinstance(original_value, str) and field_schema["type"] == "number": + return float(original_value) + elif isinstance(original_value, str) and field_schema["type"] == "integer": + return int(original_value) + else: + return original_value class Balances(PaypalTransactionStream): diff --git a/airbyte-integrations/connectors/source-postgres-strict-encrypt/Dockerfile b/airbyte-integrations/connectors/source-postgres-strict-encrypt/Dockerfile index 0de75607e596..91f7860b978f 100644 --- a/airbyte-integrations/connectors/source-postgres-strict-encrypt/Dockerfile +++ b/airbyte-integrations/connectors/source-postgres-strict-encrypt/Dockerfile @@ -16,5 +16,5 @@ ENV APPLICATION source-postgres-strict-encrypt COPY --from=build /airbyte /airbyte -LABEL io.airbyte.version=0.4.21 +LABEL io.airbyte.version=0.4.28 
LABEL io.airbyte.name=airbyte/source-postgres-strict-encrypt diff --git a/airbyte-integrations/connectors/source-postgres-strict-encrypt/build.gradle b/airbyte-integrations/connectors/source-postgres-strict-encrypt/build.gradle index 8cb689c04eb7..639a31c370be 100644 --- a/airbyte-integrations/connectors/source-postgres-strict-encrypt/build.gradle +++ b/airbyte-integrations/connectors/source-postgres-strict-encrypt/build.gradle @@ -23,7 +23,7 @@ dependencies { testImplementation testFixtures(project(':airbyte-integrations:connectors:source-jdbc')) testImplementation project(':airbyte-test-utils') - testImplementation libs.testcontainers.postgresql + testImplementation libs.connectors.testcontainers.postgresql integrationTestJavaImplementation project(':airbyte-integrations:bases:standard-source-test') diff --git a/airbyte-integrations/connectors/source-postgres-strict-encrypt/src/main/java/io/airbyte/integrations/source/postgres/PostgresSourceStrictEncrypt.java b/airbyte-integrations/connectors/source-postgres-strict-encrypt/src/main/java/io/airbyte/integrations/source/postgres/PostgresSourceStrictEncrypt.java index d83a26344433..4be6d1a5c5ae 100644 --- a/airbyte-integrations/connectors/source-postgres-strict-encrypt/src/main/java/io/airbyte/integrations/source/postgres/PostgresSourceStrictEncrypt.java +++ b/airbyte-integrations/connectors/source-postgres-strict-encrypt/src/main/java/io/airbyte/integrations/source/postgres/PostgresSourceStrictEncrypt.java @@ -13,6 +13,10 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; +/** + * This file will soon be removed. Any change to this file should also be duplicated to + * PostgresSourceStrictEncrypt.java in the source-postgres module. 
+ */ public class PostgresSourceStrictEncrypt extends SpecModifyingSource implements Source { private static final Logger LOGGER = LoggerFactory.getLogger(PostgresSourceStrictEncrypt.class); diff --git a/airbyte-integrations/connectors/source-postgres-strict-encrypt/src/test/java/io/airbyte/integrations/source/postgres/PostgresStrictEncryptJdbcSourceAcceptanceTest.java b/airbyte-integrations/connectors/source-postgres-strict-encrypt/src/test/java/io/airbyte/integrations/source/postgres/PostgresStrictEncryptJdbcSourceAcceptanceTest.java deleted file mode 100644 index ba0d126de1fe..000000000000 --- a/airbyte-integrations/connectors/source-postgres-strict-encrypt/src/test/java/io/airbyte/integrations/source/postgres/PostgresStrictEncryptJdbcSourceAcceptanceTest.java +++ /dev/null @@ -1,217 +0,0 @@ -/* - * Copyright (c) 2022 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.integrations.source.postgres; - -import static org.junit.jupiter.api.Assertions.assertEquals; - -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.collect.ImmutableList; -import com.google.common.collect.ImmutableMap; -import com.google.common.collect.Lists; -import io.airbyte.commons.io.IOs; -import io.airbyte.commons.json.Jsons; -import io.airbyte.commons.resources.MoreResources; -import io.airbyte.commons.string.Strings; -import io.airbyte.db.jdbc.JdbcSourceOperations; -import io.airbyte.integrations.base.Source; -import io.airbyte.integrations.base.ssh.SshHelpers; -import io.airbyte.integrations.source.jdbc.JdbcSource; -import io.airbyte.integrations.source.jdbc.test.JdbcSourceAcceptanceTest; -import io.airbyte.integrations.source.relationaldb.models.DbState; -import io.airbyte.integrations.source.relationaldb.models.DbStreamState; -import io.airbyte.protocol.models.AirbyteCatalog; -import io.airbyte.protocol.models.AirbyteMessage; -import io.airbyte.protocol.models.AirbyteRecordMessage; -import io.airbyte.protocol.models.AirbyteStateMessage; -import 
io.airbyte.protocol.models.CatalogHelpers; -import io.airbyte.protocol.models.ConnectorSpecification; -import io.airbyte.protocol.models.Field; -import io.airbyte.protocol.models.JsonSchemaType; -import io.airbyte.protocol.models.SyncMode; -import io.airbyte.test.utils.PostgreSQLContainerHelper; -import java.util.ArrayList; -import java.util.Collections; -import java.util.List; -import java.util.function.Function; -import org.junit.jupiter.api.AfterAll; -import org.junit.jupiter.api.BeforeAll; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; -import org.testcontainers.containers.PostgreSQLContainer; -import org.testcontainers.utility.MountableFile; - -class PostgresStrictEncryptJdbcSourceAcceptanceTest extends JdbcSourceAcceptanceTest { - - private static PostgreSQLContainer PSQL_DB; - - private JsonNode config; - - @BeforeAll - static void init() { - PSQL_DB = new PostgreSQLContainer<>("postgres:13-alpine"); - PSQL_DB.start(); - } - - @BeforeEach - public void setup() throws Exception { - final String dbName = Strings.addRandomSuffix("db", "_", 10).toLowerCase(); - - config = Jsons.jsonNode(ImmutableMap.builder() - .put("host", PSQL_DB.getHost()) - .put("port", PSQL_DB.getFirstMappedPort()) - .put("database", dbName) - .put("username", PSQL_DB.getUsername()) - .put("password", PSQL_DB.getPassword()) - .put("ssl", false) - .build()); - - final String initScriptName = "init_" + dbName.concat(".sql"); - final String tmpFilePath = IOs.writeFileToRandomTmpDir(initScriptName, "CREATE DATABASE " + dbName + ";"); - PostgreSQLContainerHelper.runSqlScript(MountableFile.forHostPath(tmpFilePath), PSQL_DB); - - super.setup(); - } - - @Override - public boolean supportsSchemas() { - return true; - } - - @Override - public JdbcSource getJdbcSource() { - return null; - } - - @Override - public Source getSource() { - return new PostgresSourceStrictEncrypt(); - } - - @Override - public Function getToDatabaseConfigFunction() { - return new 
PostgresSource()::toDatabaseConfig; - } - - @Override - public JsonNode getConfig() { - return config; - } - - @Override - public String getDriverClass() { - return PostgresSource.DRIVER_CLASS; - } - - @AfterAll - static void cleanUp() { - PSQL_DB.close(); - } - - @Override - protected List getTestMessages() { - return Lists.newArrayList( - new AirbyteMessage().withType(AirbyteMessage.Type.RECORD) - .withRecord(new AirbyteRecordMessage().withStream(streamName).withNamespace(getDefaultNamespace()) - .withData(Jsons.jsonNode(ImmutableMap - .of(COL_ID, ID_VALUE_1, - COL_NAME, "picard", - COL_UPDATED_AT, "2004-10-19")))), - new AirbyteMessage().withType(AirbyteMessage.Type.RECORD) - .withRecord(new AirbyteRecordMessage().withStream(streamName).withNamespace(getDefaultNamespace()) - .withData(Jsons.jsonNode(ImmutableMap - .of(COL_ID, ID_VALUE_2, - COL_NAME, "crusher", - COL_UPDATED_AT, - "2005-10-19")))), - new AirbyteMessage().withType(AirbyteMessage.Type.RECORD) - .withRecord(new AirbyteRecordMessage().withStream(streamName).withNamespace(getDefaultNamespace()) - .withData(Jsons.jsonNode(ImmutableMap - .of(COL_ID, ID_VALUE_3, - COL_NAME, "vash", - COL_UPDATED_AT, "2006-10-19"))))); - } - - @Override - protected AirbyteCatalog getCatalog(final String defaultNamespace) { - return new AirbyteCatalog().withStreams(Lists.newArrayList( - CatalogHelpers.createAirbyteStream( - TABLE_NAME, - defaultNamespace, - Field.of(COL_ID, JsonSchemaType.NUMBER), - Field.of(COL_NAME, JsonSchemaType.STRING), - Field.of(COL_UPDATED_AT, JsonSchemaType.STRING_DATE)) - .withSupportedSyncModes(Lists.newArrayList(SyncMode.FULL_REFRESH, SyncMode.INCREMENTAL)) - .withSourceDefinedPrimaryKey(List.of(List.of(COL_ID))), - CatalogHelpers.createAirbyteStream( - TABLE_NAME_WITHOUT_PK, - defaultNamespace, - Field.of(COL_ID, JsonSchemaType.NUMBER), - Field.of(COL_NAME, JsonSchemaType.STRING), - Field.of(COL_UPDATED_AT, JsonSchemaType.STRING_DATE)) - 
.withSupportedSyncModes(Lists.newArrayList(SyncMode.FULL_REFRESH, SyncMode.INCREMENTAL)) - .withSourceDefinedPrimaryKey(Collections.emptyList()), - CatalogHelpers.createAirbyteStream( - TABLE_NAME_COMPOSITE_PK, - defaultNamespace, - Field.of(COL_FIRST_NAME, JsonSchemaType.STRING), - Field.of(COL_LAST_NAME, JsonSchemaType.STRING), - Field.of(COL_UPDATED_AT, JsonSchemaType.STRING_DATE)) - .withSupportedSyncModes(Lists.newArrayList(SyncMode.FULL_REFRESH, SyncMode.INCREMENTAL)) - .withSourceDefinedPrimaryKey( - List.of(List.of(COL_FIRST_NAME), List.of(COL_LAST_NAME))))); - } - - @Override - protected void incrementalTimestampCheck() throws Exception { - super.incrementalCursorCheck(COL_UPDATED_AT, - "2005-10-18", - "2006-10-19", - Lists.newArrayList(getTestMessages().get(1), - getTestMessages().get(2))); - } - - @Override - protected JdbcSourceOperations getSourceOperations() { - return new PostgresSourceOperations(); - } - - @Override - protected List getExpectedAirbyteMessagesSecondSync(String namespace) { - final List expectedMessages = new ArrayList<>(); - expectedMessages.add(new AirbyteMessage().withType(AirbyteMessage.Type.RECORD) - .withRecord(new AirbyteRecordMessage().withStream(streamName).withNamespace(namespace) - .withData(Jsons.jsonNode(ImmutableMap - .of(COL_ID, ID_VALUE_4, - COL_NAME, "riker", - COL_UPDATED_AT, "2006-10-19"))))); - expectedMessages.add(new AirbyteMessage().withType(AirbyteMessage.Type.RECORD) - .withRecord(new AirbyteRecordMessage().withStream(streamName).withNamespace(namespace) - .withData(Jsons.jsonNode(ImmutableMap - .of(COL_ID, ID_VALUE_5, - COL_NAME, "data", - COL_UPDATED_AT, "2006-10-19"))))); - expectedMessages.add(new AirbyteMessage() - .withType(AirbyteMessage.Type.STATE) - .withState(new AirbyteStateMessage() - .withData(Jsons.jsonNode(new DbState() - .withCdc(false) - .withStreams(Lists.newArrayList(new DbStreamState() - .withStreamName(streamName) - .withStreamNamespace(namespace) - 
.withCursorField(ImmutableList.of(COL_ID)) - .withCursor("5"))))))); - return expectedMessages; - } - - @Test - void testSpec() throws Exception { - final ConnectorSpecification actual = source.spec(); - final ConnectorSpecification expected = - SshHelpers.injectSshIntoSpec(Jsons.deserialize(MoreResources.readResource("expected_spec.json"), ConnectorSpecification.class)); - - assertEquals(expected, actual); - } - -} diff --git a/airbyte-integrations/connectors/source-postgres/Dockerfile b/airbyte-integrations/connectors/source-postgres/Dockerfile index 42f2984e039d..3c38a53727f9 100644 --- a/airbyte-integrations/connectors/source-postgres/Dockerfile +++ b/airbyte-integrations/connectors/source-postgres/Dockerfile @@ -16,5 +16,5 @@ ENV APPLICATION source-postgres COPY --from=build /airbyte /airbyte -LABEL io.airbyte.version=0.4.21 +LABEL io.airbyte.version=0.4.28 LABEL io.airbyte.name=airbyte/source-postgres diff --git a/airbyte-integrations/connectors/source-postgres/build.gradle b/airbyte-integrations/connectors/source-postgres/build.gradle index de93315dd134..a780b5ced643 100644 --- a/airbyte-integrations/connectors/source-postgres/build.gradle +++ b/airbyte-integrations/connectors/source-postgres/build.gradle @@ -6,14 +6,14 @@ plugins { } application { - mainClass = 'io.airbyte.integrations.source.postgres.PostgresSource' + mainClass = 'io.airbyte.integrations.source.postgres.PostgresSourceRunner' applicationDefaultJvmArgs = ['-XX:+ExitOnOutOfMemoryError', '-XX:MaxRAMPercentage=75.0'] } dependencies { implementation project(':airbyte-db:db-lib') implementation project(':airbyte-integrations:bases:base-java') - implementation project(':airbyte-integrations:bases:debezium') + implementation project(':airbyte-integrations:bases:debezium-v1-9-2') implementation project(':airbyte-protocol:protocol-models') implementation project(':airbyte-integrations:connectors:source-jdbc') implementation project(':airbyte-integrations:connectors:source-relational-db') @@ -21,12 
+21,12 @@ dependencies { implementation 'org.apache.commons:commons-lang3:3.11' implementation libs.postgresql - testImplementation testFixtures(project(':airbyte-integrations:bases:debezium')) + testImplementation testFixtures(project(':airbyte-integrations:bases:debezium-v1-9-2')) testImplementation testFixtures(project(':airbyte-integrations:connectors:source-jdbc')) testImplementation project(":airbyte-json-validation") testImplementation project(':airbyte-test-utils') - - testImplementation libs.testcontainers.postgresql + testImplementation libs.connectors.testcontainers.jdbc + testImplementation libs.connectors.testcontainers.postgresql integrationTestJavaImplementation project(':airbyte-integrations:bases:standard-source-test') performanceTestJavaImplementation project(':airbyte-integrations:bases:standard-source-test') diff --git a/airbyte-integrations/connectors/source-postgres/src/main/java/io/airbyte/integrations/source/postgres/PostgresCdcProperties.java b/airbyte-integrations/connectors/source-postgres/src/main/java/io/airbyte/integrations/source/postgres/PostgresCdcProperties.java index 0e8ac93958c9..a09044fccd46 100644 --- a/airbyte-integrations/connectors/source-postgres/src/main/java/io/airbyte/integrations/source/postgres/PostgresCdcProperties.java +++ b/airbyte-integrations/connectors/source-postgres/src/main/java/io/airbyte/integrations/source/postgres/PostgresCdcProperties.java @@ -13,7 +13,7 @@ static Properties getDebeziumProperties(final JsonNode config) { final Properties props = new Properties(); props.setProperty("plugin.name", PostgresUtils.getPluginValue(config.get("replication_method"))); props.setProperty("connector.class", "io.debezium.connector.postgresql.PostgresConnector"); - props.setProperty("snapshot.mode", "exported"); + props.setProperty("snapshot.mode", "initial"); props.setProperty("slot.name", config.get("replication_method").get("replication_slot").asText()); props.setProperty("publication.name", 
config.get("replication_method").get("publication").asText()); diff --git a/airbyte-integrations/connectors/source-postgres/src/main/java/io/airbyte/integrations/source/postgres/PostgresCdcStateHandler.java b/airbyte-integrations/connectors/source-postgres/src/main/java/io/airbyte/integrations/source/postgres/PostgresCdcStateHandler.java index 50c93d0405ce..6175f81c904f 100644 --- a/airbyte-integrations/connectors/source-postgres/src/main/java/io/airbyte/integrations/source/postgres/PostgresCdcStateHandler.java +++ b/airbyte-integrations/connectors/source-postgres/src/main/java/io/airbyte/integrations/source/postgres/PostgresCdcStateHandler.java @@ -7,12 +7,13 @@ import com.fasterxml.jackson.databind.JsonNode; import io.airbyte.commons.json.Jsons; import io.airbyte.integrations.debezium.CdcStateHandler; -import io.airbyte.integrations.source.relationaldb.StateManager; import io.airbyte.integrations.source.relationaldb.models.CdcState; +import io.airbyte.integrations.source.relationaldb.state.StateManager; import io.airbyte.protocol.models.AirbyteMessage; import io.airbyte.protocol.models.AirbyteMessage.Type; import io.airbyte.protocol.models.AirbyteStateMessage; import java.util.Map; +import java.util.Optional; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -31,7 +32,11 @@ public AirbyteMessage saveState(final Map offset, final String d LOGGER.info("debezium state: {}", asJson); final CdcState cdcState = new CdcState().withState(asJson); stateManager.getCdcStateManager().setCdcState(cdcState); - final AirbyteStateMessage stateMessage = stateManager.emit(); + /* + * Namespace pair is ignored by global state manager, but is needed for satisfy the API contract. + * Therefore, provide an empty optional. 
+ */ + final AirbyteStateMessage stateMessage = stateManager.emit(Optional.empty()); return new AirbyteMessage().withType(Type.STATE).withState(stateMessage); } diff --git a/airbyte-integrations/connectors/source-postgres/src/main/java/io/airbyte/integrations/source/postgres/PostgresSource.java b/airbyte-integrations/connectors/source-postgres/src/main/java/io/airbyte/integrations/source/postgres/PostgresSource.java index cb83f7324c69..b98d741164fd 100644 --- a/airbyte-integrations/connectors/source-postgres/src/main/java/io/airbyte/integrations/source/postgres/PostgresSource.java +++ b/airbyte-integrations/connectors/source-postgres/src/main/java/io/airbyte/integrations/source/postgres/PostgresSource.java @@ -4,6 +4,7 @@ package io.airbyte.integrations.source.postgres; +import static io.airbyte.integrations.debezium.AirbyteDebeziumHandler.shouldUseCDC; import static io.airbyte.integrations.debezium.internals.DebeziumEventUtils.CDC_DELETED_AT; import static io.airbyte.integrations.debezium.internals.DebeziumEventUtils.CDC_UPDATED_AT; import static java.util.stream.Collectors.toList; @@ -26,12 +27,17 @@ import io.airbyte.integrations.debezium.AirbyteDebeziumHandler; import io.airbyte.integrations.source.jdbc.AbstractJdbcSource; import io.airbyte.integrations.source.jdbc.dto.JdbcPrivilegeDto; -import io.airbyte.integrations.source.relationaldb.StateManager; import io.airbyte.integrations.source.relationaldb.TableInfo; +import io.airbyte.integrations.source.relationaldb.models.CdcState; +import io.airbyte.integrations.source.relationaldb.state.StateManager; import io.airbyte.protocol.models.AirbyteCatalog; import io.airbyte.protocol.models.AirbyteConnectionStatus; +import io.airbyte.protocol.models.AirbyteGlobalState; import io.airbyte.protocol.models.AirbyteMessage; +import io.airbyte.protocol.models.AirbyteStateMessage; +import io.airbyte.protocol.models.AirbyteStateMessage.AirbyteStateType; import io.airbyte.protocol.models.AirbyteStream; +import 
io.airbyte.protocol.models.AirbyteStreamState; import io.airbyte.protocol.models.CommonField; import io.airbyte.protocol.models.ConfiguredAirbyteCatalog; import io.airbyte.protocol.models.SyncMode; @@ -227,16 +233,8 @@ public List> getIncrementalIterators( final Map>> tableNameToTable, final StateManager stateManager, final Instant emittedAt) { - /** - * If a customer sets up a postgres source with cdc parameters (replication_slot and publication) - * but selects all the tables in FULL_REFRESH mode then we would still end up going through this - * path. We do have a check in place for debezium to make sure only tales in INCREMENTAL mode are - * synced {@link DebeziumRecordPublisher#getTableWhitelist(ConfiguredAirbyteCatalog)} but we should - * have a check here as well to make sure that if no table is in INCREMENTAL mode then skip this - * part - */ final JsonNode sourceConfig = database.getSourceConfig(); - if (isCdc(sourceConfig)) { + if (isCdc(sourceConfig) && shouldUseCDC(catalog)) { final AirbyteDebeziumHandler handler = new AirbyteDebeziumHandler(sourceConfig, PostgresCdcTargetPosition.targetPosition(database), PostgresCdcProperties.getDebeziumProperties(sourceConfig), catalog, false); @@ -404,6 +402,27 @@ private static AirbyteStream addCdcMetadataColumns(final AirbyteStream stream) { return stream; } + // TODO This is a temporary override so that the Postgres source can take advantage of per-stream + // state + @Override + protected List generateEmptyInitialState(final JsonNode config) { + if (getSupportedStateType(config) == AirbyteStateType.GLOBAL) { + final AirbyteGlobalState globalState = new AirbyteGlobalState() + .withSharedState(Jsons.jsonNode(new CdcState())) + .withStreamStates(List.of()); + return List.of(new AirbyteStateMessage().withType(AirbyteStateType.GLOBAL).withGlobal(globalState)); + } else { + return List.of(new AirbyteStateMessage() + .withType(AirbyteStateType.STREAM) + .withStream(new AirbyteStreamState())); + } + } + + @Override + 
protected AirbyteStateType getSupportedStateType(final JsonNode config) { + return isCdc(config) ? AirbyteStateType.GLOBAL : AirbyteStateType.STREAM; + } + public static void main(final String[] args) throws Exception { final Source source = PostgresSource.sshWrappedSource(); LOGGER.info("starting source: {}", PostgresSource.class); diff --git a/airbyte-integrations/connectors/source-postgres/src/main/java/io/airbyte/integrations/source/postgres/PostgresSourceOperations.java b/airbyte-integrations/connectors/source-postgres/src/main/java/io/airbyte/integrations/source/postgres/PostgresSourceOperations.java index 4d8247798a79..8984fa088b1f 100644 --- a/airbyte-integrations/connectors/source-postgres/src/main/java/io/airbyte/integrations/source/postgres/PostgresSourceOperations.java +++ b/airbyte-integrations/connectors/source-postgres/src/main/java/io/airbyte/integrations/source/postgres/PostgresSourceOperations.java @@ -4,6 +4,8 @@ package io.airbyte.integrations.source.postgres; +import static io.airbyte.db.DataTypeUtils.TIMESTAMP_FORMATTER; +import static io.airbyte.db.DataTypeUtils.TIME_FORMATTER; import static io.airbyte.db.jdbc.JdbcConstants.INTERNAL_COLUMN_NAME; import static io.airbyte.db.jdbc.JdbcConstants.INTERNAL_COLUMN_TYPE; import static io.airbyte.db.jdbc.JdbcConstants.INTERNAL_COLUMN_TYPE_NAME; @@ -21,7 +23,6 @@ import io.airbyte.db.jdbc.JdbcSourceOperations; import io.airbyte.protocol.models.JsonSchemaType; import java.math.BigDecimal; -import java.sql.Date; import java.sql.JDBCType; import java.sql.PreparedStatement; import java.sql.ResultSet; @@ -30,6 +31,9 @@ import java.time.LocalDate; import java.time.LocalDateTime; import java.time.LocalTime; +import java.time.OffsetDateTime; +import java.time.OffsetTime; +import java.time.format.DateTimeParseException; import java.util.Collections; import org.postgresql.jdbc.PgResultSetMetaData; import org.slf4j.Logger; @@ -79,15 +83,81 @@ public JsonNode rowToJson(final ResultSet queryContext) throws 
SQLException { } @Override - protected void setDate(final PreparedStatement preparedStatement, final int parameterIndex, final String value) throws SQLException { + public void setStatementField(final PreparedStatement preparedStatement, + final int parameterIndex, + final JDBCType cursorFieldType, + final String value) + throws SQLException { + switch (cursorFieldType) { + + case TIMESTAMP -> setTimestamp(preparedStatement, parameterIndex, value); + case TIMESTAMP_WITH_TIMEZONE -> setTimestampWithTimezone(preparedStatement, parameterIndex, value); + case TIME -> setTime(preparedStatement, parameterIndex, value); + case TIME_WITH_TIMEZONE -> setTimeWithTimezone(preparedStatement, parameterIndex, value); + case DATE -> setDate(preparedStatement, parameterIndex, value); + case BIT -> setBit(preparedStatement, parameterIndex, value); + case BOOLEAN -> setBoolean(preparedStatement, parameterIndex, value); + case TINYINT, SMALLINT -> setShortInt(preparedStatement, parameterIndex, value); + case INTEGER -> setInteger(preparedStatement, parameterIndex, value); + case BIGINT -> setBigInteger(preparedStatement, parameterIndex, value); + case FLOAT, DOUBLE -> setDouble(preparedStatement, parameterIndex, value); + case REAL -> setReal(preparedStatement, parameterIndex, value); + case NUMERIC, DECIMAL -> setDecimal(preparedStatement, parameterIndex, value); + case CHAR, NCHAR, NVARCHAR, VARCHAR, LONGVARCHAR -> setString(preparedStatement, parameterIndex, value); + case BINARY, BLOB -> setBinary(preparedStatement, parameterIndex, value); + // since cursor are expected to be comparable, handle cursor typing strictly and error on + // unrecognized types + default -> throw new IllegalArgumentException(String.format("%s is not supported.", cursorFieldType)); + } + } + + private void setTimeWithTimezone(final PreparedStatement preparedStatement, final int parameterIndex, final String value) throws SQLException { + try { + preparedStatement.setObject(parameterIndex, 
OffsetTime.parse(value)); + } catch (final DateTimeParseException e) { + // attempt to parse the time w/o timezone. This can be caused by schema created with a different + // version of the connector + preparedStatement.setObject(parameterIndex, LocalTime.parse(value)); + } + } + + private void setTimestampWithTimezone(final PreparedStatement preparedStatement, final int parameterIndex, final String value) throws SQLException { + try { + preparedStatement.setObject(parameterIndex, OffsetDateTime.parse(value)); + } catch (final DateTimeParseException e) { + // attempt to parse the datetime w/o timezone. This can be caused by schema created with a different + // version of the connector + preparedStatement.setObject(parameterIndex, LocalDateTime.parse(value)); + } + } + + @Override + protected void setTimestamp(final PreparedStatement preparedStatement, final int parameterIndex, final String value) throws SQLException { + try { + preparedStatement.setObject(parameterIndex, LocalDateTime.parse(value)); + } catch (final DateTimeParseException e) { + // attempt to parse the datetime with timezone. This can be caused by schema created with an older + // version of the connector + preparedStatement.setObject(parameterIndex, OffsetDateTime.parse(value)); + } + } + + @Override + protected void setTime(final PreparedStatement preparedStatement, final int parameterIndex, final String value) throws SQLException { try { - Date date = Date.valueOf(value); - preparedStatement.setDate(parameterIndex, date); - } catch (final Exception e) { - throw new RuntimeException(e); + preparedStatement.setObject(parameterIndex, LocalTime.parse(value)); + } catch (final DateTimeParseException e) { + // attempt to parse the datetime with timezone. 
This can be caused by schema created with an older + // version of the connector + preparedStatement.setObject(parameterIndex, OffsetTime.parse(value)); } } + @Override + protected void setDate(final PreparedStatement preparedStatement, final int parameterIndex, final String value) throws SQLException { + preparedStatement.setObject(parameterIndex, LocalDate.parse(value)); + } + @Override public void setJsonField(final ResultSet resultSet, final int colIndex, final ObjectNode json) throws SQLException { final PgResultSetMetaData metadata = (PgResultSetMetaData) resultSet.getMetaData(); @@ -127,22 +197,22 @@ public void setJsonField(final ResultSet resultSet, final int colIndex, final Ob } @Override - protected void putDate(ObjectNode node, String columnName, ResultSet resultSet, int index) throws SQLException { - LocalDate date = getDateTimeObject(resultSet, index, LocalDate.class); + protected void putDate(final ObjectNode node, final String columnName, final ResultSet resultSet, final int index) throws SQLException { + final LocalDate date = getDateTimeObject(resultSet, index, LocalDate.class); node.put(columnName, resolveEra(date, date.toString())); } @Override - protected void putTime(ObjectNode node, String columnName, ResultSet resultSet, int index) throws SQLException { - LocalTime time = getDateTimeObject(resultSet, index, LocalTime.class); - node.put(columnName, time.toString()); + protected void putTime(final ObjectNode node, final String columnName, final ResultSet resultSet, final int index) throws SQLException { + final LocalTime time = getDateTimeObject(resultSet, index, LocalTime.class); + node.put(columnName, time.format(TIME_FORMATTER)); } @Override - protected void putTimestamp(ObjectNode node, String columnName, ResultSet resultSet, int index) throws SQLException { - LocalDateTime timestamp = getDateTimeObject(resultSet, index, LocalDateTime.class); - LocalDate date = timestamp.toLocalDate(); - node.put(columnName, resolveEra(date, 
timestamp.toString())); + protected void putTimestamp(final ObjectNode node, final String columnName, final ResultSet resultSet, final int index) throws SQLException { + final LocalDateTime timestamp = getDateTimeObject(resultSet, index, LocalDateTime.class); + final LocalDate date = timestamp.toLocalDate(); + node.put(columnName, resolveEra(date, timestamp.format(TIMESTAMP_FORMATTER))); } @Override @@ -171,7 +241,7 @@ public JDBCType getFieldType(final JsonNode field) { } @Override - public JsonSchemaType getJsonType(JDBCType jdbcType) { + public JsonSchemaType getJsonType(final JDBCType jdbcType) { return switch (jdbcType) { case BOOLEAN -> JsonSchemaType.BOOLEAN; case TINYINT, SMALLINT, INTEGER, BIGINT, FLOAT, DOUBLE, REAL, NUMERIC, DECIMAL -> JsonSchemaType.NUMBER; @@ -221,7 +291,7 @@ private void putHstoreAsJson(final ObjectNode node, final String columnName, fin final var data = resultSet.getObject(index); try { node.put(columnName, OBJECT_MAPPER.writeValueAsString(data)); - } catch (JsonProcessingException e) { + } catch (final JsonProcessingException e) { throw new RuntimeException("Could not parse 'hstore' value:" + e); } } diff --git a/airbyte-integrations/connectors/source-postgres/src/main/java/io/airbyte/integrations/source/postgres/PostgresSourceRunner.java b/airbyte-integrations/connectors/source-postgres/src/main/java/io/airbyte/integrations/source/postgres/PostgresSourceRunner.java new file mode 100644 index 000000000000..6af7590e480d --- /dev/null +++ b/airbyte-integrations/connectors/source-postgres/src/main/java/io/airbyte/integrations/source/postgres/PostgresSourceRunner.java @@ -0,0 +1,18 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+ */ + +package io.airbyte.integrations.source.postgres; + +import io.airbyte.integrations.base.adaptive.AdaptiveSourceRunner; + +public class PostgresSourceRunner { + + public static void main(final String[] args) throws Exception { + AdaptiveSourceRunner.baseOnEnv() + .withOssSource(PostgresSource::sshWrappedSource) + .withCloudSource(PostgresSourceStrictEncrypt::new) + .run(args); + } + +} diff --git a/airbyte-integrations/connectors/source-postgres/src/main/java/io/airbyte/integrations/source/postgres/PostgresSourceStrictEncrypt.java b/airbyte-integrations/connectors/source-postgres/src/main/java/io/airbyte/integrations/source/postgres/PostgresSourceStrictEncrypt.java new file mode 100644 index 000000000000..dc2d069e3f3e --- /dev/null +++ b/airbyte-integrations/connectors/source-postgres/src/main/java/io/airbyte/integrations/source/postgres/PostgresSourceStrictEncrypt.java @@ -0,0 +1,42 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.integrations.source.postgres; + +import com.fasterxml.jackson.databind.node.ObjectNode; +import io.airbyte.commons.json.Jsons; +import io.airbyte.integrations.base.IntegrationRunner; +import io.airbyte.integrations.base.Source; +import io.airbyte.integrations.base.spec_modification.SpecModifyingSource; +import io.airbyte.protocol.models.ConnectorSpecification; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * This class is copied from source-postgres-strict-encrypt. The original file can be deleted + * completely once the migration of multi-variant connector is done. 
+ */ +public class PostgresSourceStrictEncrypt extends SpecModifyingSource implements Source { + + private static final Logger LOGGER = LoggerFactory.getLogger(PostgresSourceStrictEncrypt.class); + + PostgresSourceStrictEncrypt() { + super(PostgresSource.sshWrappedSource()); + } + + @Override + public ConnectorSpecification modifySpec(final ConnectorSpecification originalSpec) { + final ConnectorSpecification spec = Jsons.clone(originalSpec); + ((ObjectNode) spec.getConnectionSpecification().get("properties")).remove("ssl"); + return spec; + } + + public static void main(final String[] args) throws Exception { + final Source source = new PostgresSourceStrictEncrypt(); + LOGGER.info("starting source: {}", PostgresSourceStrictEncrypt.class); + new IntegrationRunner(source).run(args); + LOGGER.info("completed source: {}", PostgresSourceStrictEncrypt.class); + } + +} diff --git a/airbyte-integrations/connectors/source-postgres/src/main/resources/spec.json b/airbyte-integrations/connectors/source-postgres/src/main/resources/spec.json index 4123bbc794c9..c8d4b7707d30 100644 --- a/airbyte-integrations/connectors/source-postgres/src/main/resources/spec.json +++ b/airbyte-integrations/connectors/source-postgres/src/main/resources/spec.json @@ -55,8 +55,8 @@ "order": 5 }, "jdbc_url_params": { - "description": "Additional properties to pass to the JDBC URL string when connecting to the database formatted as 'key=value' pairs separated by the symbol '&'. (example: key1=value1&key2=value2&key3=value3).", - "title": "JDBC URL Params", + "description": "Additional properties to pass to the JDBC URL string when connecting to the database formatted as 'key=value' pairs separated by the symbol '&'. (example: key1=value1&key2=value2&key3=value3). 
For more information read about additional JDBC URL parameters.", + "title": "JDBC URL Parameters (Advanced)", "type": "string", "order": 6 }, @@ -104,7 +104,7 @@ "plugin": { "type": "string", "title": "Plugin", - "description": "A logical decoding plug-in installed on the PostgreSQL server. `pgoutput` plug-in is used by default.\nIf replication table contains a lot of big jsonb values it is recommended to use `wal2json` plug-in. For more information about `wal2json` plug-in read Postgres Source docs.", + "description": "A logical decoding plug-in installed on the PostgreSQL server. `pgoutput` plug-in is used by default.\nIf replication table contains a lot of big jsonb values it is recommended to use `wal2json` plug-in. For more information about `wal2json` plug-in read Select replication plugin.", "enum": ["pgoutput", "wal2json"], "default": "pgoutput", "order": 1 @@ -112,13 +112,13 @@ "replication_slot": { "type": "string", "title": "Replication Slot", - "description": "A plug-in logical replication slot.", + "description": "A plug-in logical replication slot. For more information read about replication slots.", "order": 2 }, "publication": { "type": "string", "title": "Publication", - "description": "A Postgres publication used for consuming changes.", + "description": "A Postgres publication used for consuming changes. 
For more information read about publications and replication identities.", "order": 3 } } @@ -128,3 +128,4 @@ } } } + diff --git a/airbyte-integrations/connectors/source-postgres/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/AbstractSshPostgresSourceAcceptanceTest.java b/airbyte-integrations/connectors/source-postgres/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/AbstractSshPostgresSourceAcceptanceTest.java index 9f26a2532039..47bdddd32fe6 100644 --- a/airbyte-integrations/connectors/source-postgres/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/AbstractSshPostgresSourceAcceptanceTest.java +++ b/airbyte-integrations/connectors/source-postgres/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/AbstractSshPostgresSourceAcceptanceTest.java @@ -6,6 +6,7 @@ import com.fasterxml.jackson.databind.JsonNode; import com.google.common.collect.Lists; +import io.airbyte.commons.functional.CheckedFunction; import io.airbyte.commons.json.Jsons; import io.airbyte.db.Database; import io.airbyte.db.factory.DSLContextFactory; @@ -25,17 +26,46 @@ import io.airbyte.protocol.models.SyncMode; import java.util.HashMap; import java.util.List; -import org.jooq.DSLContext; import org.jooq.SQLDialect; +import org.testcontainers.containers.Network; import org.testcontainers.containers.PostgreSQLContainer; public abstract class AbstractSshPostgresSourceAcceptanceTest extends SourceAcceptanceTest { private static final String STREAM_NAME = "public.id_and_name"; private static final String STREAM_NAME2 = "public.starships"; - private PostgreSQLContainer db; - private final SshBastionContainer bastion = new SshBastionContainer(); + private static final Network network = Network.newNetwork(); private static JsonNode config; + private final SshBastionContainer bastion = new SshBastionContainer(); + private PostgreSQLContainer db; + + private 
static void populateDatabaseTestData() throws Exception { + SshTunnel.sshWrap( + config, + List.of("host"), + List.of("port"), + (CheckedFunction, Exception>) mangledConfig -> getDatabaseFromConfig(mangledConfig) + .query(ctx -> { + ctx.fetch("CREATE TABLE id_and_name(id INTEGER, name VARCHAR(200));"); + ctx.fetch("INSERT INTO id_and_name (id, name) VALUES (1,'picard'), (2, 'crusher'), (3, 'vash');"); + ctx.fetch("CREATE TABLE starships(id INTEGER, name VARCHAR(200));"); + ctx.fetch("INSERT INTO starships (id, name) VALUES (1,'enterprise-d'), (2, 'defiant'), (3, 'yamato');"); + return null; + })); + } + + private static Database getDatabaseFromConfig(final JsonNode config) { + return new Database( + DSLContextFactory.create( + config.get("username").asText(), + config.get("password").asText(), + DatabaseDriver.POSTGRESQL.getDriverClassName(), + String.format(DatabaseDriver.POSTGRESQL.getUrlFormatString(), + config.get("host").asText(), + config.get("port").asInt(), + config.get("database").asText()), + SQLDialect.POSTGRES)); + } public abstract SshTunnel.TunnelMethod getTunnelMethod(); @@ -50,37 +80,15 @@ protected void setupEnvironment(final TestDestinationEnv environment) throws Exc } private void startTestContainers() { - bastion.initAndStartBastion(); + bastion.initAndStartBastion(network); initAndStartJdbcContainer(); } private void initAndStartJdbcContainer() { - db = new PostgreSQLContainer<>("postgres:13-alpine").withNetwork(bastion.getNetWork()); + db = new PostgreSQLContainer<>("postgres:13-alpine").withNetwork(network); db.start(); } - private static void populateDatabaseTestData() throws Exception { - try (final DSLContext dslContext = DSLContextFactory.create( - config.get("username").asText(), - config.get("password").asText(), - DatabaseDriver.POSTGRESQL.getDriverClassName(), - String.format(DatabaseDriver.POSTGRESQL.getUrlFormatString(), - config.get("host").asText(), - config.get("port").asInt(), - config.get("database").asText()), - 
SQLDialect.POSTGRES)) { - final Database database = new Database(dslContext); - - database.query(ctx -> { - ctx.fetch("CREATE TABLE id_and_name(id INTEGER, name VARCHAR(200));"); - ctx.fetch("INSERT INTO id_and_name (id, name) VALUES (1,'picard'), (2, 'crusher'), (3, 'vash');"); - ctx.fetch("CREATE TABLE starships(id INTEGER, name VARCHAR(200));"); - ctx.fetch("INSERT INTO starships (id, name) VALUES (1,'enterprise-d'), (2, 'defiant'), (3, 'yamato');"); - return null; - }); - } - } - @Override protected void tearDown(final TestDestinationEnv testEnv) { bastion.stopAndCloseContainers(db); @@ -129,4 +137,9 @@ protected JsonNode getState() { return Jsons.jsonNode(new HashMap<>()); } + @Override + protected boolean supportsPerStream() { + return true; + } + } diff --git a/airbyte-integrations/connectors/source-postgres/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/CdcPostgresSourceAcceptanceTest.java b/airbyte-integrations/connectors/source-postgres/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/CdcPostgresSourceAcceptanceTest.java index 19df7527dddf..8deeccb53c64 100644 --- a/airbyte-integrations/connectors/source-postgres/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/CdcPostgresSourceAcceptanceTest.java +++ b/airbyte-integrations/connectors/source-postgres/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/CdcPostgresSourceAcceptanceTest.java @@ -91,12 +91,12 @@ protected void setupEnvironment(final TestDestinationEnv environment) throws Exc * {@link io.airbyte.integrations.source.postgres.PostgresSource#removeIncrementalWithoutPk(AirbyteStream)} */ database.query(ctx -> { - ctx.execute("SELECT pg_create_logical_replication_slot('" + SLOT_NAME_BASE + "', 'pgoutput');"); - ctx.execute("CREATE PUBLICATION " + PUBLICATION + " FOR ALL TABLES;"); ctx.execute("CREATE TABLE id_and_name(id INTEGER, name 
VARCHAR(200));"); ctx.execute("INSERT INTO id_and_name (id, name) VALUES (1,'picard'), (2, 'crusher'), (3, 'vash');"); ctx.execute("CREATE TABLE starships(id INTEGER, name VARCHAR(200));"); ctx.execute("INSERT INTO starships (id, name) VALUES (1,'enterprise-d'), (2, 'defiant'), (3, 'yamato');"); + ctx.execute("SELECT pg_create_logical_replication_slot('" + SLOT_NAME_BASE + "', 'pgoutput');"); + ctx.execute("CREATE PUBLICATION " + PUBLICATION + " FOR ALL TABLES;"); return null; }); } diff --git a/airbyte-integrations/connectors/source-postgres/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/PostgresSourceAcceptanceTest.java b/airbyte-integrations/connectors/source-postgres/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/PostgresSourceAcceptanceTest.java index acd1da14241f..623d2ef11e80 100644 --- a/airbyte-integrations/connectors/source-postgres/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/PostgresSourceAcceptanceTest.java +++ b/airbyte-integrations/connectors/source-postgres/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/PostgresSourceAcceptanceTest.java @@ -134,4 +134,9 @@ protected JsonNode getState() { return Jsons.jsonNode(new HashMap<>()); } + @Override + protected boolean supportsPerStream() { + return true; + } + } diff --git a/airbyte-integrations/connectors/source-postgres/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/PostgresSourceDatatypeTest.java b/airbyte-integrations/connectors/source-postgres/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/PostgresSourceDatatypeTest.java index 4e8871f46df8..339c8011736d 100644 --- a/airbyte-integrations/connectors/source-postgres/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/PostgresSourceDatatypeTest.java +++ 
b/airbyte-integrations/connectors/source-postgres/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/PostgresSourceDatatypeTest.java @@ -449,8 +449,9 @@ protected void initTests() { .fullSourceDataType(fullSourceType) .airbyteType(JsonSchemaType.STRING_TIME_WITHOUT_TIMEZONE) // time column will ignore time zone - .addInsertValues("null", "'13:00:01'", "'13:00:02+8'", "'13:00:03-8'", "'13:00:04Z'", "'13:00:05Z+8'", "'13:00:06Z-8'") - .addExpectedValues(null, "13:00:01", "13:00:02", "13:00:03", "13:00:04", "13:00:05", "13:00:06") + .addInsertValues("null", "'13:00:01'", "'13:00:02+8'", "'13:00:03-8'", "'13:00:04Z'", "'13:00:05.01234Z+8'", "'13:00:00Z-8'") + .addExpectedValues(null, "13:00:01.000000", "13:00:02.000000", "13:00:03.000000", "13:00:04.000000", "13:00:05.012340", + "13:00:00.000000") .build()); } @@ -461,10 +462,11 @@ protected void initTests() { .sourceType("timetz") .fullSourceDataType(fullSourceType) .airbyteType(JsonSchemaType.STRING_TIME_WITH_TIMEZONE) - .addInsertValues("null", "'13:00:01'", "'13:00:02+8'", "'13:00:03-8'", "'13:00:04Z'", "'13:00:05Z+8'", "'13:00:06Z-8'") + .addInsertValues("null", "'13:00:01'", "'13:00:00+8'", "'13:00:03-8'", "'13:00:04Z'", "'13:00:05.012345Z+8'", "'13:00:06.00000Z-8'") // A time value without time zone will use the time zone set on the database, which is Z-7, // so 13:00:01 is returned as 13:00:01-07. 
- .addExpectedValues(null, "13:00:01-07:00", "13:00:02+08:00", "13:00:03-08:00", "13:00:04Z", "13:00:05-08:00", "13:00:06+08:00") + .addExpectedValues(null, "13:00:01.000000-07:00", "13:00:00.000000+08:00", "13:00:03.000000-08:00", "13:00:04.000000Z", + "13:00:05.012345-08:00", "13:00:06.000000+08:00") .build()); } @@ -475,8 +477,8 @@ protected void initTests() { .sourceType("timestamp") .fullSourceDataType(fullSourceType) .airbyteType(JsonSchemaType.STRING_TIMESTAMP_WITHOUT_TIMEZONE) - .addInsertValues("TIMESTAMP '2004-10-19 10:23:54'", "TIMESTAMP '2004-10-19 10:23:54.123456'", "null") - .addExpectedValues("2004-10-19T10:23:54", "2004-10-19T10:23:54.123456", null) + .addInsertValues("TIMESTAMP '2004-10-19 10:23:00'", "TIMESTAMP '2004-10-19 10:23:54.123456'", "null") + .addExpectedValues("2004-10-19T10:23:00.000000", "2004-10-19T10:23:54.123456", null) .build()); } @@ -487,9 +489,9 @@ protected void initTests() { .sourceType("timestamptz") .fullSourceDataType(fullSourceType) .airbyteType(JsonSchemaType.STRING_TIMESTAMP_WITH_TIMEZONE) - .addInsertValues("TIMESTAMP '2004-10-19 10:23:54-08'", "TIMESTAMP '2004-10-19 10:23:54.123456-08'", "null") + .addInsertValues("TIMESTAMP '2004-10-19 10:23:00-08'", "TIMESTAMP '2004-10-19 10:23:54.123456-08'", "null") // 2004-10-19T10:23:54Z-8 = 2004-10-19T17:23:54Z - .addExpectedValues("2004-10-19T17:23:54Z", "2004-10-19T17:23:54.123456Z", null) + .addExpectedValues("2004-10-19T17:23:00.000000Z", "2004-10-19T17:23:54.123456Z", null) .build()); } diff --git a/airbyte-integrations/connectors/source-postgres-strict-encrypt/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/PostgresSourceStrictEncryptAcceptanceTest.java b/airbyte-integrations/connectors/source-postgres/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/PostgresSourceStrictEncryptAcceptanceTest.java similarity index 92% rename from 
airbyte-integrations/connectors/source-postgres-strict-encrypt/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/PostgresSourceStrictEncryptAcceptanceTest.java rename to airbyte-integrations/connectors/source-postgres/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/PostgresSourceStrictEncryptAcceptanceTest.java index 903d112e98bf..6752036e504e 100644 --- a/airbyte-integrations/connectors/source-postgres-strict-encrypt/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/PostgresSourceStrictEncryptAcceptanceTest.java +++ b/airbyte-integrations/connectors/source-postgres/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/PostgresSourceStrictEncryptAcceptanceTest.java @@ -26,9 +26,16 @@ import java.util.HashMap; import org.jooq.DSLContext; import org.jooq.SQLDialect; +import org.junitpioneer.jupiter.SetEnvironmentVariable; import org.testcontainers.containers.PostgreSQLContainer; import org.testcontainers.utility.DockerImageName; +/** + * This class is copied from source-postgres-strict-encrypt. The original file can be deleted + * completely once the migration of multi-variant connector is done. 
+ */ +@SetEnvironmentVariable(key = "DEPLOYMENT_MODE", + value = "CLOUD") public class PostgresSourceStrictEncryptAcceptanceTest extends SourceAcceptanceTest { private static final String STREAM_NAME = "public.id_and_name"; @@ -82,7 +89,7 @@ protected void tearDown(final TestDestinationEnv testEnv) { @Override protected String getImageName() { - return "airbyte/source-postgres-strict-encrypt:dev"; + return "airbyte/source-postgres:dev"; } @Override @@ -123,4 +130,9 @@ protected JsonNode getState() { return Jsons.jsonNode(new HashMap<>()); } + @Override + protected boolean supportsPerStream() { + return true; + } + } diff --git a/airbyte-integrations/connectors/source-postgres/src/test/java/io/airbyte/integrations/source/postgres/CdcPostgresSourceTest.java b/airbyte-integrations/connectors/source-postgres/src/test/java/io/airbyte/integrations/source/postgres/CdcPostgresSourceTest.java index 477784eda98e..2aa5e03ebfda 100644 --- a/airbyte-integrations/connectors/source-postgres/src/test/java/io/airbyte/integrations/source/postgres/CdcPostgresSourceTest.java +++ b/airbyte-integrations/connectors/source-postgres/src/test/java/io/airbyte/integrations/source/postgres/CdcPostgresSourceTest.java @@ -20,6 +20,8 @@ import io.airbyte.commons.io.IOs; import io.airbyte.commons.json.Jsons; import io.airbyte.commons.string.Strings; +import io.airbyte.commons.util.AutoCloseableIterator; +import io.airbyte.commons.util.AutoCloseableIterators; import io.airbyte.db.Database; import io.airbyte.db.PgLsn; import io.airbyte.db.factory.DSLContextFactory; @@ -31,11 +33,14 @@ import io.airbyte.integrations.debezium.CdcSourceTest; import io.airbyte.integrations.debezium.CdcTargetPosition; import io.airbyte.protocol.models.AirbyteConnectionStatus; +import io.airbyte.protocol.models.AirbyteMessage; +import io.airbyte.protocol.models.AirbyteRecordMessage; import io.airbyte.protocol.models.AirbyteStateMessage; import io.airbyte.protocol.models.AirbyteStream; import 
io.airbyte.test.utils.PostgreSQLContainerHelper; import java.sql.SQLException; import java.util.List; +import java.util.Set; import org.jooq.DSLContext; import org.jooq.SQLDialect; import org.junit.jupiter.api.AfterEach; @@ -47,12 +52,12 @@ abstract class CdcPostgresSourceTest extends CdcSourceTest { - private static final String SLOT_NAME_BASE = "debezium_slot"; - private static final String PUBLICATION = "publication"; + protected static final String SLOT_NAME_BASE = "debezium_slot"; + protected static final String PUBLICATION = "publication"; private PostgreSQLContainer container; - private String dbName; - private Database database; + protected String dbName; + protected Database database; private DSLContext dslContext; private PostgresSource source; private JsonNode config; @@ -83,6 +88,7 @@ protected void setup() throws SQLException { final String fullReplicationSlot = SLOT_NAME_BASE + "_" + dbName; dslContext = getDslContext(config); database = getDatabase(dslContext); + super.setup(); database.query(ctx -> { ctx.execute("SELECT pg_create_logical_replication_slot('" + fullReplicationSlot + "', '" + getPluginName() + "');"); ctx.execute("CREATE PUBLICATION " + PUBLICATION + " FOR ALL TABLES;"); @@ -90,7 +96,6 @@ protected void setup() throws SQLException { return null; }); - super.setup(); } private JsonNode getConfig(final String dbName) { @@ -247,4 +252,70 @@ public String createSchemaQuery(final String schemaName) { return "CREATE SCHEMA " + schemaName + ";"; } + @Override + @Test + public void testRecordsProducedDuringAndAfterSync() throws Exception { + + final int recordsToCreate = 20; + // first batch of records. 20 created here and 6 created in setup method. 
+ for (int recordsCreated = 0; recordsCreated < recordsToCreate; recordsCreated++) { + final JsonNode record = + Jsons.jsonNode(ImmutableMap + .of(COL_ID, 100 + recordsCreated, COL_MAKE_ID, 1, COL_MODEL, + "F-" + recordsCreated)); + writeModelRecord(record); + } + + final AutoCloseableIterator firstBatchIterator = getSource() + .read(getConfig(), CONFIGURED_CATALOG, null); + final List dataFromFirstBatch = AutoCloseableIterators + .toListAndClose(firstBatchIterator); + final List stateAfterFirstBatch = extractStateMessages(dataFromFirstBatch); + assertEquals(1, stateAfterFirstBatch.size()); + assertNotNull(stateAfterFirstBatch.get(0).getData()); + assertExpectedStateMessages(stateAfterFirstBatch); + final Set recordsFromFirstBatch = extractRecordMessages( + dataFromFirstBatch); + assertEquals((MODEL_RECORDS.size() + recordsToCreate), recordsFromFirstBatch.size()); + + // second batch of records again 20 being created + for (int recordsCreated = 0; recordsCreated < recordsToCreate; recordsCreated++) { + final JsonNode record = + Jsons.jsonNode(ImmutableMap + .of(COL_ID, 200 + recordsCreated, COL_MAKE_ID, 1, COL_MODEL, + "F-" + recordsCreated)); + writeModelRecord(record); + } + + final JsonNode state = Jsons.jsonNode(stateAfterFirstBatch); + final AutoCloseableIterator secondBatchIterator = getSource() + .read(getConfig(), CONFIGURED_CATALOG, state); + final List dataFromSecondBatch = AutoCloseableIterators + .toListAndClose(secondBatchIterator); + + final List stateAfterSecondBatch = extractStateMessages(dataFromSecondBatch); + assertEquals(1, stateAfterSecondBatch.size()); + assertNotNull(stateAfterSecondBatch.get(0).getData()); + assertExpectedStateMessages(stateAfterSecondBatch); + + final Set recordsFromSecondBatch = extractRecordMessages( + dataFromSecondBatch); + assertEquals(recordsToCreate * 2, recordsFromSecondBatch.size(), + "Expected 40 records to be replicated in the second sync."); + + // sometimes there can be more than one of these at the end of the 
snapshot and just before the + // first incremental. + final Set recordsFromFirstBatchWithoutDuplicates = removeDuplicates( + recordsFromFirstBatch); + final Set recordsFromSecondBatchWithoutDuplicates = removeDuplicates( + recordsFromSecondBatch); + + final int recordsCreatedBeforeTestCount = MODEL_RECORDS.size(); + assertTrue(recordsCreatedBeforeTestCount < recordsFromFirstBatchWithoutDuplicates.size(), + "Expected first sync to include records created while the test was running."); + assertEquals((recordsToCreate * 3) + recordsCreatedBeforeTestCount, + recordsFromFirstBatchWithoutDuplicates.size() + recordsFromSecondBatchWithoutDuplicates + .size()); + } + } diff --git a/airbyte-integrations/connectors/source-postgres/src/test/java/io/airbyte/integrations/source/postgres/PostgresJdbcSourceAcceptanceTest.java b/airbyte-integrations/connectors/source-postgres/src/test/java/io/airbyte/integrations/source/postgres/PostgresJdbcSourceAcceptanceTest.java index bb25b4493fc2..1695d4ed8543 100644 --- a/airbyte-integrations/connectors/source-postgres/src/test/java/io/airbyte/integrations/source/postgres/PostgresJdbcSourceAcceptanceTest.java +++ b/airbyte-integrations/connectors/source-postgres/src/test/java/io/airbyte/integrations/source/postgres/PostgresJdbcSourceAcceptanceTest.java @@ -7,6 +7,7 @@ import static org.junit.jupiter.api.Assertions.assertEquals; import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.node.ObjectNode; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; @@ -14,25 +15,30 @@ import io.airbyte.commons.json.Jsons; import io.airbyte.commons.resources.MoreResources; import io.airbyte.commons.string.Strings; +import io.airbyte.db.factory.DataSourceFactory; import io.airbyte.db.jdbc.JdbcSourceOperations; +import io.airbyte.db.jdbc.JdbcUtils; +import io.airbyte.db.jdbc.StreamingJdbcDatabase; +import 
io.airbyte.db.jdbc.streaming.AdaptiveStreamingQueryConfig; import io.airbyte.integrations.source.jdbc.AbstractJdbcSource; import io.airbyte.integrations.source.jdbc.test.JdbcSourceAcceptanceTest; -import io.airbyte.integrations.source.relationaldb.models.DbState; import io.airbyte.integrations.source.relationaldb.models.DbStreamState; import io.airbyte.protocol.models.AirbyteCatalog; import io.airbyte.protocol.models.AirbyteMessage; import io.airbyte.protocol.models.AirbyteRecordMessage; -import io.airbyte.protocol.models.AirbyteStateMessage; import io.airbyte.protocol.models.CatalogHelpers; +import io.airbyte.protocol.models.ConfiguredAirbyteStream; import io.airbyte.protocol.models.ConnectorSpecification; import io.airbyte.protocol.models.Field; import io.airbyte.protocol.models.JsonSchemaType; import io.airbyte.protocol.models.SyncMode; import io.airbyte.test.utils.PostgreSQLContainerHelper; import java.sql.JDBCType; +import java.sql.SQLException; import java.util.ArrayList; import java.util.Collections; import java.util.List; +import java.util.stream.Collectors; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.BeforeEach; @@ -43,8 +49,9 @@ class PostgresJdbcSourceAcceptanceTest extends JdbcSourceAcceptanceTest { private static PostgreSQLContainer PSQL_DB; - - private JsonNode config; + public static String COL_WAKEUP_AT = "wakeup_at"; + public static String COL_LAST_VISITED_AT = "last_visited_at"; + public static String COL_LAST_COMMENT_AT = "last_comment_at"; @BeforeAll static void init() { @@ -55,6 +62,12 @@ static void init() { @BeforeEach public void setup() throws Exception { final String dbName = Strings.addRandomSuffix("db", "_", 10).toLowerCase(); + COLUMN_CLAUSE_WITH_PK = + "id INTEGER, name VARCHAR(200), updated_at DATE, wakeup_at TIMETZ, last_visited_at TIMESTAMPTZ, last_comment_at TIMESTAMP"; + COLUMN_CLAUSE_WITHOUT_PK = + "id INTEGER, name VARCHAR(200), updated_at DATE, wakeup_at TIMETZ, 
last_visited_at TIMESTAMPTZ, last_comment_at TIMESTAMP"; + COLUMN_CLAUSE_WITH_COMPOSITE_PK = + "first_name VARCHAR(200), last_name VARCHAR(200), updated_at DATE, wakeup_at TIMETZ, last_visited_at TIMESTAMPTZ, last_comment_at TIMESTAMP"; config = Jsons.jsonNode(ImmutableMap.builder() .put("host", PSQL_DB.getHost()) @@ -70,7 +83,170 @@ public void setup() throws Exception { final String tmpFilePath = IOs.writeFileToRandomTmpDir(initScriptName, "CREATE DATABASE " + dbName + ";"); PostgreSQLContainerHelper.runSqlScript(MountableFile.forHostPath(tmpFilePath), PSQL_DB); - super.setup(); + source = getSource(); + final JsonNode jdbcConfig = getToDatabaseConfigFunction().apply(config); + + streamName = TABLE_NAME; + + dataSource = DataSourceFactory.create( + jdbcConfig.get("username").asText(), + jdbcConfig.has("password") ? jdbcConfig.get("password").asText() : null, + getDriverClass(), + jdbcConfig.get("jdbc_url").asText(), + JdbcUtils.parseJdbcParameters(jdbcConfig, "connection_properties", getJdbcParameterDelimiter())); + + database = new StreamingJdbcDatabase(dataSource, + JdbcUtils.getDefaultSourceOperations(), + AdaptiveStreamingQueryConfig::new); + + createSchemas(); + + database.execute(connection -> { + + connection.createStatement().execute( + createTableQuery(getFullyQualifiedTableName(TABLE_NAME), COLUMN_CLAUSE_WITH_PK, + primaryKeyClause(Collections.singletonList("id")))); + connection.createStatement().execute( + String.format( + "INSERT INTO %s(id, name, updated_at, wakeup_at, last_visited_at, last_comment_at) VALUES (1,'picard', '2004-10-19','10:10:10.123456-05:00','2004-10-19T17:23:54.123456Z','2004-01-01T17:23:54.123456')", + getFullyQualifiedTableName(TABLE_NAME))); + connection.createStatement().execute( + String.format( + "INSERT INTO %s(id, name, updated_at, wakeup_at, last_visited_at, last_comment_at) VALUES (2, 'crusher', '2005-10-19','11:11:11.123456-05:00','2005-10-19T17:23:54.123456Z','2005-01-01T17:23:54.123456')", + 
getFullyQualifiedTableName(TABLE_NAME))); + connection.createStatement().execute( + String.format( + "INSERT INTO %s(id, name, updated_at, wakeup_at, last_visited_at, last_comment_at) VALUES (3, 'vash', '2006-10-19','12:12:12.123456-05:00','2006-10-19T17:23:54.123456Z','2006-01-01T17:23:54.123456')", + getFullyQualifiedTableName(TABLE_NAME))); + + connection.createStatement().execute( + createTableQuery(getFullyQualifiedTableName(TABLE_NAME_WITHOUT_PK), + COLUMN_CLAUSE_WITHOUT_PK, "")); + connection.createStatement().execute( + String.format( + "INSERT INTO %s(id, name, updated_at, wakeup_at, last_visited_at, last_comment_at) VALUES (1,'picard', '2004-10-19','12:12:12.123456-05:00','2004-10-19T17:23:54.123456Z','2004-01-01T17:23:54.123456')", + getFullyQualifiedTableName(TABLE_NAME_WITHOUT_PK))); + connection.createStatement().execute( + String.format( + "INSERT INTO %s(id, name, updated_at, wakeup_at, last_visited_at, last_comment_at) VALUES (2, 'crusher', '2005-10-19','11:11:11.123456-05:00','2005-10-19T17:23:54.123456Z','2005-01-01T17:23:54.123456')", + getFullyQualifiedTableName(TABLE_NAME_WITHOUT_PK))); + connection.createStatement().execute( + String.format( + "INSERT INTO %s(id, name, updated_at, wakeup_at, last_visited_at, last_comment_at) VALUES (3, 'vash', '2006-10-19','10:10:10.123456-05:00','2006-10-19T17:23:54.123456Z','2006-01-01T17:23:54.123456')", + getFullyQualifiedTableName(TABLE_NAME_WITHOUT_PK))); + + connection.createStatement().execute( + createTableQuery(getFullyQualifiedTableName(TABLE_NAME_COMPOSITE_PK), + COLUMN_CLAUSE_WITH_COMPOSITE_PK, + primaryKeyClause(ImmutableList.of("first_name", "last_name")))); + connection.createStatement().execute( + String.format( + "INSERT INTO %s(first_name, last_name, updated_at, wakeup_at, last_visited_at, last_comment_at) VALUES ('first' ,'picard', '2004-10-19','12:12:12.123456-05:00','2004-10-19T17:23:54.123456Z','2004-01-01T17:23:54.123456')", + getFullyQualifiedTableName(TABLE_NAME_COMPOSITE_PK))); + 
connection.createStatement().execute( + String.format( + "INSERT INTO %s(first_name, last_name, updated_at, wakeup_at, last_visited_at, last_comment_at) VALUES ('second', 'crusher', '2005-10-19','11:11:11.123456-05:00','2005-10-19T17:23:54.123456Z','2005-01-01T17:23:54.123456')", + getFullyQualifiedTableName(TABLE_NAME_COMPOSITE_PK))); + connection.createStatement().execute( + String.format( + "INSERT INTO %s(first_name, last_name, updated_at, wakeup_at, last_visited_at, last_comment_at) VALUES ('third', 'vash', '2006-10-19','10:10:10.123456-05:00','2006-10-19T17:23:54.123456Z','2006-01-01T17:23:54.123456')", + getFullyQualifiedTableName(TABLE_NAME_COMPOSITE_PK))); + + }); + + } + + @Override + protected List getAirbyteMessagesReadOneColumn() { + return getTestMessages().stream() + .map(Jsons::clone) + .peek(m -> { + ((ObjectNode) m.getRecord().getData()).remove(COL_NAME); + ((ObjectNode) m.getRecord().getData()).remove(COL_UPDATED_AT); + ((ObjectNode) m.getRecord().getData()).remove(COL_WAKEUP_AT); + ((ObjectNode) m.getRecord().getData()).remove(COL_LAST_VISITED_AT); + ((ObjectNode) m.getRecord().getData()).remove(COL_LAST_COMMENT_AT); + ((ObjectNode) m.getRecord().getData()).replace(COL_ID, + Jsons.jsonNode(m.getRecord().getData().get(COL_ID).asInt())); + }) + .collect(Collectors.toList()); + } + + @Override + protected ArrayList getAirbyteMessagesCheckCursorSpaceInColumnName(final ConfiguredAirbyteStream streamWithSpaces) { + final AirbyteMessage firstMessage = getTestMessages().get(0); + firstMessage.getRecord().setStream(streamWithSpaces.getStream().getName()); + ((ObjectNode) firstMessage.getRecord().getData()).remove(COL_UPDATED_AT); + ((ObjectNode) firstMessage.getRecord().getData()).remove(COL_WAKEUP_AT); + ((ObjectNode) firstMessage.getRecord().getData()).remove(COL_LAST_VISITED_AT); + ((ObjectNode) firstMessage.getRecord().getData()).remove(COL_LAST_COMMENT_AT); + ((ObjectNode) firstMessage.getRecord().getData()).set(COL_LAST_NAME_WITH_SPACE, + 
((ObjectNode) firstMessage.getRecord().getData()).remove(COL_NAME)); + + final AirbyteMessage secondMessage = getTestMessages().get(2); + secondMessage.getRecord().setStream(streamWithSpaces.getStream().getName()); + ((ObjectNode) secondMessage.getRecord().getData()).remove(COL_UPDATED_AT); + ((ObjectNode) secondMessage.getRecord().getData()).remove(COL_WAKEUP_AT); + ((ObjectNode) secondMessage.getRecord().getData()).remove(COL_LAST_VISITED_AT); + ((ObjectNode) secondMessage.getRecord().getData()).remove(COL_LAST_COMMENT_AT); + ((ObjectNode) secondMessage.getRecord().getData()).set(COL_LAST_NAME_WITH_SPACE, + ((ObjectNode) secondMessage.getRecord().getData()).remove(COL_NAME)); + + Lists.newArrayList(getTestMessages().get(0), getTestMessages().get(2)); + + return Lists.newArrayList(firstMessage, secondMessage); + } + + @Override + protected List getAirbyteMessagesSecondSync(final String streamName2) { + return getTestMessages() + .stream() + .map(Jsons::clone) + .peek(m -> { + m.getRecord().setStream(streamName2); + m.getRecord().setNamespace(getDefaultNamespace()); + ((ObjectNode) m.getRecord().getData()).remove(COL_UPDATED_AT); + ((ObjectNode) m.getRecord().getData()).remove(COL_WAKEUP_AT); + ((ObjectNode) m.getRecord().getData()).remove(COL_LAST_VISITED_AT); + ((ObjectNode) m.getRecord().getData()).remove(COL_LAST_COMMENT_AT); + ((ObjectNode) m.getRecord().getData()).replace(COL_ID, + Jsons.jsonNode(m.getRecord().getData().get(COL_ID).asInt())); + }) + .collect(Collectors.toList()); + } + + protected List getAirbyteMessagesSecondStreamWithNamespace(final String streamName2) { + return getTestMessages() + .stream() + .map(Jsons::clone) + .peek(m -> { + m.getRecord().setStream(streamName2); + ((ObjectNode) m.getRecord().getData()).remove(COL_UPDATED_AT); + ((ObjectNode) m.getRecord().getData()).remove(COL_WAKEUP_AT); + ((ObjectNode) m.getRecord().getData()).remove(COL_LAST_VISITED_AT); + ((ObjectNode) m.getRecord().getData()).remove(COL_LAST_COMMENT_AT); + 
((ObjectNode) m.getRecord().getData()).replace(COL_ID, + Jsons.jsonNode(m.getRecord().getData().get(COL_ID).asInt())); + }) + .collect(Collectors.toList()); + } + + protected List getAirbyteMessagesForTablesWithQuoting(final ConfiguredAirbyteStream streamForTableWithSpaces) { + return getTestMessages() + .stream() + .map(Jsons::clone) + .peek(m -> { + m.getRecord().setStream(streamForTableWithSpaces.getStream().getName()); + ((ObjectNode) m.getRecord().getData()).set(COL_LAST_NAME_WITH_SPACE, + ((ObjectNode) m.getRecord().getData()).remove(COL_NAME)); + ((ObjectNode) m.getRecord().getData()).remove(COL_UPDATED_AT); + ((ObjectNode) m.getRecord().getData()).remove(COL_LAST_VISITED_AT); + ((ObjectNode) m.getRecord().getData()).remove(COL_LAST_COMMENT_AT); + ((ObjectNode) m.getRecord().getData()).remove(COL_WAKEUP_AT); + ((ObjectNode) m.getRecord().getData()).replace(COL_ID, + Jsons.jsonNode(m.getRecord().getData().get(COL_ID).asInt())); + }) + .collect(Collectors.toList()); } @Override @@ -114,20 +290,41 @@ protected List getTestMessages() { .withData(Jsons.jsonNode(ImmutableMap .of(COL_ID, ID_VALUE_1, COL_NAME, "picard", - COL_UPDATED_AT, "2004-10-19")))), + COL_UPDATED_AT, "2004-10-19", + COL_WAKEUP_AT, "10:10:10.123456-05:00", + COL_LAST_VISITED_AT, "2004-10-19T17:23:54.123456Z", + COL_LAST_COMMENT_AT, "2004-01-01T17:23:54.123456")))), new AirbyteMessage().withType(AirbyteMessage.Type.RECORD) .withRecord(new AirbyteRecordMessage().withStream(streamName).withNamespace(getDefaultNamespace()) .withData(Jsons.jsonNode(ImmutableMap .of(COL_ID, ID_VALUE_2, COL_NAME, "crusher", - COL_UPDATED_AT, - "2005-10-19")))), + COL_UPDATED_AT, "2005-10-19", + COL_WAKEUP_AT, "11:11:11.123456-05:00", + COL_LAST_VISITED_AT, "2005-10-19T17:23:54.123456Z", + COL_LAST_COMMENT_AT, "2005-01-01T17:23:54.123456")))), new AirbyteMessage().withType(AirbyteMessage.Type.RECORD) .withRecord(new AirbyteRecordMessage().withStream(streamName).withNamespace(getDefaultNamespace()) 
.withData(Jsons.jsonNode(ImmutableMap .of(COL_ID, ID_VALUE_3, COL_NAME, "vash", - COL_UPDATED_AT, "2006-10-19"))))); + COL_UPDATED_AT, "2006-10-19", + COL_WAKEUP_AT, "12:12:12.123456-05:00", + COL_LAST_VISITED_AT, "2006-10-19T17:23:54.123456Z", + COL_LAST_COMMENT_AT, "2006-01-01T17:23:54.123456"))))); + } + + protected void executeStatementReadIncrementallyTwice() throws SQLException { + database.execute(connection -> { + connection.createStatement().execute( + String.format( + "INSERT INTO %s(id, name, updated_at, wakeup_at, last_visited_at, last_comment_at) VALUES (4,'riker', '2006-10-19','12:12:12.123456-05:00','2006-10-19T17:23:54.123456Z','2006-01-01T17:23:54.123456')", + getFullyQualifiedTableName(TABLE_NAME))); + connection.createStatement().execute( + String.format( + "INSERT INTO %s(id, name, updated_at, wakeup_at, last_visited_at, last_comment_at) VALUES (5, 'data', '2006-10-19','12:12:12.123456-05:00','2006-10-19T17:23:54.123456Z','2006-01-01T17:23:54.123456')", + getFullyQualifiedTableName(TABLE_NAME))); + }); } @Override @@ -138,7 +335,10 @@ protected AirbyteCatalog getCatalog(final String defaultNamespace) { defaultNamespace, Field.of(COL_ID, JsonSchemaType.NUMBER), Field.of(COL_NAME, JsonSchemaType.STRING), - Field.of(COL_UPDATED_AT, JsonSchemaType.STRING_DATE)) + Field.of(COL_UPDATED_AT, JsonSchemaType.STRING_DATE), + Field.of(COL_WAKEUP_AT, JsonSchemaType.STRING_TIME_WITH_TIMEZONE), + Field.of(COL_LAST_VISITED_AT, JsonSchemaType.STRING_TIMESTAMP_WITH_TIMEZONE), + Field.of(COL_LAST_COMMENT_AT, JsonSchemaType.STRING_TIMESTAMP_WITHOUT_TIMEZONE)) .withSupportedSyncModes(Lists.newArrayList(SyncMode.FULL_REFRESH, SyncMode.INCREMENTAL)) .withSourceDefinedPrimaryKey(List.of(List.of(COL_ID))), CatalogHelpers.createAirbyteStream( @@ -146,7 +346,10 @@ protected AirbyteCatalog getCatalog(final String defaultNamespace) { defaultNamespace, Field.of(COL_ID, JsonSchemaType.NUMBER), Field.of(COL_NAME, JsonSchemaType.STRING), - Field.of(COL_UPDATED_AT, 
JsonSchemaType.STRING_DATE)) + Field.of(COL_UPDATED_AT, JsonSchemaType.STRING_DATE), + Field.of(COL_WAKEUP_AT, JsonSchemaType.STRING_TIME_WITH_TIMEZONE), + Field.of(COL_LAST_VISITED_AT, JsonSchemaType.STRING_TIMESTAMP_WITH_TIMEZONE), + Field.of(COL_LAST_COMMENT_AT, JsonSchemaType.STRING_TIMESTAMP_WITHOUT_TIMEZONE)) .withSupportedSyncModes(Lists.newArrayList(SyncMode.FULL_REFRESH, SyncMode.INCREMENTAL)) .withSourceDefinedPrimaryKey(Collections.emptyList()), CatalogHelpers.createAirbyteStream( @@ -154,14 +357,17 @@ protected AirbyteCatalog getCatalog(final String defaultNamespace) { defaultNamespace, Field.of(COL_FIRST_NAME, JsonSchemaType.STRING), Field.of(COL_LAST_NAME, JsonSchemaType.STRING), - Field.of(COL_UPDATED_AT, JsonSchemaType.STRING_DATE)) + Field.of(COL_UPDATED_AT, JsonSchemaType.STRING_DATE), + Field.of(COL_WAKEUP_AT, JsonSchemaType.STRING_TIME_WITH_TIMEZONE), + Field.of(COL_LAST_VISITED_AT, JsonSchemaType.STRING_TIMESTAMP_WITH_TIMEZONE), + Field.of(COL_LAST_COMMENT_AT, JsonSchemaType.STRING_TIMESTAMP_WITHOUT_TIMEZONE)) .withSupportedSyncModes(Lists.newArrayList(SyncMode.FULL_REFRESH, SyncMode.INCREMENTAL)) .withSourceDefinedPrimaryKey( List.of(List.of(COL_FIRST_NAME), List.of(COL_LAST_NAME))))); } @Override - protected void incrementalTimestampCheck() throws Exception { + protected void incrementalDateCheck() throws Exception { super.incrementalCursorCheck(COL_UPDATED_AT, "2005-10-18", "2006-10-19", @@ -169,37 +375,71 @@ protected void incrementalTimestampCheck() throws Exception { getTestMessages().get(2))); } + @Test + void incrementalTimeTzCheck() throws Exception { + super.incrementalCursorCheck(COL_WAKEUP_AT, + "11:09:11.123456-05:00", + "12:12:12.123456-05:00", + Lists.newArrayList(getTestMessages().get(1), + getTestMessages().get(2))); + } + + @Test + void incrementalTimestampTzCheck() throws Exception { + super.incrementalCursorCheck(COL_LAST_VISITED_AT, + "2005-10-18T17:23:54.123456Z", + "2006-10-19T17:23:54.123456Z", + 
Lists.newArrayList(getTestMessages().get(1), + getTestMessages().get(2))); + } + + @Test + void incrementalTimestampCheck() throws Exception { + super.incrementalCursorCheck(COL_LAST_COMMENT_AT, + "2004-12-12T17:23:54.123456", + "2006-01-01T17:23:54.123456", + Lists.newArrayList(getTestMessages().get(1), + getTestMessages().get(2))); + } + @Override protected JdbcSourceOperations getSourceOperations() { return new PostgresSourceOperations(); } @Override - protected List getExpectedAirbyteMessagesSecondSync(String namespace) { + protected List getExpectedAirbyteMessagesSecondSync(final String namespace) { final List expectedMessages = new ArrayList<>(); expectedMessages.add(new AirbyteMessage().withType(AirbyteMessage.Type.RECORD) .withRecord(new AirbyteRecordMessage().withStream(streamName).withNamespace(namespace) .withData(Jsons.jsonNode(ImmutableMap .of(COL_ID, ID_VALUE_4, COL_NAME, "riker", - COL_UPDATED_AT, "2006-10-19"))))); + COL_UPDATED_AT, "2006-10-19", + COL_WAKEUP_AT, "12:12:12.123456-05:00", + COL_LAST_VISITED_AT, "2006-10-19T17:23:54.123456Z", + COL_LAST_COMMENT_AT, "2006-01-01T17:23:54.123456"))))); expectedMessages.add(new AirbyteMessage().withType(AirbyteMessage.Type.RECORD) .withRecord(new AirbyteRecordMessage().withStream(streamName).withNamespace(namespace) .withData(Jsons.jsonNode(ImmutableMap .of(COL_ID, ID_VALUE_5, COL_NAME, "data", - COL_UPDATED_AT, "2006-10-19"))))); - expectedMessages.add(new AirbyteMessage() - .withType(AirbyteMessage.Type.STATE) - .withState(new AirbyteStateMessage() - .withData(Jsons.jsonNode(new DbState() - .withCdc(false) - .withStreams(Lists.newArrayList(new DbStreamState() - .withStreamName(streamName) - .withStreamNamespace(namespace) - .withCursorField(ImmutableList.of(COL_ID)) - .withCursor("5"))))))); + COL_UPDATED_AT, "2006-10-19", + COL_WAKEUP_AT, "12:12:12.123456-05:00", + COL_LAST_VISITED_AT, "2006-10-19T17:23:54.123456Z", + COL_LAST_COMMENT_AT, "2006-01-01T17:23:54.123456"))))); + final DbStreamState state = 
new DbStreamState() + .withStreamName(streamName) + .withStreamNamespace(namespace) + .withCursorField(ImmutableList.of(COL_ID)) + .withCursor("5"); + expectedMessages.addAll(createExpectedTestMessages(List.of(state))); return expectedMessages; } + @Override + protected boolean supportsPerStream() { + return true; + } + } diff --git a/airbyte-integrations/connectors/source-postgres-strict-encrypt/src/test/resources/expected_spec.json b/airbyte-integrations/connectors/source-postgres/src/test/resources/expected_spec.json similarity index 100% rename from airbyte-integrations/connectors/source-postgres-strict-encrypt/src/test/resources/expected_spec.json rename to airbyte-integrations/connectors/source-postgres/src/test/resources/expected_spec.json diff --git a/airbyte-integrations/connectors/source-relational-db/Dockerfile b/airbyte-integrations/connectors/source-relational-db/Dockerfile index 5ae6986dbb99..36baea09eafa 100644 --- a/airbyte-integrations/connectors/source-relational-db/Dockerfile +++ b/airbyte-integrations/connectors/source-relational-db/Dockerfile @@ -16,5 +16,5 @@ ENV APPLICATION source-relational-db COPY --from=build /airbyte /airbyte -LABEL io.airbyte.version=0.3.0 +LABEL io.airbyte.version=0.3.1 LABEL io.airbyte.name=airbyte/source-relational-db diff --git a/airbyte-integrations/connectors/source-relational-db/build.gradle b/airbyte-integrations/connectors/source-relational-db/build.gradle index 0f9ca0ea46b6..58cc47dfd17a 100644 --- a/airbyte-integrations/connectors/source-relational-db/build.gradle +++ b/airbyte-integrations/connectors/source-relational-db/build.gradle @@ -11,13 +11,14 @@ dependencies { implementation project(':airbyte-integrations:bases:base-java') implementation project(':airbyte-protocol:protocol-models') implementation project(':airbyte-json-validation') + implementation project(':airbyte-config:config-models') implementation 'org.apache.commons:commons-lang3:3.11' testImplementation project(':airbyte-test-utils') 
testImplementation libs.postgresql - testImplementation libs.testcontainers.postgresql + testImplementation libs.connectors.testcontainers.postgresql implementation files(project(':airbyte-integrations:bases:base-java').airbyteDocker.outputs) } diff --git a/airbyte-integrations/connectors/source-relational-db/src/main/java/io/airbyte/integrations/source/relationaldb/AbstractDbSource.java b/airbyte-integrations/connectors/source-relational-db/src/main/java/io/airbyte/integrations/source/relationaldb/AbstractDbSource.java index 6ebdc7aa751e..995a49a1f4d6 100644 --- a/airbyte-integrations/connectors/source-relational-db/src/main/java/io/airbyte/integrations/source/relationaldb/AbstractDbSource.java +++ b/airbyte-integrations/connectors/source-relational-db/src/main/java/io/airbyte/integrations/source/relationaldb/AbstractDbSource.java @@ -7,12 +7,16 @@ import com.fasterxml.jackson.databind.JsonNode; import com.google.common.base.Preconditions; import com.google.common.collect.Lists; +import io.airbyte.commons.features.EnvVariableFeatureFlags; +import io.airbyte.commons.features.FeatureFlags; import io.airbyte.commons.functional.CheckedConsumer; import io.airbyte.commons.json.Jsons; import io.airbyte.commons.lang.Exceptions; import io.airbyte.commons.type.Types; import io.airbyte.commons.util.AutoCloseableIterator; import io.airbyte.commons.util.AutoCloseableIterators; +import io.airbyte.config.StateWrapper; +import io.airbyte.config.helpers.StateMessageHelper; import io.airbyte.db.AbstractDatabase; import io.airbyte.db.IncrementalUtils; import io.airbyte.db.jdbc.JdbcDatabase; @@ -20,12 +24,16 @@ import io.airbyte.integrations.base.AirbyteStreamNameNamespacePair; import io.airbyte.integrations.base.Source; import io.airbyte.integrations.source.relationaldb.models.DbState; +import io.airbyte.integrations.source.relationaldb.state.StateManager; +import io.airbyte.integrations.source.relationaldb.state.StateManagerFactory; import io.airbyte.protocol.models.AirbyteCatalog; 
import io.airbyte.protocol.models.AirbyteConnectionStatus; import io.airbyte.protocol.models.AirbyteConnectionStatus.Status; import io.airbyte.protocol.models.AirbyteMessage; import io.airbyte.protocol.models.AirbyteMessage.Type; import io.airbyte.protocol.models.AirbyteRecordMessage; +import io.airbyte.protocol.models.AirbyteStateMessage; +import io.airbyte.protocol.models.AirbyteStateMessage.AirbyteStateType; import io.airbyte.protocol.models.AirbyteStream; import io.airbyte.protocol.models.CatalogHelpers; import io.airbyte.protocol.models.CommonField; @@ -60,6 +68,8 @@ public abstract class AbstractDbSource read(final JsonNode config, final ConfiguredAirbyteCatalog catalog, final JsonNode state) throws Exception { - final StateManager stateManager = new StateManager( - state == null ? StateManager.emptyState() : Jsons.object(state, DbState.class), - catalog); + final StateManager stateManager = + StateManagerFactory.createStateManager(getSupportedStateType(config), deserializeInitialState(state, config), catalog); final Instant emittedAt = Instant.now(); final Database database = createDatabaseInternal(config); @@ -509,4 +518,47 @@ private Database createDatabaseInternal(final JsonNode sourceConfig) throws Exce return database; } + /** + * Deserializes the state represented as JSON into an object representation. + * + * @param initialStateJson The state as JSON. + * @param config The connector configuration. + * @return The deserialized object representation of the state. 
+ */ + protected List deserializeInitialState(final JsonNode initialStateJson, final JsonNode config) { + final Optional typedState = StateMessageHelper.getTypedState(initialStateJson, featureFlags.useStreamCapableState()); + return typedState.map((state) -> { + switch (state.getStateType()) { + case GLOBAL: + return List.of(state.getGlobal()); + case STREAM: + return state.getStateMessages(); + case LEGACY: + default: + return List.of(new AirbyteStateMessage().withType(AirbyteStateType.LEGACY).withData(state.getLegacyState())); + } + }).orElse(generateEmptyInitialState(config)); + } + + /** + * Generates an empty, initial state for use by the connector. + * + * @param config The connector configuration. + * @return The empty, initial state. + */ + protected List generateEmptyInitialState(final JsonNode config) { + // For backwards compatibility with existing connectors + return List.of(new AirbyteStateMessage().withType(AirbyteStateType.LEGACY).withData(Jsons.jsonNode(new DbState()))); + } + + /** + * Returns the {@link AirbyteStateType} supported by this connector. + * + * @param config The connector configuration. + * @return A {@link AirbyteStateType} representing the state supported by this connector. 
+ */ + protected AirbyteStateType getSupportedStateType(final JsonNode config) { + return AirbyteStateType.LEGACY; + } + } diff --git a/airbyte-integrations/connectors/source-relational-db/src/main/java/io/airbyte/integrations/source/relationaldb/CdcStateManager.java b/airbyte-integrations/connectors/source-relational-db/src/main/java/io/airbyte/integrations/source/relationaldb/CdcStateManager.java index db33dfd6167b..7b855e6c9770 100644 --- a/airbyte-integrations/connectors/source-relational-db/src/main/java/io/airbyte/integrations/source/relationaldb/CdcStateManager.java +++ b/airbyte-integrations/connectors/source-relational-db/src/main/java/io/airbyte/integrations/source/relationaldb/CdcStateManager.java @@ -4,7 +4,6 @@ package io.airbyte.integrations.source.relationaldb; -import com.google.common.annotations.VisibleForTesting; import io.airbyte.commons.json.Jsons; import io.airbyte.integrations.source.relationaldb.models.CdcState; import org.slf4j.Logger; @@ -12,14 +11,13 @@ public class CdcStateManager { - private static final Logger LOGGER = LoggerFactory.getLogger(StateManager.class); + private static final Logger LOGGER = LoggerFactory.getLogger(CdcStateManager.class); private final CdcState initialState; private CdcState currentState; - @VisibleForTesting - CdcStateManager(final CdcState serialized) { + public CdcStateManager(final CdcState serialized) { this.initialState = serialized; this.currentState = serialized; diff --git a/airbyte-integrations/connectors/source-relational-db/src/main/java/io/airbyte/integrations/source/relationaldb/StateDecoratingIterator.java b/airbyte-integrations/connectors/source-relational-db/src/main/java/io/airbyte/integrations/source/relationaldb/StateDecoratingIterator.java index 7900aed452c9..7eabaad9eb31 100644 --- a/airbyte-integrations/connectors/source-relational-db/src/main/java/io/airbyte/integrations/source/relationaldb/StateDecoratingIterator.java +++ 
b/airbyte-integrations/connectors/source-relational-db/src/main/java/io/airbyte/integrations/source/relationaldb/StateDecoratingIterator.java @@ -7,6 +7,7 @@ import com.google.common.collect.AbstractIterator; import io.airbyte.db.IncrementalUtils; import io.airbyte.integrations.base.AirbyteStreamNameNamespacePair; +import io.airbyte.integrations.source.relationaldb.state.StateManager; import io.airbyte.protocol.models.AirbyteMessage; import io.airbyte.protocol.models.AirbyteMessage.Type; import io.airbyte.protocol.models.AirbyteStateMessage; @@ -40,7 +41,11 @@ public StateDecoratingIterator(final Iterator messageIterator, this.cursorField = cursorField; this.cursorType = cursorType; this.maxCursor = initialCursor; - stateManager.setIsCdc(false); + } + + private String getCursorCandidate(final AirbyteMessage message) { + String cursorCandidate = message.getRecord().getData().get(cursorField).asText(); + return (cursorCandidate != null ? cursorCandidate.replaceAll("\u0000", "") : null); } @Override @@ -48,7 +53,7 @@ protected AirbyteMessage computeNext() { if (messageIterator.hasNext()) { final AirbyteMessage message = messageIterator.next(); if (message.getRecord().getData().hasNonNull(cursorField)) { - final String cursorCandidate = message.getRecord().getData().get(cursorField).asText(); + final String cursorCandidate = getCursorCandidate(message); if (IncrementalUtils.compareCursors(maxCursor, cursorCandidate, cursorType) < 0) { maxCursor = cursorCandidate; } diff --git a/airbyte-integrations/connectors/source-relational-db/src/main/java/io/airbyte/integrations/source/relationaldb/StateManager.java b/airbyte-integrations/connectors/source-relational-db/src/main/java/io/airbyte/integrations/source/relationaldb/StateManager.java deleted file mode 100644 index 3e509e2869d9..000000000000 --- a/airbyte-integrations/connectors/source-relational-db/src/main/java/io/airbyte/integrations/source/relationaldb/StateManager.java +++ /dev/null @@ -1,197 +0,0 @@ -/* - * 
Copyright (c) 2022 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.integrations.source.relationaldb; - -import com.google.common.annotations.VisibleForTesting; -import com.google.common.base.Preconditions; -import com.google.common.collect.ImmutableMap; -import com.google.common.collect.Lists; -import io.airbyte.commons.json.Jsons; -import io.airbyte.integrations.base.AirbyteStreamNameNamespacePair; -import io.airbyte.integrations.source.relationaldb.models.DbState; -import io.airbyte.integrations.source.relationaldb.models.DbStreamState; -import io.airbyte.protocol.models.AirbyteStateMessage; -import io.airbyte.protocol.models.ConfiguredAirbyteCatalog; -import io.airbyte.protocol.models.ConfiguredAirbyteStream; -import java.util.Collections; -import java.util.HashMap; -import java.util.Map; -import java.util.Map.Entry; -import java.util.Optional; -import java.util.Set; -import java.util.stream.Collectors; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * Handles the state machine for the state of source implementations. 
- */ -public class StateManager { - - private static final Logger LOGGER = LoggerFactory.getLogger(StateManager.class); - - private final Map pairToCursorInfo; - private Boolean isCdc; - private final CdcStateManager cdcStateManager; - - public static DbState emptyState() { - return new DbState(); - } - - public StateManager(final DbState serialized, final ConfiguredAirbyteCatalog catalog) { - this.cdcStateManager = new CdcStateManager(serialized.getCdcState()); - this.isCdc = serialized.getCdc(); - if (serialized.getCdc() == null) { - this.isCdc = false; - } - - pairToCursorInfo = - new ImmutableMap.Builder().putAll(createCursorInfoMap(serialized, catalog)).build(); - } - - private static Map createCursorInfoMap(final DbState serialized, - final ConfiguredAirbyteCatalog catalog) { - final Set allStreamNames = catalog.getStreams() - .stream() - .map(ConfiguredAirbyteStream::getStream) - .map(AirbyteStreamNameNamespacePair::fromAirbyteSteam) - .collect(Collectors.toSet()); - allStreamNames.addAll(serialized.getStreams().stream().map(StateManager::toAirbyteStreamNameNamespacePair).collect(Collectors.toSet())); - - final Map localMap = new HashMap<>(); - final Map pairToState = serialized.getStreams() - .stream() - .collect(Collectors.toMap(StateManager::toAirbyteStreamNameNamespacePair, a -> a)); - final Map pairToConfiguredAirbyteStream = catalog.getStreams().stream() - .collect(Collectors.toMap(AirbyteStreamNameNamespacePair::fromConfiguredAirbyteSteam, s -> s)); - - for (final AirbyteStreamNameNamespacePair pair : allStreamNames) { - final Optional stateOptional = Optional.ofNullable(pairToState.get(pair)); - final Optional streamOptional = Optional.ofNullable(pairToConfiguredAirbyteStream.get(pair)); - localMap.put(pair, createCursorInfoForStream(pair, stateOptional, streamOptional)); - } - - return localMap; - } - - private static AirbyteStreamNameNamespacePair toAirbyteStreamNameNamespacePair(final DbStreamState state) { - return new 
AirbyteStreamNameNamespacePair(state.getStreamName(), state.getStreamNamespace()); - } - - @VisibleForTesting - @SuppressWarnings("OptionalUsedAsFieldOrParameterType") - static CursorInfo createCursorInfoForStream(final AirbyteStreamNameNamespacePair pair, - final Optional stateOptional, - final Optional streamOptional) { - final String originalCursorField = stateOptional - .map(DbStreamState::getCursorField) - .flatMap(f -> f.size() > 0 ? Optional.of(f.get(0)) : Optional.empty()) - .orElse(null); - final String originalCursor = stateOptional.map(DbStreamState::getCursor).orElse(null); - - final String cursor; - final String cursorField; - - // if cursor field is set in catalog. - if (streamOptional.map(ConfiguredAirbyteStream::getCursorField).isPresent()) { - cursorField = streamOptional - .map(ConfiguredAirbyteStream::getCursorField) - .flatMap(f -> f.size() > 0 ? Optional.of(f.get(0)) : Optional.empty()) - .orElse(null); - // if cursor field is set in state. - if (stateOptional.map(DbStreamState::getCursorField).isPresent()) { - // if cursor field in catalog and state are the same. - if (stateOptional.map(DbStreamState::getCursorField).equals(streamOptional.map(ConfiguredAirbyteStream::getCursorField))) { - cursor = stateOptional.map(DbStreamState::getCursor).orElse(null); - LOGGER.info("Found matching cursor in state. Stream: {}. Cursor Field: {} Value: {}", pair, cursorField, cursor); - // if cursor field in catalog and state are different. - } else { - cursor = null; - LOGGER.info( - "Found cursor field. Does not match previous cursor field. Stream: {}. Original Cursor Field: {}. New Cursor Field: {}. Resetting cursor value.", - pair, originalCursorField, cursorField); - } - // if cursor field is not set in state but is set in catalog. - } else { - LOGGER.info("No cursor field set in catalog but not present in state. Stream: {}, New Cursor Field: {}. 
Resetting cursor value", pair, - cursorField); - cursor = null; - } - // if cursor field is not set in catalog. - } else { - LOGGER.info( - "Cursor field set in state but not present in catalog. Stream: {}. Original Cursor Field: {}. Original value: {}. Resetting cursor.", - pair, originalCursorField, originalCursor); - cursorField = null; - cursor = null; - } - - return new CursorInfo(originalCursorField, originalCursor, cursorField, cursor); - } - - private Optional getCursorInfo(final AirbyteStreamNameNamespacePair pair) { - return Optional.ofNullable(pairToCursorInfo.get(pair)); - } - - public Optional getOriginalCursorField(final AirbyteStreamNameNamespacePair pair) { - return getCursorInfo(pair).map(CursorInfo::getOriginalCursorField); - } - - public Optional getOriginalCursor(final AirbyteStreamNameNamespacePair pair) { - return getCursorInfo(pair).map(CursorInfo::getOriginalCursor); - } - - public Optional getCursorField(final AirbyteStreamNameNamespacePair pair) { - return getCursorInfo(pair).map(CursorInfo::getCursorField); - } - - public Optional getCursor(final AirbyteStreamNameNamespacePair pair) { - return getCursorInfo(pair).map(CursorInfo::getCursor); - } - - synchronized public AirbyteStateMessage updateAndEmit(final AirbyteStreamNameNamespacePair pair, final String cursor) { - // cdc file gets updated by debezium so the "update" part is a no op. 
- if (!isCdc) { - final Optional cursorInfo = getCursorInfo(pair); - Preconditions.checkState(cursorInfo.isPresent(), "Could not find cursor information for stream: " + pair); - cursorInfo.get().setCursor(cursor); - } - - return toState(); - } - - public void setIsCdc(final boolean isCdc) { - if (this.isCdc == null) { - this.isCdc = isCdc; - } else { - Preconditions.checkState(this.isCdc == isCdc, "attempt to set cdc to {}, but is already set to {}.", isCdc, this.isCdc); - } - } - - public CdcStateManager getCdcStateManager() { - return cdcStateManager; - } - - public AirbyteStateMessage emit() { - return toState(); - } - - private AirbyteStateMessage toState() { - final DbState DbState = new DbState() - .withCdc(isCdc) - .withStreams(pairToCursorInfo.entrySet().stream() - .sorted(Entry.comparingByKey()) // sort by stream name then namespace for sanity. - .map(e -> new DbStreamState() - .withStreamName(e.getKey().getName()) - .withStreamNamespace(e.getKey().getNamespace()) - .withCursorField(e.getValue().getCursorField() == null ? Collections.emptyList() : Lists.newArrayList(e.getValue().getCursorField())) - .withCursor(e.getValue().getCursor())) - .collect(Collectors.toList())) - .withCdcState(cdcStateManager.getCdcState()); - - return new AirbyteStateMessage().withData(Jsons.jsonNode(DbState)); - } - -} diff --git a/airbyte-integrations/connectors/source-relational-db/src/main/java/io/airbyte/integrations/source/relationaldb/state/AbstractStateManager.java b/airbyte-integrations/connectors/source-relational-db/src/main/java/io/airbyte/integrations/source/relationaldb/state/AbstractStateManager.java new file mode 100644 index 000000000000..dec78ec39fac --- /dev/null +++ b/airbyte-integrations/connectors/source-relational-db/src/main/java/io/airbyte/integrations/source/relationaldb/state/AbstractStateManager.java @@ -0,0 +1,63 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+ */ + +package io.airbyte.integrations.source.relationaldb.state; + +import io.airbyte.integrations.base.AirbyteStreamNameNamespacePair; +import io.airbyte.integrations.source.relationaldb.CursorInfo; +import io.airbyte.protocol.models.AirbyteStateMessage; +import io.airbyte.protocol.models.ConfiguredAirbyteCatalog; +import java.util.Collection; +import java.util.List; +import java.util.Map; +import java.util.Optional; +import java.util.function.Function; +import java.util.function.Supplier; + +/** + * Abstract implementation of the {@link StateManager} interface that provides common functionality + * for state manager implementations. + * + * @param The type associated with the state object managed by this manager. + * @param The type associated with the state object stored in the state managed by this manager. + */ +public abstract class AbstractStateManager implements StateManager { + + /** + * The {@link CursorManager} responsible for keeping track of the current cursor value for each + * stream managed by this state manager. + */ + private final CursorManager cursorManager; + + /** + * Constructs a new state manager for the given configured connector. + * + * @param catalog The connector's configured catalog. + * @param streamSupplier A {@link Supplier} that provides the cursor manager with the collection of + * streams tracked by the connector's state. + * @param cursorFunction A {@link Function} that extracts the current cursor from a stream stored in + * the connector's state. + * @param cursorFieldFunction A {@link Function} that extracts the cursor field name from a stream + * stored in the connector's state. + * @param namespacePairFunction A {@link Function} that generates a + * {@link AirbyteStreamNameNamespacePair} that identifies each stream in the connector's + * state. 
+ */ + public AbstractStateManager(final ConfiguredAirbyteCatalog catalog, + final Supplier> streamSupplier, + final Function cursorFunction, + final Function> cursorFieldFunction, + final Function namespacePairFunction) { + cursorManager = new CursorManager(catalog, streamSupplier, cursorFunction, cursorFieldFunction, namespacePairFunction); + } + + @Override + public Map getPairToCursorInfoMap() { + return cursorManager.getPairToCursorInfo(); + } + + @Override + public abstract AirbyteStateMessage toState(final Optional pair); + +} diff --git a/airbyte-integrations/connectors/source-relational-db/src/main/java/io/airbyte/integrations/source/relationaldb/state/CursorManager.java b/airbyte-integrations/connectors/source-relational-db/src/main/java/io/airbyte/integrations/source/relationaldb/state/CursorManager.java new file mode 100644 index 000000000000..2fabade97726 --- /dev/null +++ b/airbyte-integrations/connectors/source-relational-db/src/main/java/io/airbyte/integrations/source/relationaldb/state/CursorManager.java @@ -0,0 +1,222 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.integrations.source.relationaldb.state; + +import com.google.common.annotations.VisibleForTesting; +import io.airbyte.integrations.base.AirbyteStreamNameNamespacePair; +import io.airbyte.integrations.source.relationaldb.CursorInfo; +import io.airbyte.protocol.models.ConfiguredAirbyteCatalog; +import io.airbyte.protocol.models.ConfiguredAirbyteStream; +import java.util.Collection; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Optional; +import java.util.Set; +import java.util.function.Function; +import java.util.function.Supplier; +import java.util.stream.Collectors; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * Manages the map of streams to current cursor values for state management. 
+ * + * @param The type that represents the stream object which holds the current cursor information + * in the state. + */ +public class CursorManager { + + private static final Logger LOGGER = LoggerFactory.getLogger(CursorManager.class); + + /** + * Map of streams (name/namespace tuple) to the current cursor information stored in the state. + */ + private final Map pairToCursorInfo; + + /** + * Constructs a new {@link CursorManager} based on the configured connector and current state + * information. + * + * @param catalog The connector's configured catalog. + * @param streamSupplier A {@link Supplier} that provides the cursor manager with the collection of + * streams tracked by the connector's state. + * @param cursorFunction A {@link Function} that extracts the current cursor from a stream stored in + * the connector's state. + * @param cursorFieldFunction A {@link Function} that extracts the cursor field name from a stream + * stored in the connector's state. + * @param namespacePairFunction A {@link Function} that generates a + * {@link AirbyteStreamNameNamespacePair} that identifies each stream in the connector's + * state. + */ + public CursorManager(final ConfiguredAirbyteCatalog catalog, + final Supplier> streamSupplier, + final Function cursorFunction, + final Function> cursorFieldFunction, + final Function namespacePairFunction) { + pairToCursorInfo = createCursorInfoMap(catalog, streamSupplier, cursorFunction, cursorFieldFunction, namespacePairFunction); + } + + /** + * Creates the cursor information map that associates stream name/namespace tuples with the current + * cursor information for that stream as stored in the connector's state. + * + * @param catalog The connector's configured catalog. + * @param streamSupplier A {@link Supplier} that provides the cursor manager with the collection of + * streams tracked by the connector's state. 
+ * @param cursorFunction A {@link Function} that extracts the current cursor from a stream stored in + * the connector's state. + * @param cursorFieldFunction A {@link Function} that extracts the cursor field name from a stream + * stored in the connector's state. + * @param namespacePairFunction A {@link Function} that generates a + * {@link AirbyteStreamNameNamespacePair} that identifies each stream in the connector's + * state. + * @return A map of streams to current cursor information for the stream. + */ + @VisibleForTesting + protected Map createCursorInfoMap( + final ConfiguredAirbyteCatalog catalog, + final Supplier> streamSupplier, + final Function cursorFunction, + final Function> cursorFieldFunction, + final Function namespacePairFunction) { + final Set allStreamNames = catalog.getStreams() + .stream() + .map(ConfiguredAirbyteStream::getStream) + .map(AirbyteStreamNameNamespacePair::fromAirbyteSteam) + .collect(Collectors.toSet()); + allStreamNames.addAll(streamSupplier.get().stream().map(namespacePairFunction).filter(n -> n != null).collect(Collectors.toSet())); + + final Map localMap = new HashMap<>(); + final Map pairToState = streamSupplier.get() + .stream() + .collect(Collectors.toMap(namespacePairFunction, Function.identity())); + final Map pairToConfiguredAirbyteStream = catalog.getStreams().stream() + .collect(Collectors.toMap(AirbyteStreamNameNamespacePair::fromConfiguredAirbyteSteam, Function.identity())); + + for (final AirbyteStreamNameNamespacePair pair : allStreamNames) { + final Optional stateOptional = Optional.ofNullable(pairToState.get(pair)); + final Optional streamOptional = Optional.ofNullable(pairToConfiguredAirbyteStream.get(pair)); + localMap.put(pair, createCursorInfoForStream(pair, stateOptional, streamOptional, cursorFunction, cursorFieldFunction)); + } + + return localMap; + } + + /** + * Generates a {@link CursorInfo} object based on the data currently stored in the connector's state + * for the given stream. 
+ * + * @param pair A {@link AirbyteStreamNameNamespacePair} that identifies a specific stream managed by + * the connector. + * @param stateOptional {@link Optional} containing the current state associated with the stream. + * @param streamOptional {@link Optional} containing the {@link ConfiguredAirbyteStream} associated + * with the stream. + * @param cursorFunction A {@link Function} that provides the current cursor from the state + * associated with the stream. + * @param cursorFieldFunction A {@link Function} that provides the cursor field name for the cursor + * stored in the state associated with the stream. + * @return A {@link CursorInfo} object based on the data currently stored in the connector's state + * for the given stream. + */ + @SuppressWarnings("OptionalUsedAsFieldOrParameterType") + @VisibleForTesting + protected CursorInfo createCursorInfoForStream(final AirbyteStreamNameNamespacePair pair, + final Optional stateOptional, + final Optional streamOptional, + final Function cursorFunction, + final Function> cursorFieldFunction) { + final String originalCursorField = stateOptional + .map(cursorFieldFunction) + .flatMap(f -> f.size() > 0 ? Optional.of(f.get(0)) : Optional.empty()) + .orElse(null); + final String originalCursor = stateOptional.map(cursorFunction).orElse(null); + + final String cursor; + final String cursorField; + + // if cursor field is set in catalog. + if (streamOptional.map(ConfiguredAirbyteStream::getCursorField).isPresent()) { + cursorField = streamOptional + .map(ConfiguredAirbyteStream::getCursorField) + .flatMap(f -> f.size() > 0 ? Optional.of(f.get(0)) : Optional.empty()) + .orElse(null); + // if cursor field is set in state. + if (stateOptional.map(cursorFieldFunction).isPresent()) { + // if cursor field in catalog and state are the same. 
+        if (stateOptional.map(cursorFieldFunction).equals(streamOptional.map(ConfiguredAirbyteStream::getCursorField))) {
+          cursor = stateOptional.map(cursorFunction).orElse(null);
+          LOGGER.info("Found matching cursor in state. Stream: {}. Cursor Field: {} Value: {}", pair, cursorField, cursor);
+          // if cursor field in catalog and state are different.
+        } else {
+          cursor = null;
+          LOGGER.info(
+              "Found cursor field. Does not match previous cursor field. Stream: {}. Original Cursor Field: {}. New Cursor Field: {}. Resetting cursor value.",
+              pair, originalCursorField, cursorField);
+        }
+        // if cursor field is not set in state but is set in catalog.
+      } else {
+        LOGGER.info("Cursor field set in catalog but not present in state. Stream: {}, New Cursor Field: {}. Resetting cursor value", pair,
+            cursorField);
+        cursor = null;
+      }
+      // if cursor field is not set in catalog.
+    } else {
+      LOGGER.info(
+          "Cursor field set in state but not present in catalog. Stream: {}. Original Cursor Field: {}. Original value: {}. Resetting cursor.",
+          pair, originalCursorField, originalCursor);
+      cursorField = null;
+      cursor = null;
+    }
+
+    return new CursorInfo(originalCursorField, originalCursor, cursorField, cursor);
+  }
+
+  /**
+   * Retrieves a copy of the stream name/namespace tuple to current cursor information map.
+   *
+   * @return A copy of the stream name/namespace tuple to current cursor information map.
+   */
+  public Map<AirbyteStreamNameNamespacePair, CursorInfo> getPairToCursorInfo() {
+    return Map.copyOf(pairToCursorInfo);
+  }
+
+  /**
+   * Retrieves an {@link Optional} possibly containing the current {@link CursorInfo} associated with
+   * the provided stream name/namespace tuple.
+   *
+   * @param pair The {@link AirbyteStreamNameNamespacePair} which identifies a stream.
+   * @return An {@link Optional} possibly containing the current {@link CursorInfo} associated with
+   *         the provided stream name/namespace tuple.
+ */ + public Optional getCursorInfo(final AirbyteStreamNameNamespacePair pair) { + return Optional.ofNullable(pairToCursorInfo.get(pair)); + } + + /** + * Retrieves an {@link Optional} possibly containing the cursor field name associated with the + * cursor tracked in the state associated with the provided stream name/namespace tuple. + * + * @param pair The {@link AirbyteStreamNameNamespacePair} which identifies a stream. + * @return An {@link Optional} possibly containing the cursor field name associated with the cursor + * tracked in the state associated with the provided stream name/namespace tuple. + */ + public Optional getCursorField(final AirbyteStreamNameNamespacePair pair) { + return getCursorInfo(pair).map(CursorInfo::getCursorField); + } + + /** + * Retrieves an {@link Optional} possibly containing the cursor value tracked in the state + * associated with the provided stream name/namespace tuple. + * + * @param pair The {@link AirbyteStreamNameNamespacePair} which identifies a stream. + * @return An {@link Optional} possibly containing the cursor value tracked in the state associated + * with the provided stream name/namespace tuple. + */ + public Optional getCursor(final AirbyteStreamNameNamespacePair pair) { + return getCursorInfo(pair).map(CursorInfo::getCursor); + } + +} diff --git a/airbyte-integrations/connectors/source-relational-db/src/main/java/io/airbyte/integrations/source/relationaldb/state/GlobalStateManager.java b/airbyte-integrations/connectors/source-relational-db/src/main/java/io/airbyte/integrations/source/relationaldb/state/GlobalStateManager.java new file mode 100644 index 000000000000..41ae2a2e47b2 --- /dev/null +++ b/airbyte-integrations/connectors/source-relational-db/src/main/java/io/airbyte/integrations/source/relationaldb/state/GlobalStateManager.java @@ -0,0 +1,131 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+ */ + +package io.airbyte.integrations.source.relationaldb.state; + +import static io.airbyte.integrations.source.relationaldb.state.StateGeneratorUtils.CURSOR_FIELD_FUNCTION; +import static io.airbyte.integrations.source.relationaldb.state.StateGeneratorUtils.CURSOR_FUNCTION; +import static io.airbyte.integrations.source.relationaldb.state.StateGeneratorUtils.NAME_NAMESPACE_PAIR_FUNCTION; + +import io.airbyte.commons.json.Jsons; +import io.airbyte.integrations.base.AirbyteStreamNameNamespacePair; +import io.airbyte.integrations.source.relationaldb.CdcStateManager; +import io.airbyte.integrations.source.relationaldb.models.CdcState; +import io.airbyte.integrations.source.relationaldb.models.DbState; +import io.airbyte.protocol.models.AirbyteGlobalState; +import io.airbyte.protocol.models.AirbyteStateMessage; +import io.airbyte.protocol.models.AirbyteStateMessage.AirbyteStateType; +import io.airbyte.protocol.models.AirbyteStreamState; +import io.airbyte.protocol.models.ConfiguredAirbyteCatalog; +import io.airbyte.protocol.models.StreamDescriptor; +import java.util.Collection; +import java.util.List; +import java.util.Optional; +import java.util.function.Supplier; +import java.util.stream.Collectors; + +/** + * Global implementation of the {@link StateManager} interface. + * + * This implementation generates a single, global state object for the state tracked by this + * manager. + */ +public class GlobalStateManager extends AbstractStateManager { + + /** + * Legacy {@link CdcStateManager} used to manage state for connectors that support Change Data + * Capture (CDC). + */ + private final CdcStateManager cdcStateManager; + + /** + * Constructs a new {@link GlobalStateManager} that is seeded with the provided + * {@link AirbyteStateMessage}. + * + * @param airbyteStateMessage The initial state represented as an {@link AirbyteStateMessage}. + * @param catalog The {@link ConfiguredAirbyteCatalog} for the connector associated with this state + * manager. 
+ */ + public GlobalStateManager(final AirbyteStateMessage airbyteStateMessage, final ConfiguredAirbyteCatalog catalog) { + super(catalog, + getStreamsSupplier(airbyteStateMessage), + CURSOR_FUNCTION, + CURSOR_FIELD_FUNCTION, + NAME_NAMESPACE_PAIR_FUNCTION); + + this.cdcStateManager = new CdcStateManager(extractCdcState(airbyteStateMessage)); + } + + @Override + public CdcStateManager getCdcStateManager() { + return cdcStateManager; + } + + @Override + public AirbyteStateMessage toState(final Optional pair) { + // Populate global state + final AirbyteGlobalState globalState = new AirbyteGlobalState(); + globalState.setSharedState(Jsons.jsonNode(getCdcStateManager().getCdcState())); + globalState.setStreamStates(StateGeneratorUtils.generateStreamStateList(getPairToCursorInfoMap())); + + // Generate the legacy state for backwards compatibility + final DbState dbState = StateGeneratorUtils.generateDbState(getPairToCursorInfoMap()) + .withCdc(true) + .withCdcState(getCdcStateManager().getCdcState()); + + return new AirbyteStateMessage() + .withType(AirbyteStateType.GLOBAL) + // Temporarily include legacy state for backwards compatibility with the platform + .withData(Jsons.jsonNode(dbState)) + .withGlobal(globalState); + } + + /** + * Extracts the Change Data Capture (CDC) state stored in the initial state provided to this state + * manager. + * + * @param airbyteStateMessage The {@link AirbyteStateMessage} that contains the initial state + * provided to the state manager. + * @return The {@link CdcState} stored in the state, if any. Note that this will not be {@code null} + * but may be empty. + */ + private CdcState extractCdcState(final AirbyteStateMessage airbyteStateMessage) { + if (airbyteStateMessage.getType() == AirbyteStateType.GLOBAL) { + return Jsons.object(airbyteStateMessage.getGlobal().getSharedState(), CdcState.class); + } else { + final DbState legacyState = Jsons.object(airbyteStateMessage.getData(), DbState.class); + return legacyState != null ? 
legacyState.getCdcState() : null; + } + } + + /** + * Generates the {@link Supplier} that will be used to extract the streams from the incoming + * {@link AirbyteStateMessage}. + * + * @param airbyteStateMessage The {@link AirbyteStateMessage} supplied to this state manager with + * the initial state. + * @return A {@link Supplier} that will be used to fetch the streams present in the initial state. + */ + private static Supplier> getStreamsSupplier(final AirbyteStateMessage airbyteStateMessage) { + /* + * If the incoming message has the state type set to GLOBAL, it is using the new format. Therefore, + * we can look for streams in the "global" field of the message. Otherwise, the message is still + * storing state in the legacy "data" field. + */ + return () -> { + if (airbyteStateMessage.getType() == AirbyteStateType.GLOBAL) { + return airbyteStateMessage.getGlobal().getStreamStates(); + } else if (airbyteStateMessage.getData() != null) { + return Jsons.object(airbyteStateMessage.getData(), DbState.class).getStreams().stream() + .map(s -> new AirbyteStreamState().withStreamState(Jsons.jsonNode(s)) + .withStreamDescriptor(new StreamDescriptor().withNamespace(s.getStreamNamespace()).withName(s.getStreamName()))) + .collect( + Collectors.toList()); + } else { + return List.of(); + } + }; + } + +} diff --git a/airbyte-integrations/connectors/source-relational-db/src/main/java/io/airbyte/integrations/source/relationaldb/state/LegacyStateManager.java b/airbyte-integrations/connectors/source-relational-db/src/main/java/io/airbyte/integrations/source/relationaldb/state/LegacyStateManager.java new file mode 100644 index 000000000000..f0e0e2465c55 --- /dev/null +++ b/airbyte-integrations/connectors/source-relational-db/src/main/java/io/airbyte/integrations/source/relationaldb/state/LegacyStateManager.java @@ -0,0 +1,112 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+ */ + +package io.airbyte.integrations.source.relationaldb.state; + +import com.google.common.base.Preconditions; +import io.airbyte.commons.json.Jsons; +import io.airbyte.integrations.base.AirbyteStreamNameNamespacePair; +import io.airbyte.integrations.source.relationaldb.CdcStateManager; +import io.airbyte.integrations.source.relationaldb.CursorInfo; +import io.airbyte.integrations.source.relationaldb.models.DbState; +import io.airbyte.integrations.source.relationaldb.models.DbStreamState; +import io.airbyte.protocol.models.AirbyteStateMessage; +import io.airbyte.protocol.models.AirbyteStateMessage.AirbyteStateType; +import io.airbyte.protocol.models.ConfiguredAirbyteCatalog; +import java.util.List; +import java.util.Optional; +import java.util.function.Function; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * Legacy implementation (pre-per-stream state support) of the {@link StateManager} interface. + * + * This implementation assumes that the state matches the {@link DbState} object and effectively + * tracks state as global across the streams managed by a connector. + * + * @deprecated This manager may be removed in the future if/once all connectors support per-stream + * state management. + */ +@Deprecated(forRemoval = true) +public class LegacyStateManager extends AbstractStateManager { + + private static final Logger LOGGER = LoggerFactory.getLogger(LegacyStateManager.class); + + /** + * {@link Function} that extracts the cursor from the stream state. + */ + private static final Function CURSOR_FUNCTION = DbStreamState::getCursor; + + /** + * {@link Function} that extracts the cursor field(s) from the stream state. + */ + private static final Function> CURSOR_FIELD_FUNCTION = DbStreamState::getCursorField; + + /** + * {@link Function} that creates an {@link AirbyteStreamNameNamespacePair} from the stream state. 
+ */ + private static final Function NAME_NAMESPACE_PAIR_FUNCTION = + s -> new AirbyteStreamNameNamespacePair(s.getStreamName(), s.getStreamNamespace()); + + /** + * Tracks whether the connector associated with this state manager supports CDC. + */ + private Boolean isCdc; + + /** + * {@link CdcStateManager} used to manage state for connectors that support CDC. + */ + private final CdcStateManager cdcStateManager; + + /** + * Constructs a new {@link LegacyStateManager} that is seeded with the provided {@link DbState} + * instance. + * + * @param dbState The initial state represented as an {@link DbState} instance. + * @param catalog The {@link ConfiguredAirbyteCatalog} for the connector associated with this state + * manager. + */ + public LegacyStateManager(final DbState dbState, final ConfiguredAirbyteCatalog catalog) { + super(catalog, + () -> dbState.getStreams(), + CURSOR_FUNCTION, + CURSOR_FIELD_FUNCTION, + NAME_NAMESPACE_PAIR_FUNCTION); + + this.cdcStateManager = new CdcStateManager(dbState.getCdcState()); + this.isCdc = dbState.getCdc(); + if (dbState.getCdc() == null) { + this.isCdc = false; + } + } + + @Override + public CdcStateManager getCdcStateManager() { + return cdcStateManager; + } + + @Override + public AirbyteStateMessage toState(final Optional pair) { + final DbState dbState = StateGeneratorUtils.generateDbState(getPairToCursorInfoMap()) + .withCdc(isCdc) + .withCdcState(getCdcStateManager().getCdcState()); + + LOGGER.info("Generated legacy state for {} streams", dbState.getStreams().size()); + return new AirbyteStateMessage().withType(AirbyteStateType.LEGACY).withData(Jsons.jsonNode(dbState)); + } + + @Override + public AirbyteStateMessage updateAndEmit(final AirbyteStreamNameNamespacePair pair, final String cursor) { + // cdc file gets updated by debezium so the "update" part is a no op. 
+ if (!isCdc) { + final Optional cursorInfo = getCursorInfo(pair); + Preconditions.checkState(cursorInfo.isPresent(), "Could not find cursor information for stream: " + pair); + cursorInfo.get().setCursor(cursor); + } + + return toState(Optional.ofNullable(pair)); + } + +} diff --git a/airbyte-integrations/connectors/source-relational-db/src/main/java/io/airbyte/integrations/source/relationaldb/state/StateGeneratorUtils.java b/airbyte-integrations/connectors/source-relational-db/src/main/java/io/airbyte/integrations/source/relationaldb/state/StateGeneratorUtils.java new file mode 100644 index 000000000000..40fa957c71b5 --- /dev/null +++ b/airbyte-integrations/connectors/source-relational-db/src/main/java/io/airbyte/integrations/source/relationaldb/state/StateGeneratorUtils.java @@ -0,0 +1,203 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.integrations.source.relationaldb.state; + +import com.google.common.collect.Lists; +import io.airbyte.commons.json.Jsons; +import io.airbyte.integrations.base.AirbyteStreamNameNamespacePair; +import io.airbyte.integrations.source.relationaldb.CursorInfo; +import io.airbyte.integrations.source.relationaldb.models.DbState; +import io.airbyte.integrations.source.relationaldb.models.DbStreamState; +import io.airbyte.protocol.models.AirbyteGlobalState; +import io.airbyte.protocol.models.AirbyteStateMessage; +import io.airbyte.protocol.models.AirbyteStateMessage.AirbyteStateType; +import io.airbyte.protocol.models.AirbyteStreamState; +import io.airbyte.protocol.models.StreamDescriptor; +import java.util.Collections; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; +import java.util.Optional; +import java.util.function.Function; +import java.util.stream.Collectors; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * Collection of utilities that facilitate the generation of state objects. 
+ */ +public class StateGeneratorUtils { + + private static final Logger LOGGER = LoggerFactory.getLogger(StateGeneratorUtils.class); + + /** + * {@link Function} that extracts the cursor from the stream state. + */ + public static final Function CURSOR_FUNCTION = stream -> { + final Optional dbStreamState = StateGeneratorUtils.extractState(stream); + return dbStreamState.map(DbStreamState::getCursor).orElse(null); + }; + + /** + * {@link Function} that extracts the cursor field(s) from the stream state. + */ + public static final Function> CURSOR_FIELD_FUNCTION = stream -> { + final Optional dbStreamState = StateGeneratorUtils.extractState(stream); + if (dbStreamState.isPresent()) { + return dbStreamState.get().getCursorField(); + } else { + return List.of(); + } + }; + + /** + * {@link Function} that creates an {@link AirbyteStreamNameNamespacePair} from the stream state. + */ + public static final Function NAME_NAMESPACE_PAIR_FUNCTION = + s -> isValidStreamDescriptor(s.getStreamDescriptor()) + ? new AirbyteStreamNameNamespacePair(s.getStreamDescriptor().getName(), s.getStreamDescriptor().getNamespace()) + : null; + + private StateGeneratorUtils() {} + + /** + * Generates the stream state for the given stream and cursor information. + * + * @param airbyteStreamNameNamespacePair The stream. + * @param cursorInfo The current cursor. + * @return The {@link AirbyteStreamState} representing the current state of the stream. 
+ */ + public static AirbyteStreamState generateStreamState(final AirbyteStreamNameNamespacePair airbyteStreamNameNamespacePair, + final CursorInfo cursorInfo) { + return new AirbyteStreamState() + .withStreamDescriptor( + new StreamDescriptor().withName(airbyteStreamNameNamespacePair.getName()).withNamespace(airbyteStreamNameNamespacePair.getNamespace())) + .withStreamState(Jsons.jsonNode(generateDbStreamState(airbyteStreamNameNamespacePair, cursorInfo))); + } + + /** + * Generates a list of valid stream states from the provided stream and cursor information. A stream + * state is considered to be valid if the stream has a valid descriptor (see + * {@link #isValidStreamDescriptor(StreamDescriptor)} for more details). + * + * @param pairToCursorInfoMap The map of stream name/namespace tuple to the current cursor + * information for that stream + * @return The list of stream states derived from the state information extracted from the provided + * map. + */ + public static List generateStreamStateList(final Map pairToCursorInfoMap) { + return pairToCursorInfoMap.entrySet().stream() + .sorted(Entry.comparingByKey()) + .map(e -> generateStreamState(e.getKey(), e.getValue())) + .filter(s -> isValidStreamDescriptor(s.getStreamDescriptor())) + .collect(Collectors.toList()); + } + + /** + * Generates the legacy global state for backwards compatibility. + * + * @param pairToCursorInfoMap The map of stream name/namespace tuple to the current cursor + * information for that stream + * @return The legacy {@link DbState}. + */ + public static DbState generateDbState(final Map pairToCursorInfoMap) { + return new DbState() + .withCdc(false) + .withStreams(pairToCursorInfoMap.entrySet().stream() + .sorted(Entry.comparingByKey()) // sort by stream name then namespace for sanity. + .map(e -> generateDbStreamState(e.getKey(), e.getValue())) + .collect(Collectors.toList())); + } + + /** + * Generates the {@link DbStreamState} for the given stream and cursor. 
+ * + * @param airbyteStreamNameNamespacePair The stream. + * @param cursorInfo The current cursor. + * @return The {@link DbStreamState}. + */ + public static DbStreamState generateDbStreamState(final AirbyteStreamNameNamespacePair airbyteStreamNameNamespacePair, + final CursorInfo cursorInfo) { + return new DbStreamState() + .withStreamName(airbyteStreamNameNamespacePair.getName()) + .withStreamNamespace(airbyteStreamNameNamespacePair.getNamespace()) + .withCursorField(cursorInfo.getCursorField() == null ? Collections.emptyList() : Lists.newArrayList(cursorInfo.getCursorField())) + .withCursor(cursorInfo.getCursor()); + } + + /** + * Extracts the actual state from the {@link AirbyteStreamState} object. + * + * @param state The {@link AirbyteStreamState} that contains the actual stream state as JSON. + * @return An {@link Optional} possibly containing the deserialized representation of the stream + * state or an empty {@link Optional} if the state is not present or could not be + * deserialized. + */ + public static Optional extractState(final AirbyteStreamState state) { + try { + return Optional.ofNullable(Jsons.object(state.getStreamState(), DbStreamState.class)); + } catch (final IllegalArgumentException e) { + LOGGER.error("Unable to extract state.", e); + return Optional.empty(); + } + } + + /** + * Tests whether the provided {@link StreamDescriptor} is valid. A valid descriptor is defined as + * one that has a non-{@code null} name. + * + * See + * https://github.com/airbytehq/airbyte/blob/e63458fabb067978beb5eaa74d2bc130919b419f/docs/understanding-airbyte/airbyte-protocol.md + * for more details + * + * @param streamDescriptor A {@link StreamDescriptor} to be validated. + * @return {@code true} if the provided {@link StreamDescriptor} is valid or {@code false} if it is + * invalid. 
+ */ + public static boolean isValidStreamDescriptor(final StreamDescriptor streamDescriptor) { + if (streamDescriptor != null) { + return streamDescriptor.getName() != null; + } else { + return false; + } + } + + /** + * Converts a {@link AirbyteStateType#LEGACY} state message into a {@link AirbyteStateType#GLOBAL} + * message. + * + * @param airbyteStateMessage A {@link AirbyteStateType#LEGACY} state message. + * @return A {@link AirbyteStateType#GLOBAL} state message. + */ + public static AirbyteStateMessage convertLegacyStateToGlobalState(final AirbyteStateMessage airbyteStateMessage) { + final DbState dbState = Jsons.object(airbyteStateMessage.getData(), DbState.class); + final AirbyteGlobalState globalState = new AirbyteGlobalState() + .withSharedState(Jsons.jsonNode(dbState.getCdcState())) + .withStreamStates(dbState.getStreams().stream() + .map(s -> new AirbyteStreamState() + .withStreamDescriptor(new StreamDescriptor().withName(s.getStreamName()).withNamespace(s.getStreamNamespace())) + .withStreamState(Jsons.jsonNode(s))) + .collect( + Collectors.toList())); + return new AirbyteStateMessage().withType(AirbyteStateType.GLOBAL).withGlobal(globalState); + } + + /** + * Converts a {@link AirbyteStateType#LEGACY} state message into a list of + * {@link AirbyteStateType#STREAM} messages. + * + * @param airbyteStateMessage A {@link AirbyteStateType#LEGACY} state message. + * @return A list {@link AirbyteStateType#STREAM} state messages. 
+ */ + public static List convertLegacyStateToStreamState(final AirbyteStateMessage airbyteStateMessage) { + return Jsons.object(airbyteStateMessage.getData(), DbState.class).getStreams().stream() + .map(s -> new AirbyteStateMessage().withType(AirbyteStateType.STREAM) + .withStream(new AirbyteStreamState() + .withStreamDescriptor(new StreamDescriptor().withNamespace(s.getStreamNamespace()).withName(s.getStreamName())) + .withStreamState(Jsons.jsonNode(s)))) + .collect(Collectors.toList()); + } + +} diff --git a/airbyte-integrations/connectors/source-relational-db/src/main/java/io/airbyte/integrations/source/relationaldb/state/StateManager.java b/airbyte-integrations/connectors/source-relational-db/src/main/java/io/airbyte/integrations/source/relationaldb/state/StateManager.java new file mode 100644 index 000000000000..a4234454b06f --- /dev/null +++ b/airbyte-integrations/connectors/source-relational-db/src/main/java/io/airbyte/integrations/source/relationaldb/state/StateManager.java @@ -0,0 +1,150 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.integrations.source.relationaldb.state; + +import com.google.common.base.Preconditions; +import io.airbyte.integrations.base.AirbyteStreamNameNamespacePair; +import io.airbyte.integrations.source.relationaldb.CdcStateManager; +import io.airbyte.integrations.source.relationaldb.CursorInfo; +import io.airbyte.protocol.models.AirbyteStateMessage; +import java.util.Map; +import java.util.Optional; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * Defines a manager that manages connector state. Connector state is used to keep track of the data + * synced by the connector. + * + * @param The type of the state maintained by the manager. + * @param The type of the stream(s) stored within the state maintained by the manager. 
+ */ +public interface StateManager { + + Logger LOGGER = LoggerFactory.getLogger(StateManager.class); + + /** + * Retrieves the {@link CdcStateManager} associated with the state manager. + * + * @return The {@link CdcStateManager} + * @throws UnsupportedOperationException if the state manager does not support tracking change data + * capture (CDC) state. + */ + CdcStateManager getCdcStateManager(); + + /** + * Retrieves the map of stream name/namespace tuple to the current cursor information for that + * stream. + * + * @return The map of stream name/namespace tuple to the current cursor information for that stream + * as maintained by this state manager. + */ + Map getPairToCursorInfoMap(); + + /** + * Generates an {@link AirbyteStateMessage} that represents the current state contained in the state + * manager. + * + * @param pair The {@link AirbyteStreamNameNamespacePair} that represents a stream managed by the + * state manager. + * @return The {@link AirbyteStateMessage} that represents the current state contained in the state + * manager. + */ + AirbyteStateMessage toState(final Optional pair); + + /** + * Retrieves an {@link Optional} possibly containing the cursor value tracked in the state + * associated with the provided stream name/namespace tuple. + * + * @param pair The {@link AirbyteStreamNameNamespacePair} which identifies a stream. + * @return An {@link Optional} possibly containing the cursor value tracked in the state associated + * with the provided stream name/namespace tuple. + */ + default Optional getCursor(final AirbyteStreamNameNamespacePair pair) { + return getCursorInfo(pair).map(CursorInfo::getCursor); + } + + /** + * Retrieves an {@link Optional} possibly containing the cursor field name associated with the + * cursor tracked in the state associated with the provided stream name/namespace tuple. + * + * @param pair The {@link AirbyteStreamNameNamespacePair} which identifies a stream. 
+   * @return An {@link Optional} possibly containing the cursor field name associated with the cursor
+   *         tracked in the state associated with the provided stream name/namespace tuple.
+   */
+  default Optional<String> getCursorField(final AirbyteStreamNameNamespacePair pair) {
+    return getCursorInfo(pair).map(CursorInfo::getCursorField);
+  }
+
+  /**
+   * Retrieves an {@link Optional} possibly containing the original cursor value tracked in the state
+   * associated with the provided stream name/namespace tuple.
+   *
+   * @param pair The {@link AirbyteStreamNameNamespacePair} which identifies a stream.
+   * @return An {@link Optional} possibly containing the original cursor value tracked in the state
+   *         associated with the provided stream name/namespace tuple.
+   */
+  default Optional<String> getOriginalCursor(final AirbyteStreamNameNamespacePair pair) {
+    return getCursorInfo(pair).map(CursorInfo::getOriginalCursor);
+  }
+
+  /**
+   * Retrieves an {@link Optional} possibly containing the original cursor field name associated with
+   * the cursor tracked in the state associated with the provided stream name/namespace tuple.
+   *
+   * @param pair The {@link AirbyteStreamNameNamespacePair} which identifies a stream.
+   * @return An {@link Optional} possibly containing the original cursor field name associated with
+   *         the cursor tracked in the state associated with the provided stream name/namespace tuple.
+   */
+  default Optional<String> getOriginalCursorField(final AirbyteStreamNameNamespacePair pair) {
+    return getCursorInfo(pair).map(CursorInfo::getOriginalCursorField);
+  }
+
+  /**
+   * Retrieves the current cursor information stored in the state manager for the stream
+   * name/namespace tuple.
+   *
+   * @param pair The {@link AirbyteStreamNameNamespacePair} that represents a stream managed by the
+   *        state manager.
+   * @return {@link Optional} that potentially contains the current cursor information for the given
+   *         stream name/namespace tuple.
+ */ + default Optional getCursorInfo(final AirbyteStreamNameNamespacePair pair) { + return Optional.ofNullable(getPairToCursorInfoMap().get(pair)); + } + + /** + * Emits the current state maintained by the manager as an {@link AirbyteStateMessage}. + * + * @param pair The {@link AirbyteStreamNameNamespacePair} that represents a stream managed by the + * state manager. + * @return An {@link AirbyteStateMessage} that represents the current state maintained by the state + * manager. + */ + default AirbyteStateMessage emit(final Optional pair) { + return toState(pair); + } + + /** + * Updates the cursor associated with the provided stream name/namespace pair and emits the current + * state maintained by the state manager. + * + * @param pair The {@link AirbyteStreamNameNamespacePair} that represents a stream managed by the + * state manager. + * @param cursor The new value for the cursor associated with the + * {@link AirbyteStreamNameNamespacePair} that represents a stream managed by the state + * manager. + * @return An {@link AirbyteStateMessage} that represents the current state maintained by the state + * manager. 
+ */ + default AirbyteStateMessage updateAndEmit(final AirbyteStreamNameNamespacePair pair, final String cursor) { + final Optional cursorInfo = getCursorInfo(pair); + Preconditions.checkState(cursorInfo.isPresent(), "Could not find cursor information for stream: " + pair); + LOGGER.debug("Updating cursor value for {} to {}...", pair, cursor); + cursorInfo.get().setCursor(cursor); + return emit(Optional.ofNullable(pair)); + } + +} diff --git a/airbyte-integrations/connectors/source-relational-db/src/main/java/io/airbyte/integrations/source/relationaldb/state/StateManagerFactory.java b/airbyte-integrations/connectors/source-relational-db/src/main/java/io/airbyte/integrations/source/relationaldb/state/StateManagerFactory.java new file mode 100644 index 000000000000..9778921fee0f --- /dev/null +++ b/airbyte-integrations/connectors/source-relational-db/src/main/java/io/airbyte/integrations/source/relationaldb/state/StateManagerFactory.java @@ -0,0 +1,125 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.integrations.source.relationaldb.state; + +import io.airbyte.commons.json.Jsons; +import io.airbyte.integrations.source.relationaldb.models.DbState; +import io.airbyte.protocol.models.AirbyteStateMessage; +import io.airbyte.protocol.models.AirbyteStateMessage.AirbyteStateType; +import io.airbyte.protocol.models.ConfiguredAirbyteCatalog; +import java.util.ArrayList; +import java.util.List; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * Factory class that creates {@link StateManager} instances based on the provided state. + */ +public class StateManagerFactory { + + private static final Logger LOGGER = LoggerFactory.getLogger(StateManagerFactory.class); + + /** + * Private constructor to prevent direct instantiation. + */ + private StateManagerFactory() {} + + /** + * Creates a {@link StateManager} based on the provided state object and catalog. 
This method will + * handle the conversion of the provided state to match the requested state manager based on the + * provided {@link AirbyteStateType}. + * + * @param supportedStateType The type of state supported by the connector. + * @param initialState The deserialized initial state that will be provided to the selected + * {@link StateManager}. + * @param catalog The {@link ConfiguredAirbyteCatalog} for the connector that will utilize the state + * manager. + * @return A newly created {@link StateManager} implementation based on the provided state. + */ + public static StateManager createStateManager(final AirbyteStateType supportedStateType, + final List initialState, + final ConfiguredAirbyteCatalog catalog) { + if (initialState != null && !initialState.isEmpty()) { + final AirbyteStateMessage airbyteStateMessage = initialState.get(0); + switch (supportedStateType) { + case LEGACY: + LOGGER.info("Legacy state manager selected to manage state object with type {}.", airbyteStateMessage.getType()); + return new LegacyStateManager(Jsons.object(airbyteStateMessage.getData(), DbState.class), catalog); + case GLOBAL: + LOGGER.info("Global state manager selected to manage state object with type {}.", airbyteStateMessage.getType()); + return new GlobalStateManager(generateGlobalState(airbyteStateMessage), catalog); + case STREAM: + default: + LOGGER.info("Stream state manager selected to manage state object with type {}.", airbyteStateMessage.getType()); + return new StreamStateManager(generateStreamState(initialState), catalog); + } + } else { + throw new IllegalArgumentException("Failed to create state manager due to empty state list."); + } + } + + /** + * Handles the conversion between a different state type and the global state. This method handles + * the following transitions: + *
    + *
  • Stream -> Global (not supported, results in {@link IllegalArgumentException}
  • + *
  • Legacy -> Global (supported)
  • + *
  • Global -> Global (supported/no conversion required)
  • + *
+ * + * @param airbyteStateMessage The current state that is to be converted to global state. + * @return The converted state message. + * @throws IllegalArgumentException if unable to convert between the given state type and global. + */ + private static AirbyteStateMessage generateGlobalState(final AirbyteStateMessage airbyteStateMessage) { + AirbyteStateMessage globalStateMessage = airbyteStateMessage; + + switch (airbyteStateMessage.getType()) { + case STREAM: + throw new IllegalArgumentException("Unable to convert connector state from stream to global. Please reset the connection to continue."); + case LEGACY: + globalStateMessage = StateGeneratorUtils.convertLegacyStateToGlobalState(airbyteStateMessage); + LOGGER.info("Legacy state converted to global state.", airbyteStateMessage.getType()); + break; + case GLOBAL: + default: + break; + } + + return globalStateMessage; + } + + /** + * Handles the conversion between a different state type and the stream state. This method handles + * the following transitions: + *
    + *
  • Global -> Stream (not supported, results in {@link IllegalArgumentException}
  • + *
  • Legacy -> Stream (supported)
  • + *
  • Stream -> Stream (supported/no conversion required)
  • + *
+ * + * @param states The list of current states. + * @return The converted state messages. + * @throws IllegalArgumentException if unable to convert between the given state type and stream. + */ + private static List generateStreamState(final List states) { + final AirbyteStateMessage airbyteStateMessage = states.get(0); + final List streamStates = new ArrayList<>(); + switch (airbyteStateMessage.getType()) { + case GLOBAL: + throw new IllegalArgumentException("Unable to convert connector state from global to stream. Please reset the connection to continue."); + case LEGACY: + streamStates.addAll(StateGeneratorUtils.convertLegacyStateToStreamState(airbyteStateMessage)); + break; + case STREAM: + default: + streamStates.addAll(states); + break; + } + + return streamStates; + } + +} diff --git a/airbyte-integrations/connectors/source-relational-db/src/main/java/io/airbyte/integrations/source/relationaldb/state/StreamStateManager.java b/airbyte-integrations/connectors/source-relational-db/src/main/java/io/airbyte/integrations/source/relationaldb/state/StreamStateManager.java new file mode 100644 index 000000000000..701fc099edcc --- /dev/null +++ b/airbyte-integrations/connectors/source-relational-db/src/main/java/io/airbyte/integrations/source/relationaldb/state/StreamStateManager.java @@ -0,0 +1,81 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+ */ + +package io.airbyte.integrations.source.relationaldb.state; + +import static io.airbyte.integrations.source.relationaldb.state.StateGeneratorUtils.CURSOR_FIELD_FUNCTION; +import static io.airbyte.integrations.source.relationaldb.state.StateGeneratorUtils.CURSOR_FUNCTION; +import static io.airbyte.integrations.source.relationaldb.state.StateGeneratorUtils.NAME_NAMESPACE_PAIR_FUNCTION; + +import io.airbyte.commons.json.Jsons; +import io.airbyte.integrations.base.AirbyteStreamNameNamespacePair; +import io.airbyte.integrations.source.relationaldb.CdcStateManager; +import io.airbyte.integrations.source.relationaldb.CursorInfo; +import io.airbyte.protocol.models.AirbyteStateMessage; +import io.airbyte.protocol.models.AirbyteStateMessage.AirbyteStateType; +import io.airbyte.protocol.models.AirbyteStreamState; +import io.airbyte.protocol.models.ConfiguredAirbyteCatalog; +import java.util.List; +import java.util.Map; +import java.util.Optional; +import java.util.stream.Collectors; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * Per-stream implementation of the {@link StateManager} interface. + * + * This implementation generates a state object for each stream detected in catalog/map of known + * streams to cursor information stored in this manager. + */ +public class StreamStateManager extends AbstractStateManager { + + private static final Logger LOGGER = LoggerFactory.getLogger(StreamStateManager.class); + + /** + * Constructs a new {@link StreamStateManager} that is seeded with the provided + * {@link AirbyteStateMessage}. + * + * @param airbyteStateMessages The initial state represented as a list of + * {@link AirbyteStateMessage}s. + * @param catalog The {@link ConfiguredAirbyteCatalog} for the connector associated with this state + * manager. 
+ */ + public StreamStateManager(final List airbyteStateMessages, final ConfiguredAirbyteCatalog catalog) { + super(catalog, + () -> airbyteStateMessages.stream().map(a -> a.getStream()).collect(Collectors.toList()), + CURSOR_FUNCTION, + CURSOR_FIELD_FUNCTION, + NAME_NAMESPACE_PAIR_FUNCTION); + } + + @Override + public CdcStateManager getCdcStateManager() { + throw new UnsupportedOperationException("CDC state management not supported by stream state manager."); + } + + @Override + public AirbyteStateMessage toState(final Optional pair) { + if (pair.isPresent()) { + final Map pairToCursorInfoMap = getPairToCursorInfoMap(); + final Optional cursorInfo = Optional.ofNullable(pairToCursorInfoMap.get(pair.get())); + + if (cursorInfo.isPresent()) { + LOGGER.debug("Generating state message for {}...", pair); + return new AirbyteStateMessage() + .withType(AirbyteStateType.STREAM) + // Temporarily include legacy state for backwards compatibility with the platform + .withData(Jsons.jsonNode(StateGeneratorUtils.generateDbState(pairToCursorInfoMap))) + .withStream(StateGeneratorUtils.generateStreamState(pair.get(), cursorInfo.get())); + } else { + LOGGER.warn("Cursor information could not be located in state for stream {}. Returning a new, empty state message...", pair); + return new AirbyteStateMessage().withType(AirbyteStateType.STREAM).withStream(new AirbyteStreamState()); + } + } else { + LOGGER.warn("Stream not provided. 
Returning a new, empty state message..."); + return new AirbyteStateMessage().withType(AirbyteStateType.STREAM).withStream(new AirbyteStreamState()); + } + } + +} diff --git a/airbyte-integrations/connectors/source-relational-db/src/test/java/io/airbyte/integrations/source/relationaldb/AbstractDbSourceTest.java b/airbyte-integrations/connectors/source-relational-db/src/test/java/io/airbyte/integrations/source/relationaldb/AbstractDbSourceTest.java new file mode 100644 index 000000000000..3ba7183b1cb2 --- /dev/null +++ b/airbyte-integrations/connectors/source-relational-db/src/test/java/io/airbyte/integrations/source/relationaldb/AbstractDbSourceTest.java @@ -0,0 +1,91 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.integrations.source.relationaldb; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.spy; + +import com.fasterxml.jackson.databind.JsonNode; +import io.airbyte.commons.features.EnvVariableFeatureFlags; +import io.airbyte.commons.json.Jsons; +import io.airbyte.commons.resources.MoreResources; +import io.airbyte.protocol.models.AirbyteStateMessage; +import io.airbyte.protocol.models.AirbyteStateMessage.AirbyteStateType; +import java.io.IOException; +import java.lang.reflect.Field; +import java.util.List; +import java.util.Map; +import org.junit.jupiter.api.Test; + +/** + * Test suite for the {@link AbstractDbSource} class. 
+ */ +public class AbstractDbSourceTest { + + @Test + void testDeserializationOfLegacyState() throws IOException { + final AbstractDbSource dbSource = spy(AbstractDbSource.class); + final JsonNode config = mock(JsonNode.class); + + final String legacyStateJson = MoreResources.readResource("states/legacy.json"); + final JsonNode legacyState = Jsons.deserialize(legacyStateJson); + + final List result = dbSource.deserializeInitialState(legacyState, config); + assertEquals(1, result.size()); + assertEquals(AirbyteStateType.LEGACY, result.get(0).getType()); + } + + @Test + void testDeserializationOfGlobalState() throws IOException { + setEnv(EnvVariableFeatureFlags.USE_STREAM_CAPABLE_STATE, "true"); + final AbstractDbSource dbSource = spy(AbstractDbSource.class); + final JsonNode config = mock(JsonNode.class); + + final String globalStateJson = MoreResources.readResource("states/global.json"); + final JsonNode globalState = Jsons.deserialize(globalStateJson); + + final List result = dbSource.deserializeInitialState(globalState, config); + assertEquals(1, result.size()); + assertEquals(AirbyteStateType.GLOBAL, result.get(0).getType()); + } + + @Test + void testDeserializationOfStreamState() throws IOException { + setEnv(EnvVariableFeatureFlags.USE_STREAM_CAPABLE_STATE, "true"); + final AbstractDbSource dbSource = spy(AbstractDbSource.class); + final JsonNode config = mock(JsonNode.class); + + final String streamStateJson = MoreResources.readResource("states/per_stream.json"); + final JsonNode streamState = Jsons.deserialize(streamStateJson); + + final List result = dbSource.deserializeInitialState(streamState, config); + assertEquals(2, result.size()); + assertEquals(AirbyteStateType.STREAM, result.get(0).getType()); + } + + @Test + void testDeserializationOfNullState() throws IOException { + final AbstractDbSource dbSource = spy(AbstractDbSource.class); + final JsonNode config = mock(JsonNode.class); + + final List result = dbSource.deserializeInitialState(null, 
config); + assertEquals(1, result.size()); + assertEquals(dbSource.getSupportedStateType(config), result.get(0).getType()); + } + + public static void setEnv(final String key, final String value) { + try { + final Map env = System.getenv(); + final Class cl = env.getClass(); + final Field field = cl.getDeclaredField("m"); + field.setAccessible(true); + final Map writableEnv = (Map) field.get(env); + writableEnv.put(key, value); + } catch (final Exception e) { + throw new IllegalStateException("Failed to set environment variable", e); + } + } +} diff --git a/airbyte-integrations/connectors/source-relational-db/src/test/java/io/airbyte/integrations/source/relationaldb/StateDecoratingIteratorTest.java b/airbyte-integrations/connectors/source-relational-db/src/test/java/io/airbyte/integrations/source/relationaldb/StateDecoratingIteratorTest.java index ab521edf87e2..e464a95e40fa 100644 --- a/airbyte-integrations/connectors/source-relational-db/src/test/java/io/airbyte/integrations/source/relationaldb/StateDecoratingIteratorTest.java +++ b/airbyte-integrations/connectors/source-relational-db/src/test/java/io/airbyte/integrations/source/relationaldb/StateDecoratingIteratorTest.java @@ -14,6 +14,7 @@ import io.airbyte.commons.json.Jsons; import io.airbyte.commons.util.MoreIterators; import io.airbyte.integrations.base.AirbyteStreamNameNamespacePair; +import io.airbyte.integrations.source.relationaldb.state.StateManager; import io.airbyte.protocol.models.AirbyteMessage; import io.airbyte.protocol.models.AirbyteMessage.Type; import io.airbyte.protocol.models.AirbyteRecordMessage; @@ -41,6 +42,11 @@ class StateDecoratingIteratorTest { .withRecord(new AirbyteRecordMessage() .withData(Jsons.jsonNode(ImmutableMap.of(UUID_FIELD_NAME, "def")))); + private static final AirbyteMessage RECORD_MESSAGE3 = new AirbyteMessage() + .withType(Type.RECORD) + .withRecord(new AirbyteRecordMessage() + .withData(Jsons.jsonNode(ImmutableMap.of(UUID_FIELD_NAME, "abc\u0000")))); + private static 
Iterator messageIterator; private StateManager stateManager; private AirbyteStateMessage stateMessage; @@ -130,4 +136,22 @@ void testEmptyStream() { assertFalse(iterator.hasNext()); } + @Test + void testUnicodeNull() { + messageIterator = MoreIterators.of(RECORD_MESSAGE3); + when(stateManager.updateAndEmit(NAME_NAMESPACE_PAIR, "abc")).thenReturn(stateMessage); + + final StateDecoratingIterator iterator = new StateDecoratingIterator( + messageIterator, + stateManager, + NAME_NAMESPACE_PAIR, + UUID_FIELD_NAME, + null, + JsonSchemaPrimitive.STRING); + + assertEquals(RECORD_MESSAGE3, iterator.next()); + assertEquals(stateMessage, iterator.next().getState()); + assertFalse(iterator.hasNext()); + } + } diff --git a/airbyte-integrations/connectors/source-relational-db/src/test/java/io/airbyte/integrations/source/relationaldb/StateManagerTest.java b/airbyte-integrations/connectors/source-relational-db/src/test/java/io/airbyte/integrations/source/relationaldb/StateManagerTest.java deleted file mode 100644 index 9e64edb55b7e..000000000000 --- a/airbyte-integrations/connectors/source-relational-db/src/test/java/io/airbyte/integrations/source/relationaldb/StateManagerTest.java +++ /dev/null @@ -1,192 +0,0 @@ -/* - * Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.integrations.source.relationaldb; - -import static org.junit.jupiter.api.Assertions.assertEquals; - -import io.airbyte.commons.json.Jsons; -import io.airbyte.integrations.base.AirbyteStreamNameNamespacePair; -import io.airbyte.integrations.source.relationaldb.models.DbState; -import io.airbyte.integrations.source.relationaldb.models.DbStreamState; -import io.airbyte.protocol.models.AirbyteStateMessage; -import io.airbyte.protocol.models.AirbyteStream; -import io.airbyte.protocol.models.ConfiguredAirbyteCatalog; -import io.airbyte.protocol.models.ConfiguredAirbyteStream; -import java.util.Collections; -import java.util.Comparator; -import java.util.Optional; -import java.util.stream.Collectors; -import org.junit.jupiter.api.Test; -import org.testcontainers.shaded.com.google.common.collect.Lists; - -class StateManagerTest { - - private static final String NAMESPACE = "public"; - private static final String STREAM_NAME1 = "cars"; - private static final AirbyteStreamNameNamespacePair NAME_NAMESPACE_PAIR1 = new AirbyteStreamNameNamespacePair(STREAM_NAME1, NAMESPACE); - private static final String STREAM_NAME2 = "bicycles"; - private static final AirbyteStreamNameNamespacePair NAME_NAMESPACE_PAIR2 = new AirbyteStreamNameNamespacePair(STREAM_NAME2, NAMESPACE); - private static final String STREAM_NAME3 = "stationary_bicycles"; - private static final String CURSOR_FIELD1 = "year"; - private static final String CURSOR_FIELD2 = "generation"; - private static final String CURSOR = "2000"; - - @Test - void testCreateCursorInfoCatalogAndStateSameCursorField() { - final CursorInfo actual = - StateManager.createCursorInfoForStream(NAME_NAMESPACE_PAIR1, getState(CURSOR_FIELD1, CURSOR), getCatalog(CURSOR_FIELD1)); - assertEquals(new CursorInfo(CURSOR_FIELD1, CURSOR, CURSOR_FIELD1, CURSOR), actual); - } - - @Test - void testCreateCursorInfoCatalogAndStateSameCursorFieldButNoCursor() { - final CursorInfo actual = - 
StateManager.createCursorInfoForStream(NAME_NAMESPACE_PAIR1, getState(CURSOR_FIELD1, null), getCatalog(CURSOR_FIELD1)); - assertEquals(new CursorInfo(CURSOR_FIELD1, null, CURSOR_FIELD1, null), actual); - } - - @Test - void testCreateCursorInfoCatalogAndStateChangeInCursorFieldName() { - final CursorInfo actual = - StateManager.createCursorInfoForStream(NAME_NAMESPACE_PAIR1, getState(CURSOR_FIELD1, CURSOR), getCatalog(CURSOR_FIELD2)); - assertEquals(new CursorInfo(CURSOR_FIELD1, CURSOR, CURSOR_FIELD2, null), actual); - } - - @Test - void testCreateCursorInfoCatalogAndNoState() { - final CursorInfo actual = StateManager - .createCursorInfoForStream(NAME_NAMESPACE_PAIR1, Optional.empty(), getCatalog(CURSOR_FIELD1)); - assertEquals(new CursorInfo(null, null, CURSOR_FIELD1, null), actual); - } - - @Test - void testCreateCursorInfoStateAndNoCatalog() { - final CursorInfo actual = StateManager - .createCursorInfoForStream(NAME_NAMESPACE_PAIR1, getState(CURSOR_FIELD1, CURSOR), Optional.empty()); - assertEquals(new CursorInfo(CURSOR_FIELD1, CURSOR, null, null), actual); - } - - // this is what full refresh looks like. 
- @Test - void testCreateCursorInfoNoCatalogAndNoState() { - final CursorInfo actual = StateManager - .createCursorInfoForStream(NAME_NAMESPACE_PAIR1, Optional.empty(), Optional.empty()); - assertEquals(new CursorInfo(null, null, null, null), actual); - } - - @Test - void testCreateCursorInfoStateAndCatalogButNoCursorField() { - final CursorInfo actual = StateManager - .createCursorInfoForStream(NAME_NAMESPACE_PAIR1, getState(CURSOR_FIELD1, CURSOR), getCatalog(null)); - assertEquals(new CursorInfo(CURSOR_FIELD1, CURSOR, null, null), actual); - } - - @SuppressWarnings("SameParameterValue") - private static Optional getState(final String cursorField, final String cursor) { - return Optional.of(new DbStreamState() - .withStreamName(STREAM_NAME1) - .withCursorField(Lists.newArrayList(cursorField)) - .withCursor(cursor)); - } - - private static Optional getCatalog(final String cursorField) { - return Optional.of(new ConfiguredAirbyteStream() - .withStream(new AirbyteStream().withName(STREAM_NAME1)) - .withCursorField(cursorField == null ? 
Collections.emptyList() : Lists.newArrayList(cursorField))); - } - - @Test - void testGetters() { - final DbState state = new DbState().withStreams(Lists.newArrayList( - new DbStreamState().withStreamName(STREAM_NAME1).withStreamNamespace(NAMESPACE).withCursorField(Lists.newArrayList(CURSOR_FIELD1)) - .withCursor(CURSOR), - new DbStreamState().withStreamName(STREAM_NAME2).withStreamNamespace(NAMESPACE))); - - final ConfiguredAirbyteCatalog catalog = new ConfiguredAirbyteCatalog() - .withStreams(Lists.newArrayList( - new ConfiguredAirbyteStream() - .withStream(new AirbyteStream().withName(STREAM_NAME1).withNamespace(NAMESPACE)) - .withCursorField(Lists.newArrayList(CURSOR_FIELD1)), - new ConfiguredAirbyteStream() - .withStream(new AirbyteStream().withName(STREAM_NAME2).withNamespace(NAMESPACE)))); - - final StateManager stateManager = new StateManager(state, catalog); - - assertEquals(Optional.of(CURSOR_FIELD1), stateManager.getOriginalCursorField(NAME_NAMESPACE_PAIR1)); - assertEquals(Optional.of(CURSOR), stateManager.getOriginalCursor(NAME_NAMESPACE_PAIR1)); - assertEquals(Optional.of(CURSOR_FIELD1), stateManager.getCursorField(NAME_NAMESPACE_PAIR1)); - assertEquals(Optional.of(CURSOR), stateManager.getCursor(NAME_NAMESPACE_PAIR1)); - - assertEquals(Optional.empty(), stateManager.getOriginalCursorField(NAME_NAMESPACE_PAIR2)); - assertEquals(Optional.empty(), stateManager.getOriginalCursor(NAME_NAMESPACE_PAIR2)); - assertEquals(Optional.empty(), stateManager.getCursorField(NAME_NAMESPACE_PAIR2)); - assertEquals(Optional.empty(), stateManager.getCursor(NAME_NAMESPACE_PAIR2)); - } - - @Test - void testToState() { - final ConfiguredAirbyteCatalog catalog = new ConfiguredAirbyteCatalog() - .withStreams(Lists.newArrayList( - new ConfiguredAirbyteStream() - .withStream(new AirbyteStream().withName(STREAM_NAME1).withNamespace(NAMESPACE)) - .withCursorField(Lists.newArrayList(CURSOR_FIELD1)), - new ConfiguredAirbyteStream() - .withStream(new 
AirbyteStream().withName(STREAM_NAME2).withNamespace(NAMESPACE)) - .withCursorField(Lists.newArrayList(CURSOR_FIELD2)), - new ConfiguredAirbyteStream() - .withStream(new AirbyteStream().withName(STREAM_NAME3).withNamespace(NAMESPACE)))); - - final StateManager stateManager = new StateManager(new DbState(), catalog); - - final AirbyteStateMessage expectedFirstEmission = new AirbyteStateMessage() - .withData(Jsons.jsonNode(new DbState().withStreams(Lists - .newArrayList( - new DbStreamState().withStreamName(STREAM_NAME1).withStreamNamespace(NAMESPACE).withCursorField(Lists.newArrayList(CURSOR_FIELD1)) - .withCursor("a"), - new DbStreamState().withStreamName(STREAM_NAME2).withStreamNamespace(NAMESPACE).withCursorField(Lists.newArrayList(CURSOR_FIELD2)), - new DbStreamState().withStreamName(STREAM_NAME3).withStreamNamespace(NAMESPACE)) - .stream().sorted(Comparator.comparing(DbStreamState::getStreamName)).collect(Collectors.toList())) - .withCdc(false))); - final AirbyteStateMessage actualFirstEmission = stateManager.updateAndEmit(NAME_NAMESPACE_PAIR1, "a"); - assertEquals(expectedFirstEmission, actualFirstEmission); - final AirbyteStateMessage expectedSecondEmission = new AirbyteStateMessage() - .withData(Jsons.jsonNode(new DbState().withStreams(Lists - .newArrayList( - new DbStreamState().withStreamName(STREAM_NAME1).withStreamNamespace(NAMESPACE).withCursorField(Lists.newArrayList(CURSOR_FIELD1)) - .withCursor("a"), - new DbStreamState().withStreamName(STREAM_NAME2).withStreamNamespace(NAMESPACE).withCursorField(Lists.newArrayList(CURSOR_FIELD2)) - .withCursor("b"), - new DbStreamState().withStreamName(STREAM_NAME3).withStreamNamespace(NAMESPACE)) - .stream().sorted(Comparator.comparing(DbStreamState::getStreamName)).collect(Collectors.toList())) - .withCdc(false))); - final AirbyteStateMessage actualSecondEmission = stateManager.updateAndEmit(NAME_NAMESPACE_PAIR2, "b"); - assertEquals(expectedSecondEmission, actualSecondEmission); - } - - @Test - void 
testToStateNullCursorField() { - final ConfiguredAirbyteCatalog catalog = new ConfiguredAirbyteCatalog() - .withStreams(Lists.newArrayList( - new ConfiguredAirbyteStream() - .withStream(new AirbyteStream().withName(STREAM_NAME1).withNamespace(NAMESPACE)) - .withCursorField(Lists.newArrayList(CURSOR_FIELD1)), - new ConfiguredAirbyteStream() - .withStream(new AirbyteStream().withName(STREAM_NAME2).withNamespace(NAMESPACE)))); - final StateManager stateManager = new StateManager(new DbState(), catalog); - - final AirbyteStateMessage expectedFirstEmission = new AirbyteStateMessage() - .withData(Jsons.jsonNode(new DbState().withStreams(Lists - .newArrayList( - new DbStreamState().withStreamName(STREAM_NAME1).withStreamNamespace(NAMESPACE).withCursorField(Lists.newArrayList(CURSOR_FIELD1)) - .withCursor("a"), - new DbStreamState().withStreamName(STREAM_NAME2).withStreamNamespace(NAMESPACE)) - .stream().sorted(Comparator.comparing(DbStreamState::getStreamName)).collect(Collectors.toList())) - .withCdc(false))); - - final AirbyteStateMessage actualFirstEmission = stateManager.updateAndEmit(NAME_NAMESPACE_PAIR1, "a"); - assertEquals(expectedFirstEmission, actualFirstEmission); - } - -} diff --git a/airbyte-integrations/connectors/source-relational-db/src/test/java/io/airbyte/integrations/source/relationaldb/state/CursorManagerTest.java b/airbyte-integrations/connectors/source-relational-db/src/test/java/io/airbyte/integrations/source/relationaldb/state/CursorManagerTest.java new file mode 100644 index 000000000000..67b7fddc23f5 --- /dev/null +++ b/airbyte-integrations/connectors/source-relational-db/src/test/java/io/airbyte/integrations/source/relationaldb/state/CursorManagerTest.java @@ -0,0 +1,140 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+ */ + +package io.airbyte.integrations.source.relationaldb.state; + +import static io.airbyte.integrations.source.relationaldb.state.StateTestConstants.CURSOR; +import static io.airbyte.integrations.source.relationaldb.state.StateTestConstants.CURSOR_FIELD1; +import static io.airbyte.integrations.source.relationaldb.state.StateTestConstants.CURSOR_FIELD2; +import static io.airbyte.integrations.source.relationaldb.state.StateTestConstants.NAME_NAMESPACE_PAIR1; +import static io.airbyte.integrations.source.relationaldb.state.StateTestConstants.NAME_NAMESPACE_PAIR2; +import static io.airbyte.integrations.source.relationaldb.state.StateTestConstants.getCatalog; +import static io.airbyte.integrations.source.relationaldb.state.StateTestConstants.getState; +import static io.airbyte.integrations.source.relationaldb.state.StateTestConstants.getStream; +import static org.junit.jupiter.api.Assertions.assertEquals; + +import io.airbyte.integrations.base.AirbyteStreamNameNamespacePair; +import io.airbyte.integrations.source.relationaldb.CursorInfo; +import io.airbyte.integrations.source.relationaldb.models.DbStreamState; +import java.util.Collections; +import java.util.Optional; +import org.junit.jupiter.api.Test; + +/** + * Test suite for the {@link CursorManager} class. 
+ */ +public class CursorManagerTest { + + @Test + void testCreateCursorInfoCatalogAndStateSameCursorField() { + final CursorManager cursorManager = createCursorManager(CURSOR_FIELD1, CURSOR, NAME_NAMESPACE_PAIR1); + final CursorInfo actual = cursorManager.createCursorInfoForStream( + NAME_NAMESPACE_PAIR1, + getState(CURSOR_FIELD1, CURSOR), + getStream(CURSOR_FIELD1), + DbStreamState::getCursor, + DbStreamState::getCursorField); + assertEquals(new CursorInfo(CURSOR_FIELD1, CURSOR, CURSOR_FIELD1, CURSOR), actual); + } + + @Test + void testCreateCursorInfoCatalogAndStateSameCursorFieldButNoCursor() { + final CursorManager cursorManager = createCursorManager(CURSOR_FIELD1, null, NAME_NAMESPACE_PAIR1); + final CursorInfo actual = cursorManager.createCursorInfoForStream( + NAME_NAMESPACE_PAIR1, + getState(CURSOR_FIELD1, null), + getStream(CURSOR_FIELD1), + DbStreamState::getCursor, + DbStreamState::getCursorField); + assertEquals(new CursorInfo(CURSOR_FIELD1, null, CURSOR_FIELD1, null), actual); + } + + @Test + void testCreateCursorInfoCatalogAndStateChangeInCursorFieldName() { + final CursorManager cursorManager = createCursorManager(CURSOR_FIELD1, CURSOR, NAME_NAMESPACE_PAIR1); + final CursorInfo actual = cursorManager.createCursorInfoForStream( + NAME_NAMESPACE_PAIR1, + getState(CURSOR_FIELD1, CURSOR), + getStream(CURSOR_FIELD2), + DbStreamState::getCursor, + DbStreamState::getCursorField); + assertEquals(new CursorInfo(CURSOR_FIELD1, CURSOR, CURSOR_FIELD2, null), actual); + } + + @Test + void testCreateCursorInfoCatalogAndNoState() { + final CursorManager cursorManager = createCursorManager(CURSOR_FIELD1, CURSOR, NAME_NAMESPACE_PAIR1); + final CursorInfo actual = cursorManager.createCursorInfoForStream( + NAME_NAMESPACE_PAIR1, + Optional.empty(), + getStream(CURSOR_FIELD1), + DbStreamState::getCursor, + DbStreamState::getCursorField); + assertEquals(new CursorInfo(null, null, CURSOR_FIELD1, null), actual); + } + + @Test + void testCreateCursorInfoStateAndNoCatalog() 
{ + final CursorManager cursorManager = createCursorManager(CURSOR_FIELD1, CURSOR, NAME_NAMESPACE_PAIR1); + final CursorInfo actual = cursorManager.createCursorInfoForStream( + NAME_NAMESPACE_PAIR1, + getState(CURSOR_FIELD1, CURSOR), + Optional.empty(), + DbStreamState::getCursor, + DbStreamState::getCursorField); + assertEquals(new CursorInfo(CURSOR_FIELD1, CURSOR, null, null), actual); + } + + // this is what full refresh looks like. + @Test + void testCreateCursorInfoNoCatalogAndNoState() { + final CursorManager cursorManager = createCursorManager(CURSOR_FIELD1, CURSOR, NAME_NAMESPACE_PAIR1); + final CursorInfo actual = cursorManager.createCursorInfoForStream( + NAME_NAMESPACE_PAIR1, + Optional.empty(), + Optional.empty(), + DbStreamState::getCursor, + DbStreamState::getCursorField); + assertEquals(new CursorInfo(null, null, null, null), actual); + } + + @Test + void testCreateCursorInfoStateAndCatalogButNoCursorField() { + final CursorManager cursorManager = createCursorManager(CURSOR_FIELD1, CURSOR, NAME_NAMESPACE_PAIR1); + final CursorInfo actual = cursorManager.createCursorInfoForStream( + NAME_NAMESPACE_PAIR1, + getState(CURSOR_FIELD1, CURSOR), + getStream(null), + DbStreamState::getCursor, + DbStreamState::getCursorField); + assertEquals(new CursorInfo(CURSOR_FIELD1, CURSOR, null, null), actual); + } + + @Test + void testGetters() { + final CursorManager cursorManager = createCursorManager(CURSOR_FIELD1, CURSOR, NAME_NAMESPACE_PAIR1); + final CursorInfo actualCursorInfo = new CursorInfo(CURSOR_FIELD1, CURSOR, null, null); + + assertEquals(Optional.of(actualCursorInfo), cursorManager.getCursorInfo(NAME_NAMESPACE_PAIR1)); + assertEquals(Optional.empty(), cursorManager.getCursorField(NAME_NAMESPACE_PAIR1)); + assertEquals(Optional.empty(), cursorManager.getCursor(NAME_NAMESPACE_PAIR1)); + + assertEquals(Optional.empty(), cursorManager.getCursorInfo(NAME_NAMESPACE_PAIR2)); + assertEquals(Optional.empty(), cursorManager.getCursorField(NAME_NAMESPACE_PAIR2)); + 
assertEquals(Optional.empty(), cursorManager.getCursor(NAME_NAMESPACE_PAIR2)); + } + + private CursorManager createCursorManager(final String cursorField, + final String cursor, + final AirbyteStreamNameNamespacePair nameNamespacePair) { + final DbStreamState dbStreamState = getState(cursorField, cursor).get(); + return new CursorManager<>( + getCatalog(cursorField).orElse(null), + () -> Collections.singleton(dbStreamState), + DbStreamState::getCursor, + DbStreamState::getCursorField, + s -> nameNamespacePair); + } + +} diff --git a/airbyte-integrations/connectors/source-relational-db/src/test/java/io/airbyte/integrations/source/relationaldb/state/GlobalStateManagerTest.java b/airbyte-integrations/connectors/source-relational-db/src/test/java/io/airbyte/integrations/source/relationaldb/state/GlobalStateManagerTest.java new file mode 100644 index 000000000000..0a80b79c6f58 --- /dev/null +++ b/airbyte-integrations/connectors/source-relational-db/src/test/java/io/airbyte/integrations/source/relationaldb/state/GlobalStateManagerTest.java @@ -0,0 +1,218 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+ */ + +package io.airbyte.integrations.source.relationaldb.state; + +import static io.airbyte.integrations.source.relationaldb.state.StateTestConstants.CURSOR_FIELD1; +import static io.airbyte.integrations.source.relationaldb.state.StateTestConstants.CURSOR_FIELD2; +import static io.airbyte.integrations.source.relationaldb.state.StateTestConstants.NAMESPACE; +import static io.airbyte.integrations.source.relationaldb.state.StateTestConstants.NAME_NAMESPACE_PAIR1; +import static io.airbyte.integrations.source.relationaldb.state.StateTestConstants.STREAM_NAME1; +import static io.airbyte.integrations.source.relationaldb.state.StateTestConstants.STREAM_NAME2; +import static io.airbyte.integrations.source.relationaldb.state.StateTestConstants.STREAM_NAME3; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.mockito.Mockito.mock; + +import io.airbyte.commons.json.Jsons; +import io.airbyte.integrations.source.relationaldb.models.CdcState; +import io.airbyte.integrations.source.relationaldb.models.DbState; +import io.airbyte.integrations.source.relationaldb.models.DbStreamState; +import io.airbyte.protocol.models.AirbyteGlobalState; +import io.airbyte.protocol.models.AirbyteStateMessage; +import io.airbyte.protocol.models.AirbyteStateMessage.AirbyteStateType; +import io.airbyte.protocol.models.AirbyteStream; +import io.airbyte.protocol.models.AirbyteStreamState; +import io.airbyte.protocol.models.ConfiguredAirbyteCatalog; +import io.airbyte.protocol.models.ConfiguredAirbyteStream; +import io.airbyte.protocol.models.StreamDescriptor; +import java.util.Comparator; +import java.util.List; +import java.util.Map; +import java.util.Optional; +import java.util.stream.Collectors; +import org.junit.jupiter.api.Test; + +/** + * Test suite for the {@link GlobalStateManager} class. 
+ */ +public class GlobalStateManagerTest { + + @Test + void testCdcStateManager() { + final ConfiguredAirbyteCatalog catalog = mock(ConfiguredAirbyteCatalog.class); + final CdcState cdcState = new CdcState().withState(Jsons.jsonNode(Map.of("foo", "bar", "baz", 5))); + final AirbyteGlobalState globalState = new AirbyteGlobalState().withSharedState(Jsons.jsonNode(cdcState)) + .withStreamStates(List.of(new AirbyteStreamState().withStreamDescriptor(new StreamDescriptor().withNamespace("namespace").withName("name")) + .withStreamState(Jsons.jsonNode(new DbStreamState())))); + final StateManager stateManager = + new GlobalStateManager(new AirbyteStateMessage().withType(AirbyteStateType.GLOBAL).withGlobal(globalState), catalog); + assertNotNull(stateManager.getCdcStateManager()); + assertEquals(cdcState, stateManager.getCdcStateManager().getCdcState()); + } + + @Test + void testToStateFromLegacyState() { + final ConfiguredAirbyteCatalog catalog = new ConfiguredAirbyteCatalog() + .withStreams(List.of( + new ConfiguredAirbyteStream() + .withStream(new AirbyteStream().withName(STREAM_NAME1).withNamespace(NAMESPACE)) + .withCursorField(List.of(CURSOR_FIELD1)), + new ConfiguredAirbyteStream() + .withStream(new AirbyteStream().withName(STREAM_NAME2).withNamespace(NAMESPACE)) + .withCursorField(List.of(CURSOR_FIELD2)), + new ConfiguredAirbyteStream() + .withStream(new AirbyteStream().withName(STREAM_NAME3).withNamespace(NAMESPACE)))); + + final CdcState cdcState = new CdcState().withState(Jsons.jsonNode(Map.of("foo", "bar", "baz", 5))); + final DbState dbState = new DbState() + .withCdc(true) + .withCdcState(cdcState) + .withStreams(List.of( + new DbStreamState() + .withStreamName(STREAM_NAME1) + .withStreamNamespace(NAMESPACE) + .withCursorField(List.of(CURSOR_FIELD1)) + .withCursor("a"), + new DbStreamState() + .withStreamName(STREAM_NAME2) + .withStreamNamespace(NAMESPACE) + .withCursorField(List.of(CURSOR_FIELD2)), + new DbStreamState() + .withStreamName(STREAM_NAME3) + 
.withStreamNamespace(NAMESPACE)) + .stream().sorted(Comparator.comparing(DbStreamState::getStreamName)).collect(Collectors.toList())); + final StateManager stateManager = new GlobalStateManager(new AirbyteStateMessage().withData(Jsons.jsonNode(dbState)), catalog); + + final DbState expectedDbState = new DbState() + .withCdc(true) + .withCdcState(cdcState) + .withStreams(List.of( + new DbStreamState() + .withStreamName(STREAM_NAME1) + .withStreamNamespace(NAMESPACE) + .withCursorField(List.of(CURSOR_FIELD1)) + .withCursor("a"), + new DbStreamState() + .withStreamName(STREAM_NAME2) + .withStreamNamespace(NAMESPACE) + .withCursorField(List.of(CURSOR_FIELD2)), + new DbStreamState() + .withStreamName(STREAM_NAME3) + .withStreamNamespace(NAMESPACE)) + .stream().sorted(Comparator.comparing(DbStreamState::getStreamName)).collect(Collectors.toList())); + + final AirbyteGlobalState expectedGlobalState = new AirbyteGlobalState() + .withSharedState(Jsons.jsonNode(cdcState)) + .withStreamStates(List.of( + new AirbyteStreamState() + .withStreamDescriptor(new StreamDescriptor().withName(STREAM_NAME1).withNamespace(NAMESPACE)) + .withStreamState(Jsons.jsonNode(new DbStreamState() + .withStreamName(STREAM_NAME1) + .withStreamNamespace(NAMESPACE) + .withCursorField(List.of(CURSOR_FIELD1)) + .withCursor("a"))), + new AirbyteStreamState() + .withStreamDescriptor(new StreamDescriptor().withName(STREAM_NAME2).withNamespace(NAMESPACE)) + .withStreamState(Jsons.jsonNode(new DbStreamState() + .withStreamName(STREAM_NAME2) + .withStreamNamespace(NAMESPACE) + .withCursorField(List.of(CURSOR_FIELD2)))), + new AirbyteStreamState() + .withStreamDescriptor(new StreamDescriptor().withName(STREAM_NAME3).withNamespace(NAMESPACE)) + .withStreamState(Jsons.jsonNode(new DbStreamState() + .withStreamName(STREAM_NAME3) + .withStreamNamespace(NAMESPACE)))) + .stream().sorted(Comparator.comparing(o -> o.getStreamDescriptor().getName())).collect(Collectors.toList())); + final AirbyteStateMessage expected = 
new AirbyteStateMessage() + .withData(Jsons.jsonNode(expectedDbState)) + .withGlobal(expectedGlobalState) + .withType(AirbyteStateType.GLOBAL); + + final AirbyteStateMessage actualFirstEmission = stateManager.updateAndEmit(NAME_NAMESPACE_PAIR1, "a"); + assertEquals(expected, actualFirstEmission); + } + + @Test + void testToState() { + final ConfiguredAirbyteCatalog catalog = new ConfiguredAirbyteCatalog() + .withStreams(List.of( + new ConfiguredAirbyteStream() + .withStream(new AirbyteStream().withName(STREAM_NAME1).withNamespace(NAMESPACE)) + .withCursorField(List.of(CURSOR_FIELD1)), + new ConfiguredAirbyteStream() + .withStream(new AirbyteStream().withName(STREAM_NAME2).withNamespace(NAMESPACE)) + .withCursorField(List.of(CURSOR_FIELD2)), + new ConfiguredAirbyteStream() + .withStream(new AirbyteStream().withName(STREAM_NAME3).withNamespace(NAMESPACE)))); + + final CdcState cdcState = new CdcState().withState(Jsons.jsonNode(Map.of("foo", "bar", "baz", 5))); + final AirbyteGlobalState globalState = new AirbyteGlobalState().withSharedState(Jsons.jsonNode(new DbState())).withStreamStates( + List.of(new AirbyteStreamState().withStreamDescriptor(new StreamDescriptor()).withStreamState(Jsons.jsonNode(new DbStreamState())))); + final StateManager stateManager = + new GlobalStateManager(new AirbyteStateMessage().withType(AirbyteStateType.GLOBAL).withGlobal(globalState), catalog); + stateManager.getCdcStateManager().setCdcState(cdcState); + + final DbState expectedDbState = new DbState() + .withCdc(true) + .withCdcState(cdcState) + .withStreams(List.of( + new DbStreamState() + .withStreamName(STREAM_NAME1) + .withStreamNamespace(NAMESPACE) + .withCursorField(List.of(CURSOR_FIELD1)) + .withCursor("a"), + new DbStreamState() + .withStreamName(STREAM_NAME2) + .withStreamNamespace(NAMESPACE) + .withCursorField(List.of(CURSOR_FIELD2)), + new DbStreamState() + .withStreamName(STREAM_NAME3) + .withStreamNamespace(NAMESPACE)) + 
.stream().sorted(Comparator.comparing(DbStreamState::getStreamName)).collect(Collectors.toList())); + + final AirbyteGlobalState expectedGlobalState = new AirbyteGlobalState() + .withSharedState(Jsons.jsonNode(cdcState)) + .withStreamStates(List.of( + new AirbyteStreamState() + .withStreamDescriptor(new StreamDescriptor().withName(STREAM_NAME1).withNamespace(NAMESPACE)) + .withStreamState(Jsons.jsonNode(new DbStreamState() + .withStreamName(STREAM_NAME1) + .withStreamNamespace(NAMESPACE) + .withCursorField(List.of(CURSOR_FIELD1)) + .withCursor("a"))), + new AirbyteStreamState() + .withStreamDescriptor(new StreamDescriptor().withName(STREAM_NAME2).withNamespace(NAMESPACE)) + .withStreamState(Jsons.jsonNode(new DbStreamState() + .withStreamName(STREAM_NAME2) + .withStreamNamespace(NAMESPACE) + .withCursorField(List.of(CURSOR_FIELD2)))), + new AirbyteStreamState() + .withStreamDescriptor(new StreamDescriptor().withName(STREAM_NAME3).withNamespace(NAMESPACE)) + .withStreamState(Jsons.jsonNode(new DbStreamState() + .withStreamName(STREAM_NAME3) + .withStreamNamespace(NAMESPACE)))) + .stream().sorted(Comparator.comparing(o -> o.getStreamDescriptor().getName())).collect(Collectors.toList())); + final AirbyteStateMessage expected = new AirbyteStateMessage() + .withData(Jsons.jsonNode(expectedDbState)) + .withGlobal(expectedGlobalState) + .withType(AirbyteStateType.GLOBAL); + + final AirbyteStateMessage actualFirstEmission = stateManager.updateAndEmit(NAME_NAMESPACE_PAIR1, "a"); + assertEquals(expected, actualFirstEmission); + } + + @Test + void testToStateWithNoState() { + final ConfiguredAirbyteCatalog catalog = new ConfiguredAirbyteCatalog(); + final StateManager stateManager = + new GlobalStateManager(new AirbyteStateMessage(), catalog); + + final AirbyteStateMessage airbyteStateMessage = stateManager.toState(Optional.empty()); + assertNotNull(airbyteStateMessage); + assertEquals(AirbyteStateType.GLOBAL, airbyteStateMessage.getType()); + assertEquals(0, 
airbyteStateMessage.getGlobal().getStreamStates().size()); + } + +} diff --git a/airbyte-integrations/connectors/source-relational-db/src/test/java/io/airbyte/integrations/source/relationaldb/state/LegacyStateManagerTest.java b/airbyte-integrations/connectors/source-relational-db/src/test/java/io/airbyte/integrations/source/relationaldb/state/LegacyStateManagerTest.java new file mode 100644 index 000000000000..9dd279a7edab --- /dev/null +++ b/airbyte-integrations/connectors/source-relational-db/src/test/java/io/airbyte/integrations/source/relationaldb/state/LegacyStateManagerTest.java @@ -0,0 +1,181 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.integrations.source.relationaldb.state; + +import static io.airbyte.integrations.source.relationaldb.state.StateTestConstants.CURSOR; +import static io.airbyte.integrations.source.relationaldb.state.StateTestConstants.CURSOR_FIELD1; +import static io.airbyte.integrations.source.relationaldb.state.StateTestConstants.CURSOR_FIELD2; +import static io.airbyte.integrations.source.relationaldb.state.StateTestConstants.NAMESPACE; +import static io.airbyte.integrations.source.relationaldb.state.StateTestConstants.NAME_NAMESPACE_PAIR1; +import static io.airbyte.integrations.source.relationaldb.state.StateTestConstants.NAME_NAMESPACE_PAIR2; +import static io.airbyte.integrations.source.relationaldb.state.StateTestConstants.STREAM_NAME1; +import static io.airbyte.integrations.source.relationaldb.state.StateTestConstants.STREAM_NAME2; +import static io.airbyte.integrations.source.relationaldb.state.StateTestConstants.STREAM_NAME3; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.mockito.Mockito.mock; + +import io.airbyte.commons.json.Jsons; +import io.airbyte.integrations.source.relationaldb.models.CdcState; +import io.airbyte.integrations.source.relationaldb.models.DbState; +import 
io.airbyte.integrations.source.relationaldb.models.DbStreamState; +import io.airbyte.protocol.models.AirbyteStateMessage; +import io.airbyte.protocol.models.AirbyteStateMessage.AirbyteStateType; +import io.airbyte.protocol.models.AirbyteStream; +import io.airbyte.protocol.models.ConfiguredAirbyteCatalog; +import io.airbyte.protocol.models.ConfiguredAirbyteStream; +import java.util.Comparator; +import java.util.List; +import java.util.Map; +import java.util.Optional; +import java.util.stream.Collectors; +import org.junit.jupiter.api.Test; + +/** + * Test suite for the {@link LegacyStateManager} class. + */ +public class LegacyStateManagerTest { + + @Test + void testGetters() { + final DbState state = new DbState().withStreams(List.of( + new DbStreamState().withStreamName(STREAM_NAME1).withStreamNamespace(NAMESPACE).withCursorField(List.of(CURSOR_FIELD1)) + .withCursor(CURSOR), + new DbStreamState().withStreamName(STREAM_NAME2).withStreamNamespace(NAMESPACE))); + + final ConfiguredAirbyteCatalog catalog = new ConfiguredAirbyteCatalog() + .withStreams(List.of( + new ConfiguredAirbyteStream() + .withStream(new AirbyteStream().withName(STREAM_NAME1).withNamespace(NAMESPACE)) + .withCursorField(List.of(CURSOR_FIELD1)), + new ConfiguredAirbyteStream() + .withStream(new AirbyteStream().withName(STREAM_NAME2).withNamespace(NAMESPACE)))); + + final StateManager stateManager = new LegacyStateManager(state, catalog); + + assertEquals(Optional.of(CURSOR_FIELD1), stateManager.getOriginalCursorField(NAME_NAMESPACE_PAIR1)); + assertEquals(Optional.of(CURSOR), stateManager.getOriginalCursor(NAME_NAMESPACE_PAIR1)); + assertEquals(Optional.of(CURSOR_FIELD1), stateManager.getCursorField(NAME_NAMESPACE_PAIR1)); + assertEquals(Optional.of(CURSOR), stateManager.getCursor(NAME_NAMESPACE_PAIR1)); + + assertEquals(Optional.empty(), stateManager.getOriginalCursorField(NAME_NAMESPACE_PAIR2)); + assertEquals(Optional.empty(), stateManager.getOriginalCursor(NAME_NAMESPACE_PAIR2)); + 
assertEquals(Optional.empty(), stateManager.getCursorField(NAME_NAMESPACE_PAIR2)); + assertEquals(Optional.empty(), stateManager.getCursor(NAME_NAMESPACE_PAIR2)); + } + + @Test + void testToState() { + final ConfiguredAirbyteCatalog catalog = new ConfiguredAirbyteCatalog() + .withStreams(List.of( + new ConfiguredAirbyteStream() + .withStream(new AirbyteStream().withName(STREAM_NAME1).withNamespace(NAMESPACE)) + .withCursorField(List.of(CURSOR_FIELD1)), + new ConfiguredAirbyteStream() + .withStream(new AirbyteStream().withName(STREAM_NAME2).withNamespace(NAMESPACE)) + .withCursorField(List.of(CURSOR_FIELD2)), + new ConfiguredAirbyteStream() + .withStream(new AirbyteStream().withName(STREAM_NAME3).withNamespace(NAMESPACE)))); + + final StateManager stateManager = new LegacyStateManager(new DbState(), catalog); + + final AirbyteStateMessage expectedFirstEmission = new AirbyteStateMessage() + .withType(AirbyteStateType.LEGACY) + .withData(Jsons.jsonNode(new DbState().withStreams(List.of( + new DbStreamState().withStreamName(STREAM_NAME1).withStreamNamespace(NAMESPACE).withCursorField(List.of(CURSOR_FIELD1)) + .withCursor("a"), + new DbStreamState().withStreamName(STREAM_NAME2).withStreamNamespace(NAMESPACE).withCursorField(List.of(CURSOR_FIELD2)), + new DbStreamState().withStreamName(STREAM_NAME3).withStreamNamespace(NAMESPACE)) + .stream().sorted(Comparator.comparing(DbStreamState::getStreamName)).collect(Collectors.toList())) + .withCdc(false))); + final AirbyteStateMessage actualFirstEmission = stateManager.updateAndEmit(NAME_NAMESPACE_PAIR1, "a"); + assertEquals(expectedFirstEmission, actualFirstEmission); + final AirbyteStateMessage expectedSecondEmission = new AirbyteStateMessage() + .withType(AirbyteStateType.LEGACY) + .withData(Jsons.jsonNode(new DbState().withStreams(List.of( + new DbStreamState().withStreamName(STREAM_NAME1).withStreamNamespace(NAMESPACE).withCursorField(List.of(CURSOR_FIELD1)) + .withCursor("a"), + new 
DbStreamState().withStreamName(STREAM_NAME2).withStreamNamespace(NAMESPACE).withCursorField(List.of(CURSOR_FIELD2)) + .withCursor("b"), + new DbStreamState().withStreamName(STREAM_NAME3).withStreamNamespace(NAMESPACE)) + .stream().sorted(Comparator.comparing(DbStreamState::getStreamName)).collect(Collectors.toList())) + .withCdc(false))); + final AirbyteStateMessage actualSecondEmission = stateManager.updateAndEmit(NAME_NAMESPACE_PAIR2, "b"); + assertEquals(expectedSecondEmission, actualSecondEmission); + } + + @Test + void testToStateNullCursorField() { + final ConfiguredAirbyteCatalog catalog = new ConfiguredAirbyteCatalog() + .withStreams(List.of( + new ConfiguredAirbyteStream() + .withStream(new AirbyteStream().withName(STREAM_NAME1).withNamespace(NAMESPACE)) + .withCursorField(List.of(CURSOR_FIELD1)), + new ConfiguredAirbyteStream() + .withStream(new AirbyteStream().withName(STREAM_NAME2).withNamespace(NAMESPACE)))); + final StateManager stateManager = new LegacyStateManager(new DbState(), catalog); + + final AirbyteStateMessage expectedFirstEmission = new AirbyteStateMessage() + .withType(AirbyteStateType.LEGACY) + .withData(Jsons.jsonNode(new DbState().withStreams(List.of( + new DbStreamState().withStreamName(STREAM_NAME1).withStreamNamespace(NAMESPACE).withCursorField(List.of(CURSOR_FIELD1)) + .withCursor("a"), + new DbStreamState().withStreamName(STREAM_NAME2).withStreamNamespace(NAMESPACE)) + .stream().sorted(Comparator.comparing(DbStreamState::getStreamName)).collect(Collectors.toList())) + .withCdc(false))); + + final AirbyteStateMessage actualFirstEmission = stateManager.updateAndEmit(NAME_NAMESPACE_PAIR1, "a"); + assertEquals(expectedFirstEmission, actualFirstEmission); + } + + @Test + void testCursorNotUpdatedForCdc() { + final ConfiguredAirbyteCatalog catalog = new ConfiguredAirbyteCatalog() + .withStreams(List.of( + new ConfiguredAirbyteStream() + .withStream(new AirbyteStream().withName(STREAM_NAME1).withNamespace(NAMESPACE)) + 
.withCursorField(List.of(CURSOR_FIELD1)), + new ConfiguredAirbyteStream() + .withStream(new AirbyteStream().withName(STREAM_NAME2).withNamespace(NAMESPACE)))); + + final DbState state = new DbState(); + state.setCdc(true); + final StateManager stateManager = new LegacyStateManager(state, catalog); + + final AirbyteStateMessage expectedFirstEmission = new AirbyteStateMessage() + .withType(AirbyteStateType.LEGACY) + .withData(Jsons.jsonNode(new DbState().withStreams(List.of( + new DbStreamState().withStreamName(STREAM_NAME1).withStreamNamespace(NAMESPACE).withCursorField(List.of(CURSOR_FIELD1)) + .withCursor(null), + new DbStreamState().withStreamName(STREAM_NAME2).withStreamNamespace(NAMESPACE).withCursorField(List.of())) + .stream().sorted(Comparator.comparing(DbStreamState::getStreamName)).collect(Collectors.toList())) + .withCdc(true))); + final AirbyteStateMessage actualFirstEmission = stateManager.updateAndEmit(NAME_NAMESPACE_PAIR1, "a"); + assertEquals(expectedFirstEmission, actualFirstEmission); + final AirbyteStateMessage expectedSecondEmission = new AirbyteStateMessage() + .withType(AirbyteStateType.LEGACY) + .withData(Jsons.jsonNode(new DbState().withStreams(List.of( + new DbStreamState().withStreamName(STREAM_NAME1).withStreamNamespace(NAMESPACE).withCursorField(List.of(CURSOR_FIELD1)) + .withCursor(null), + new DbStreamState().withStreamName(STREAM_NAME2).withStreamNamespace(NAMESPACE).withCursorField(List.of()) + .withCursor(null)) + .stream().sorted(Comparator.comparing(DbStreamState::getStreamName)).collect(Collectors.toList())) + .withCdc(true))); + final AirbyteStateMessage actualSecondEmission = stateManager.updateAndEmit(NAME_NAMESPACE_PAIR2, "b"); + assertEquals(expectedSecondEmission, actualSecondEmission); + } + + @Test + void testCdcStateManager() { + final ConfiguredAirbyteCatalog catalog = mock(ConfiguredAirbyteCatalog.class); + final CdcState cdcState = new CdcState().withState(Jsons.jsonNode(Map.of("foo", "bar", "baz", 5))); + final 
DbState dbState = new DbState().withCdcState(cdcState).withStreams(List.of( + new DbStreamState().withStreamNamespace(NAMESPACE).withStreamName(STREAM_NAME1))); + final StateManager stateManager = new LegacyStateManager(dbState, catalog); + assertNotNull(stateManager.getCdcStateManager()); + assertEquals(cdcState, stateManager.getCdcStateManager().getCdcState()); + } + +} diff --git a/airbyte-integrations/connectors/source-relational-db/src/test/java/io/airbyte/integrations/source/relationaldb/state/StateGeneratorUtilsTest.java b/airbyte-integrations/connectors/source-relational-db/src/test/java/io/airbyte/integrations/source/relationaldb/state/StateGeneratorUtilsTest.java new file mode 100644 index 000000000000..9ac94775c928 --- /dev/null +++ b/airbyte-integrations/connectors/source-relational-db/src/test/java/io/airbyte/integrations/source/relationaldb/state/StateGeneratorUtilsTest.java @@ -0,0 +1,39 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.integrations.source.relationaldb.state; + +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; + +import io.airbyte.protocol.models.StreamDescriptor; +import org.junit.jupiter.api.Test; + +/** + * Test suite for the {@link StateGeneratorUtils} class. 
+ */ +public class StateGeneratorUtilsTest { + + @Test + void testValidStreamDescriptor() { + final StreamDescriptor streamDescriptor1 = null; + final StreamDescriptor streamDescriptor2 = new StreamDescriptor(); + final StreamDescriptor streamDescriptor3 = new StreamDescriptor().withName("name"); + final StreamDescriptor streamDescriptor4 = new StreamDescriptor().withNamespace("namespace"); + final StreamDescriptor streamDescriptor5 = new StreamDescriptor().withName("name").withNamespace("namespace"); + final StreamDescriptor streamDescriptor6 = new StreamDescriptor().withName("name").withNamespace(""); + final StreamDescriptor streamDescriptor7 = new StreamDescriptor().withName("").withNamespace("namespace"); + final StreamDescriptor streamDescriptor8 = new StreamDescriptor().withName("").withNamespace(""); + + assertFalse(StateGeneratorUtils.isValidStreamDescriptor(streamDescriptor1)); + assertFalse(StateGeneratorUtils.isValidStreamDescriptor(streamDescriptor2)); + assertTrue(StateGeneratorUtils.isValidStreamDescriptor(streamDescriptor3)); + assertFalse(StateGeneratorUtils.isValidStreamDescriptor(streamDescriptor4)); + assertTrue(StateGeneratorUtils.isValidStreamDescriptor(streamDescriptor5)); + assertTrue(StateGeneratorUtils.isValidStreamDescriptor(streamDescriptor6)); + assertTrue(StateGeneratorUtils.isValidStreamDescriptor(streamDescriptor7)); + assertTrue(StateGeneratorUtils.isValidStreamDescriptor(streamDescriptor8)); + } + +} diff --git a/airbyte-integrations/connectors/source-relational-db/src/test/java/io/airbyte/integrations/source/relationaldb/state/StateManagerFactoryTest.java b/airbyte-integrations/connectors/source-relational-db/src/test/java/io/airbyte/integrations/source/relationaldb/state/StateManagerFactoryTest.java new file mode 100644 index 000000000000..6f911e360ffd --- /dev/null +++ b/airbyte-integrations/connectors/source-relational-db/src/test/java/io/airbyte/integrations/source/relationaldb/state/StateManagerFactoryTest.java @@ -0,0 +1,187 
@@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.integrations.source.relationaldb.state; + +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +import io.airbyte.commons.json.Jsons; +import io.airbyte.integrations.source.relationaldb.models.CdcState; +import io.airbyte.integrations.source.relationaldb.models.DbState; +import io.airbyte.integrations.source.relationaldb.models.DbStreamState; +import io.airbyte.protocol.models.AirbyteGlobalState; +import io.airbyte.protocol.models.AirbyteStateMessage; +import io.airbyte.protocol.models.AirbyteStateMessage.AirbyteStateType; +import io.airbyte.protocol.models.AirbyteStreamState; +import io.airbyte.protocol.models.ConfiguredAirbyteCatalog; +import io.airbyte.protocol.models.StreamDescriptor; +import java.util.List; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; + +/** + * Test suite for the {@link StateManagerFactory} class. + */ +public class StateManagerFactoryTest { + + private static final String NAMESPACE = "namespace"; + private static final String NAME = "name"; + + @Test + void testNullOrEmptyState() { + final ConfiguredAirbyteCatalog catalog = mock(ConfiguredAirbyteCatalog.class); + + Assertions.assertThrows(IllegalArgumentException.class, () -> { + StateManagerFactory.createStateManager(AirbyteStateType.GLOBAL, null, catalog); + }); + + Assertions.assertThrows(IllegalArgumentException.class, () -> { + StateManagerFactory.createStateManager(AirbyteStateType.GLOBAL, List.of(), catalog); + }); + + Assertions.assertThrows(IllegalArgumentException.class, () -> { + StateManagerFactory.createStateManager(AirbyteStateType.LEGACY, null, catalog); + }); + + Assertions.assertThrows(IllegalArgumentException.class, () -> { + StateManagerFactory.createStateManager(AirbyteStateType.LEGACY, List.of(), catalog); + }); + + Assertions.assertThrows(IllegalArgumentException.class, () -> { + 
StateManagerFactory.createStateManager(AirbyteStateType.STREAM, null, catalog); + }); + + Assertions.assertThrows(IllegalArgumentException.class, () -> { + StateManagerFactory.createStateManager(AirbyteStateType.STREAM, List.of(), catalog); + }); + } + + @Test + void testLegacyStateManagerCreationFromAirbyteStateMessage() { + final ConfiguredAirbyteCatalog catalog = mock(ConfiguredAirbyteCatalog.class); + final AirbyteStateMessage airbyteStateMessage = mock(AirbyteStateMessage.class); + when(airbyteStateMessage.getData()).thenReturn(Jsons.jsonNode(new DbState())); + + final StateManager stateManager = StateManagerFactory.createStateManager(AirbyteStateType.LEGACY, List.of(airbyteStateMessage), catalog); + + Assertions.assertNotNull(stateManager); + Assertions.assertEquals(LegacyStateManager.class, stateManager.getClass()); + } + + @Test + void testGlobalStateManagerCreation() { + final ConfiguredAirbyteCatalog catalog = mock(ConfiguredAirbyteCatalog.class); + final AirbyteGlobalState globalState = + new AirbyteGlobalState().withSharedState(Jsons.jsonNode(new DbState().withCdcState(new CdcState().withState(Jsons.jsonNode(new DbState()))))) + .withStreamStates(List.of(new AirbyteStreamState().withStreamDescriptor(new StreamDescriptor().withNamespace(NAMESPACE).withName(NAME)) + .withStreamState(Jsons.jsonNode(new DbStreamState())))); + final AirbyteStateMessage airbyteStateMessage = new AirbyteStateMessage().withType(AirbyteStateType.GLOBAL).withGlobal(globalState); + + final StateManager stateManager = StateManagerFactory.createStateManager(AirbyteStateType.GLOBAL, List.of(airbyteStateMessage), catalog); + + Assertions.assertNotNull(stateManager); + Assertions.assertEquals(GlobalStateManager.class, stateManager.getClass()); + } + + @Test + void testGlobalStateManagerCreationFromLegacyState() { + final ConfiguredAirbyteCatalog catalog = mock(ConfiguredAirbyteCatalog.class); + final CdcState cdcState = new CdcState(); + final DbState dbState = new DbState() + 
.withCdcState(cdcState) + .withStreams(List.of(new DbStreamState().withStreamName(NAME).withStreamNamespace(NAMESPACE))); + final AirbyteStateMessage airbyteStateMessage = + new AirbyteStateMessage().withType(AirbyteStateType.LEGACY).withData(Jsons.jsonNode(dbState)); + + final StateManager stateManager = StateManagerFactory.createStateManager(AirbyteStateType.GLOBAL, List.of(airbyteStateMessage), catalog); + + Assertions.assertNotNull(stateManager); + Assertions.assertEquals(GlobalStateManager.class, stateManager.getClass()); + } + + @Test + void testGlobalStateManagerCreationFromStreamState() { + final ConfiguredAirbyteCatalog catalog = mock(ConfiguredAirbyteCatalog.class); + final AirbyteStateMessage airbyteStateMessage = new AirbyteStateMessage().withType(AirbyteStateType.STREAM) + .withStream(new AirbyteStreamState().withStreamDescriptor(new StreamDescriptor().withName(NAME).withNamespace( + NAMESPACE)).withStreamState(Jsons.jsonNode(new DbStreamState()))); + + Assertions.assertThrows(IllegalArgumentException.class, + () -> StateManagerFactory.createStateManager(AirbyteStateType.GLOBAL, List.of(airbyteStateMessage), catalog)); + } + + @Test + void testGlobalStateManagerCreationWithLegacyDataPresent() { + final ConfiguredAirbyteCatalog catalog = mock(ConfiguredAirbyteCatalog.class); + final AirbyteGlobalState globalState = + new AirbyteGlobalState().withSharedState(Jsons.jsonNode(new DbState().withCdcState(new CdcState().withState(Jsons.jsonNode(new DbState()))))) + .withStreamStates(List.of(new AirbyteStreamState().withStreamDescriptor(new StreamDescriptor().withNamespace(NAMESPACE).withName(NAME)) + .withStreamState(Jsons.jsonNode(new DbStreamState())))); + final AirbyteStateMessage airbyteStateMessage = + new AirbyteStateMessage().withType(AirbyteStateType.GLOBAL).withGlobal(globalState).withData(Jsons.jsonNode(new DbState())); + + final StateManager stateManager = StateManagerFactory.createStateManager(AirbyteStateType.GLOBAL, List.of(airbyteStateMessage), 
catalog); + + Assertions.assertNotNull(stateManager); + Assertions.assertEquals(GlobalStateManager.class, stateManager.getClass()); + } + + @Test + void testStreamStateManagerCreation() { + final ConfiguredAirbyteCatalog catalog = mock(ConfiguredAirbyteCatalog.class); + final AirbyteStateMessage airbyteStateMessage = new AirbyteStateMessage().withType(AirbyteStateType.STREAM) + .withStream(new AirbyteStreamState().withStreamDescriptor(new StreamDescriptor().withName(NAME).withNamespace( + NAMESPACE)).withStreamState(Jsons.jsonNode(new DbStreamState()))); + + final StateManager stateManager = StateManagerFactory.createStateManager(AirbyteStateType.STREAM, List.of(airbyteStateMessage), catalog); + + Assertions.assertNotNull(stateManager); + Assertions.assertEquals(StreamStateManager.class, stateManager.getClass()); + } + + @Test + void testStreamStateManagerCreationFromLegacy() { + final ConfiguredAirbyteCatalog catalog = mock(ConfiguredAirbyteCatalog.class); + final CdcState cdcState = new CdcState(); + final DbState dbState = new DbState() + .withCdcState(cdcState) + .withStreams(List.of(new DbStreamState().withStreamName(NAME).withStreamNamespace(NAMESPACE))); + final AirbyteStateMessage airbyteStateMessage = + new AirbyteStateMessage().withType(AirbyteStateType.LEGACY).withData(Jsons.jsonNode(dbState)); + + final StateManager stateManager = StateManagerFactory.createStateManager(AirbyteStateType.STREAM, List.of(airbyteStateMessage), catalog); + + Assertions.assertNotNull(stateManager); + Assertions.assertEquals(StreamStateManager.class, stateManager.getClass()); + } + + @Test + void testStreamStateManagerCreationFromGlobal() { + final ConfiguredAirbyteCatalog catalog = mock(ConfiguredAirbyteCatalog.class); + final AirbyteGlobalState globalState = + new AirbyteGlobalState().withSharedState(Jsons.jsonNode(new DbState().withCdcState(new CdcState().withState(Jsons.jsonNode(new DbState()))))) + .withStreamStates(List.of(new 
AirbyteStreamState().withStreamDescriptor(new StreamDescriptor().withNamespace(NAMESPACE).withName(NAME)) + .withStreamState(Jsons.jsonNode(new DbStreamState())))); + final AirbyteStateMessage airbyteStateMessage = new AirbyteStateMessage().withType(AirbyteStateType.GLOBAL).withGlobal(globalState); + + Assertions.assertThrows(IllegalArgumentException.class, + () -> StateManagerFactory.createStateManager(AirbyteStateType.STREAM, List.of(airbyteStateMessage), catalog)); + } + + @Test + void testStreamStateManagerCreationWithLegacyDataPresent() { + final ConfiguredAirbyteCatalog catalog = mock(ConfiguredAirbyteCatalog.class); + final AirbyteStateMessage airbyteStateMessage = new AirbyteStateMessage().withType(AirbyteStateType.STREAM) + .withStream(new AirbyteStreamState().withStreamDescriptor(new StreamDescriptor().withName(NAME).withNamespace( + NAMESPACE)).withStreamState(Jsons.jsonNode(new DbStreamState()))) + .withData(Jsons.jsonNode(new DbState())); + + final StateManager stateManager = StateManagerFactory.createStateManager(AirbyteStateType.STREAM, List.of(airbyteStateMessage), catalog); + + Assertions.assertNotNull(stateManager); + Assertions.assertEquals(StreamStateManager.class, stateManager.getClass()); + } + +} diff --git a/airbyte-integrations/connectors/source-relational-db/src/test/java/io/airbyte/integrations/source/relationaldb/state/StateTestConstants.java b/airbyte-integrations/connectors/source-relational-db/src/test/java/io/airbyte/integrations/source/relationaldb/state/StateTestConstants.java new file mode 100644 index 000000000000..e939c9aea87d --- /dev/null +++ b/airbyte-integrations/connectors/source-relational-db/src/test/java/io/airbyte/integrations/source/relationaldb/state/StateTestConstants.java @@ -0,0 +1,53 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+ */ + +package io.airbyte.integrations.source.relationaldb.state; + +import io.airbyte.integrations.base.AirbyteStreamNameNamespacePair; +import io.airbyte.integrations.source.relationaldb.models.DbStreamState; +import io.airbyte.protocol.models.AirbyteStream; +import io.airbyte.protocol.models.ConfiguredAirbyteCatalog; +import io.airbyte.protocol.models.ConfiguredAirbyteStream; +import java.util.Collections; +import java.util.List; +import java.util.Optional; +import org.testcontainers.shaded.com.google.common.collect.Lists; + +/** + * Collection of constants for use in state management-related tests. + */ +public final class StateTestConstants { + + public static final String NAMESPACE = "public"; + public static final String STREAM_NAME1 = "cars"; + public static final AirbyteStreamNameNamespacePair NAME_NAMESPACE_PAIR1 = new AirbyteStreamNameNamespacePair(STREAM_NAME1, NAMESPACE); + public static final String STREAM_NAME2 = "bicycles"; + public static final AirbyteStreamNameNamespacePair NAME_NAMESPACE_PAIR2 = new AirbyteStreamNameNamespacePair(STREAM_NAME2, NAMESPACE); + public static final String STREAM_NAME3 = "stationary_bicycles"; + public static final String CURSOR_FIELD1 = "year"; + public static final String CURSOR_FIELD2 = "generation"; + public static final String CURSOR = "2000"; + + private StateTestConstants() {} + + @SuppressWarnings("SameParameterValue") + public static Optional getState(final String cursorField, final String cursor) { + return Optional.of(new DbStreamState() + .withStreamName(STREAM_NAME1) + .withCursorField(Lists.newArrayList(cursorField)) + .withCursor(cursor)); + } + + public static Optional getCatalog(final String cursorField) { + return Optional.of(new ConfiguredAirbyteCatalog() + .withStreams(List.of(getStream(cursorField).orElse(null)))); + } + + public static Optional getStream(final String cursorField) { + return Optional.of(new ConfiguredAirbyteStream() + .withStream(new AirbyteStream().withName(STREAM_NAME1)) + 
.withCursorField(cursorField == null ? Collections.emptyList() : Lists.newArrayList(cursorField))); + } + +} diff --git a/airbyte-integrations/connectors/source-relational-db/src/test/java/io/airbyte/integrations/source/relationaldb/state/StreamStateManagerTest.java b/airbyte-integrations/connectors/source-relational-db/src/test/java/io/airbyte/integrations/source/relationaldb/state/StreamStateManagerTest.java new file mode 100644 index 000000000000..4b6876987fe4 --- /dev/null +++ b/airbyte-integrations/connectors/source-relational-db/src/test/java/io/airbyte/integrations/source/relationaldb/state/StreamStateManagerTest.java @@ -0,0 +1,255 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.integrations.source.relationaldb.state; + +import static io.airbyte.integrations.source.relationaldb.state.StateTestConstants.CURSOR; +import static io.airbyte.integrations.source.relationaldb.state.StateTestConstants.CURSOR_FIELD1; +import static io.airbyte.integrations.source.relationaldb.state.StateTestConstants.CURSOR_FIELD2; +import static io.airbyte.integrations.source.relationaldb.state.StateTestConstants.NAMESPACE; +import static io.airbyte.integrations.source.relationaldb.state.StateTestConstants.NAME_NAMESPACE_PAIR1; +import static io.airbyte.integrations.source.relationaldb.state.StateTestConstants.NAME_NAMESPACE_PAIR2; +import static io.airbyte.integrations.source.relationaldb.state.StateTestConstants.STREAM_NAME1; +import static io.airbyte.integrations.source.relationaldb.state.StateTestConstants.STREAM_NAME2; +import static io.airbyte.integrations.source.relationaldb.state.StateTestConstants.STREAM_NAME3; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.mockito.Mockito.mock; + +import io.airbyte.commons.json.Jsons; +import 
io.airbyte.integrations.base.AirbyteStreamNameNamespacePair; +import io.airbyte.integrations.source.relationaldb.models.DbState; +import io.airbyte.integrations.source.relationaldb.models.DbStreamState; +import io.airbyte.protocol.models.AirbyteStateMessage; +import io.airbyte.protocol.models.AirbyteStateMessage.AirbyteStateType; +import io.airbyte.protocol.models.AirbyteStream; +import io.airbyte.protocol.models.AirbyteStreamState; +import io.airbyte.protocol.models.ConfiguredAirbyteCatalog; +import io.airbyte.protocol.models.ConfiguredAirbyteStream; +import io.airbyte.protocol.models.StreamDescriptor; +import java.util.ArrayList; +import java.util.Comparator; +import java.util.List; +import java.util.Optional; +import java.util.stream.Collectors; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; + +/** + * Test suite for the {@link StreamStateManager} class. + */ +public class StreamStateManagerTest { + + @Test + void testCreationFromInvalidState() { + final AirbyteStateMessage airbyteStateMessage = new AirbyteStateMessage() + .withType(AirbyteStateType.STREAM) + .withStream(new AirbyteStreamState() + .withStreamDescriptor(new StreamDescriptor().withName(STREAM_NAME1).withNamespace(NAMESPACE)) + .withStreamState(Jsons.jsonNode("Not a state object"))); + final ConfiguredAirbyteCatalog catalog = mock(ConfiguredAirbyteCatalog.class); + + Assertions.assertDoesNotThrow(() -> { + final StateManager stateManager = new StreamStateManager(List.of(airbyteStateMessage), catalog); + assertNotNull(stateManager); + }); + } + + @Test + void testGetters() { + final List state = new ArrayList<>(); + state.add(createStreamState(STREAM_NAME1, NAMESPACE, List.of(CURSOR_FIELD1), CURSOR)); + state.add(createStreamState(STREAM_NAME2, NAMESPACE, List.of(), null)); + + final ConfiguredAirbyteCatalog catalog = new ConfiguredAirbyteCatalog() + .withStreams(List.of( + new ConfiguredAirbyteStream() + .withStream(new 
AirbyteStream().withName(STREAM_NAME1).withNamespace(NAMESPACE)) + .withCursorField(List.of(CURSOR_FIELD1)), + new ConfiguredAirbyteStream() + .withStream(new AirbyteStream().withName(STREAM_NAME2).withNamespace(NAMESPACE)))); + + final StateManager stateManager = new StreamStateManager(state, catalog); + + assertEquals(Optional.of(CURSOR_FIELD1), stateManager.getOriginalCursorField(NAME_NAMESPACE_PAIR1)); + assertEquals(Optional.of(CURSOR), stateManager.getOriginalCursor(NAME_NAMESPACE_PAIR1)); + assertEquals(Optional.of(CURSOR_FIELD1), stateManager.getCursorField(NAME_NAMESPACE_PAIR1)); + assertEquals(Optional.of(CURSOR), stateManager.getCursor(NAME_NAMESPACE_PAIR1)); + + assertEquals(Optional.empty(), stateManager.getOriginalCursorField(NAME_NAMESPACE_PAIR2)); + assertEquals(Optional.empty(), stateManager.getOriginalCursor(NAME_NAMESPACE_PAIR2)); + assertEquals(Optional.empty(), stateManager.getCursorField(NAME_NAMESPACE_PAIR2)); + assertEquals(Optional.empty(), stateManager.getCursor(NAME_NAMESPACE_PAIR2)); + } + + @Test + void testToState() { + final ConfiguredAirbyteCatalog catalog = new ConfiguredAirbyteCatalog() + .withStreams(List.of( + new ConfiguredAirbyteStream() + .withStream(new AirbyteStream().withName(STREAM_NAME1).withNamespace(NAMESPACE)) + .withCursorField(List.of(CURSOR_FIELD1)), + new ConfiguredAirbyteStream() + .withStream(new AirbyteStream().withName(STREAM_NAME2).withNamespace(NAMESPACE)) + .withCursorField(List.of(CURSOR_FIELD2)), + new ConfiguredAirbyteStream() + .withStream(new AirbyteStream().withName(STREAM_NAME3).withNamespace(NAMESPACE)))); + + final StateManager stateManager = new StreamStateManager(createDefaultState(), catalog); + + final DbState expectedFirstDbState = new DbState() + .withCdc(false) + .withStreams(List.of( + new DbStreamState() + .withStreamName(STREAM_NAME1) + .withStreamNamespace(NAMESPACE) + .withCursorField(List.of(CURSOR_FIELD1)) + .withCursor("a"), + new DbStreamState() + .withStreamName(STREAM_NAME2) + 
.withStreamNamespace(NAMESPACE) + .withCursorField(List.of(CURSOR_FIELD2)), + new DbStreamState() + .withStreamName(STREAM_NAME3) + .withStreamNamespace(NAMESPACE)) + .stream().sorted(Comparator.comparing(DbStreamState::getStreamName)).collect(Collectors.toList())); + final AirbyteStateMessage expectedFirstEmission = + createStreamState(STREAM_NAME1, NAMESPACE, List.of(CURSOR_FIELD1), "a").withData(Jsons.jsonNode(expectedFirstDbState)); + + final AirbyteStateMessage actualFirstEmission = stateManager.updateAndEmit(NAME_NAMESPACE_PAIR1, "a"); + assertEquals(expectedFirstEmission, actualFirstEmission); + + final DbState expectedSecondDbState = new DbState() + .withCdc(false) + .withStreams(List.of( + new DbStreamState() + .withStreamName(STREAM_NAME1) + .withStreamNamespace(NAMESPACE) + .withCursorField(List.of(CURSOR_FIELD1)) + .withCursor("a"), + new DbStreamState() + .withStreamName(STREAM_NAME2) + .withStreamNamespace(NAMESPACE) + .withCursorField(List.of(CURSOR_FIELD2)) + .withCursor("b"), + new DbStreamState() + .withStreamName(STREAM_NAME3) + .withStreamNamespace(NAMESPACE)) + .stream().sorted(Comparator.comparing(DbStreamState::getStreamName)).collect(Collectors.toList())); + final AirbyteStateMessage expectedSecondEmission = + createStreamState(STREAM_NAME2, NAMESPACE, List.of(CURSOR_FIELD2), "b").withData(Jsons.jsonNode(expectedSecondDbState)); + + final AirbyteStateMessage actualSecondEmission = stateManager.updateAndEmit(NAME_NAMESPACE_PAIR2, "b"); + assertEquals(expectedSecondEmission, actualSecondEmission); + } + + @Test + void testToStateWithoutCursorInfo() { + final ConfiguredAirbyteCatalog catalog = new ConfiguredAirbyteCatalog() + .withStreams(List.of( + new ConfiguredAirbyteStream() + .withStream(new AirbyteStream().withName(STREAM_NAME1).withNamespace(NAMESPACE)) + .withCursorField(List.of(CURSOR_FIELD1)), + new ConfiguredAirbyteStream() + .withStream(new AirbyteStream().withName(STREAM_NAME2).withNamespace(NAMESPACE)) + 
.withCursorField(List.of(CURSOR_FIELD2)), + new ConfiguredAirbyteStream() + .withStream(new AirbyteStream().withName(STREAM_NAME3).withNamespace(NAMESPACE)))); + final AirbyteStreamNameNamespacePair airbyteStreamNameNamespacePair = new AirbyteStreamNameNamespacePair("other", "other"); + + final StateManager stateManager = new StreamStateManager(createDefaultState(), catalog); + final AirbyteStateMessage airbyteStateMessage = stateManager.toState(Optional.of(airbyteStreamNameNamespacePair)); + assertNotNull(airbyteStateMessage); + assertEquals(AirbyteStateType.STREAM, airbyteStateMessage.getType()); + assertNotNull(airbyteStateMessage.getStream()); + } + + @Test + void testToStateWithoutStreamPair() { + final ConfiguredAirbyteCatalog catalog = new ConfiguredAirbyteCatalog() + .withStreams(List.of( + new ConfiguredAirbyteStream() + .withStream(new AirbyteStream().withName(STREAM_NAME1).withNamespace(NAMESPACE)) + .withCursorField(List.of(CURSOR_FIELD1)), + new ConfiguredAirbyteStream() + .withStream(new AirbyteStream().withName(STREAM_NAME2).withNamespace(NAMESPACE)) + .withCursorField(List.of(CURSOR_FIELD2)), + new ConfiguredAirbyteStream() + .withStream(new AirbyteStream().withName(STREAM_NAME3).withNamespace(NAMESPACE)))); + + final StateManager stateManager = new StreamStateManager(createDefaultState(), catalog); + final AirbyteStateMessage airbyteStateMessage = stateManager.toState(Optional.empty()); + assertNotNull(airbyteStateMessage); + assertEquals(AirbyteStateType.STREAM, airbyteStateMessage.getType()); + assertNotNull(airbyteStateMessage.getStream()); + assertNull(airbyteStateMessage.getStream().getStreamState()); + } + + @Test + void testToStateNullCursorField() { + final ConfiguredAirbyteCatalog catalog = new ConfiguredAirbyteCatalog() + .withStreams(List.of( + new ConfiguredAirbyteStream() + .withStream(new AirbyteStream().withName(STREAM_NAME1).withNamespace(NAMESPACE)) + .withCursorField(List.of(CURSOR_FIELD1)), + new ConfiguredAirbyteStream() + 
.withStream(new AirbyteStream().withName(STREAM_NAME2).withNamespace(NAMESPACE)))); + final StateManager stateManager = new StreamStateManager(createDefaultState(), catalog); + + final DbState expectedFirstDbState = new DbState() + .withCdc(false) + .withStreams(List.of( + new DbStreamState() + .withStreamName(STREAM_NAME1) + .withStreamNamespace(NAMESPACE) + .withCursorField(List.of(CURSOR_FIELD1)) + .withCursor("a"), + new DbStreamState() + .withStreamName(STREAM_NAME2) + .withStreamNamespace(NAMESPACE)) + .stream().sorted(Comparator.comparing(DbStreamState::getStreamName)).collect(Collectors.toList())); + + final AirbyteStateMessage expectedFirstEmission = + createStreamState(STREAM_NAME1, NAMESPACE, List.of(CURSOR_FIELD1), "a").withData(Jsons.jsonNode(expectedFirstDbState)); + final AirbyteStateMessage actualFirstEmission = stateManager.updateAndEmit(NAME_NAMESPACE_PAIR1, "a"); + assertEquals(expectedFirstEmission, actualFirstEmission); + } + + @Test + void testCdcStateManager() { + final ConfiguredAirbyteCatalog catalog = mock(ConfiguredAirbyteCatalog.class); + final StateManager stateManager = new StreamStateManager( + List.of(new AirbyteStateMessage().withType(AirbyteStateType.STREAM).withStream(new AirbyteStreamState())), catalog); + Assertions.assertThrows(UnsupportedOperationException.class, () -> stateManager.getCdcStateManager()); + } + + private List createDefaultState() { + return List.of(new AirbyteStateMessage().withType(AirbyteStateType.STREAM).withStream(new AirbyteStreamState())); + } + + private AirbyteStateMessage createStreamState(final String name, + final String namespace, + final List cursorFields, + final String cursorValue) { + final DbStreamState dbStreamState = new DbStreamState() + .withStreamName(name) + .withStreamNamespace(namespace); + + if (cursorFields != null && !cursorFields.isEmpty()) { + dbStreamState.withCursorField(cursorFields); + } + + if (cursorValue != null) { + dbStreamState.withCursor(cursorValue); + } + + return new 
AirbyteStateMessage() + .withType(AirbyteStateType.STREAM) + .withStream(new AirbyteStreamState() + .withStreamDescriptor(new StreamDescriptor().withName(name).withNamespace(namespace)) + .withStreamState(Jsons.jsonNode(dbStreamState))); + } + +} diff --git a/airbyte-integrations/connectors/source-relational-db/src/test/resources/states/global.json b/airbyte-integrations/connectors/source-relational-db/src/test/resources/states/global.json new file mode 100644 index 000000000000..5b1c5189b5fe --- /dev/null +++ b/airbyte-integrations/connectors/source-relational-db/src/test/resources/states/global.json @@ -0,0 +1,49 @@ +[ + { + "type": "GLOBAL", + "global": { + "shared_state": { + "state": { + "foo": "bar", + "baz": 5 + } + }, + "stream_states": [ + { + "stream_descriptor": { + "name": "bicycles", + "namespace": "public" + }, + "stream_state": { + "stream_name": "bicycles", + "stream_namespace": "public", + "cursor_field": ["generation"] + } + }, + { + "stream_descriptor": { + "name": "cars", + "namespace": "public" + }, + "stream_state": { + "stream_name": "cars", + "stream_namespace": "public", + "cursor_field": ["year"], + "cursor": "a" + } + }, + { + "stream_descriptor": { + "name": "stationary_bicycles", + "namespace": "public" + }, + "stream_state": { + "stream_name": "stationary_bicycles", + "stream_namespace": "public", + "cursor_field": [] + } + } + ] + } + } +] diff --git a/airbyte-integrations/connectors/source-relational-db/src/test/resources/states/legacy.json b/airbyte-integrations/connectors/source-relational-db/src/test/resources/states/legacy.json new file mode 100644 index 000000000000..e20bdc553087 --- /dev/null +++ b/airbyte-integrations/connectors/source-relational-db/src/test/resources/states/legacy.json @@ -0,0 +1,17 @@ +{ + "cdc": false, + "streams": [ + { + "cursor": "4", + "stream_name": "cars", + "cursor_field": ["id"], + "stream_namespace": "public" + }, + { + "cursor": "1", + "stream_name": "us_states", + "cursor_field": ["id"], + 
"stream_namespace": "public" + } + ] +} diff --git a/airbyte-integrations/connectors/source-relational-db/src/test/resources/states/per_stream.json b/airbyte-integrations/connectors/source-relational-db/src/test/resources/states/per_stream.json new file mode 100644 index 000000000000..9644b13ed156 --- /dev/null +++ b/airbyte-integrations/connectors/source-relational-db/src/test/resources/states/per_stream.json @@ -0,0 +1,32 @@ +[ + { + "type": "STREAM", + "stream": { + "stream_descriptor": { + "name": "id_and_name", + "namespace": "public" + }, + "stream_state": { + "stream_name": "id_and_name", + "stream_namespace": "public", + "cursor_field": ["id"], + "cursor": "5" + } + } + }, + { + "type": "STREAM", + "stream": { + "stream_descriptor": { + "name": "other", + "namespace": "public" + }, + "stream_state": { + "stream_name": "other", + "stream_namespace": "public", + "cursor_field": ["id"], + "cursor": "2" + } + } + } +] diff --git a/airbyte-integrations/connectors/source-salesforce/Dockerfile b/airbyte-integrations/connectors/source-salesforce/Dockerfile index aadd91b9170f..e7bbb0550892 100644 --- a/airbyte-integrations/connectors/source-salesforce/Dockerfile +++ b/airbyte-integrations/connectors/source-salesforce/Dockerfile @@ -13,5 +13,5 @@ RUN pip install . 
ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] -LABEL io.airbyte.version=1.0.9 +LABEL io.airbyte.version=1.0.10 LABEL io.airbyte.name=airbyte/source-salesforce diff --git a/airbyte-integrations/connectors/source-salesforce/integration_tests/integration_test.py b/airbyte-integrations/connectors/source-salesforce/integration_tests/integration_test.py index f60bc0734085..acc2bc8f706a 100644 --- a/airbyte-integrations/connectors/source-salesforce/integration_tests/integration_test.py +++ b/airbyte-integrations/connectors/source-salesforce/integration_tests/integration_test.py @@ -141,8 +141,10 @@ def test_parallel_discover(input_sandbox_config): start_time = datetime.now() parallel_schemas = sf.generate_schemas(stream_objects) parallel_loading_time = (datetime.now() - start_time).total_seconds() + + print(f'\nparallel discover ~ {round(consecutive_loading_time/parallel_loading_time, 1)}x faster over traditional.\n') - assert parallel_loading_time < consecutive_loading_time / 5.0, "parallel should be more than 10x faster" + assert parallel_loading_time < consecutive_loading_time, "parallel should be more than 10x faster" assert set(consecutive_schemas.keys()) == set(parallel_schemas.keys()) for stream_name, schema in consecutive_schemas.items(): assert schema == parallel_schemas[stream_name] diff --git a/airbyte-integrations/connectors/source-salesforce/source_salesforce/streams.py b/airbyte-integrations/connectors/source-salesforce/source_salesforce/streams.py index a506a2632793..22ec66f84191 100644 --- a/airbyte-integrations/connectors/source-salesforce/source_salesforce/streams.py +++ b/airbyte-integrations/connectors/source-salesforce/source_salesforce/streams.py @@ -312,8 +312,8 @@ def read_with_chunks(self, path: str = None, chunk_size: int = 100) -> Iterable[ chunks = pd.read_csv(data, chunksize=chunk_size, iterator=True, dialect="unix") for chunk in chunks: chunk = chunk.replace({nan: None}).to_dict(orient="records") - for n, row in enumerate(chunk, 
1): - yield n, row + for row in chunk: + yield row except pd.errors.EmptyDataError as e: self.logger.info(f"Empty data received. {e}") yield from [] @@ -382,12 +382,15 @@ def read_records( count = 0 record: Mapping[str, Any] = {} - for count, record in self.read_with_chunks(self.download_data(url=job_full_url)): + for record in self.read_with_chunks(self.download_data(url=job_full_url)): + count += 1 yield record self.delete_job(url=job_full_url) if count < self.page_size: - # this is a last page + # Salesforce doesn't give a next token or something to know the request was + # the last page. The connectors will sync batches in `page_size` and + # considers that batch is smaller than the `page_size` it must be the last page. break next_page_token = self.next_page_token(record) diff --git a/airbyte-integrations/connectors/source-salesforce/unit_tests/api_test.py b/airbyte-integrations/connectors/source-salesforce/unit_tests/api_test.py index 0888317eee54..961de6aae754 100644 --- a/airbyte-integrations/connectors/source-salesforce/unit_tests/api_test.py +++ b/airbyte-integrations/connectors/source-salesforce/unit_tests/api_test.py @@ -215,7 +215,7 @@ def test_download_data_filter_null_bytes(stream_config, stream_api): m.register_uri("GET", f"{job_full_url}/results", content=b'"Id","IsDeleted"\n\x00"0014W000027f6UwQAI","false"\n\x00\x00') res = list(stream.read_with_chunks(stream.download_data(url=job_full_url))) - assert res == [(1, {"Id": "0014W000027f6UwQAI", "IsDeleted": False})] + assert res == [{"Id": "0014W000027f6UwQAI", "IsDeleted": False}] def test_check_connection_rate_limit(stream_config): @@ -427,7 +427,7 @@ def test_csv_reader_dialect_unix(): with requests_mock.Mocker() as m: m.register_uri("GET", url + "/results", text=text) - result = [dict(i[1]) for i in stream.read_with_chunks(stream.download_data(url))] + result = [i for i in stream.read_with_chunks(stream.download_data(url))] assert result == data diff --git 
a/airbyte-integrations/connectors/source-sftp/Dockerfile b/airbyte-integrations/connectors/source-sftp/Dockerfile index e71fcc42082e..4f1bf333ae73 100644 --- a/airbyte-integrations/connectors/source-sftp/Dockerfile +++ b/airbyte-integrations/connectors/source-sftp/Dockerfile @@ -14,5 +14,5 @@ ENV APPLICATION source-sftp COPY --from=build /airbyte /airbyte -LABEL io.airbyte.version=0.1.1 +LABEL io.airbyte.version=0.1.2 LABEL io.airbyte.name=airbyte/source-sftp diff --git a/airbyte-integrations/connectors/source-sftp/build.gradle b/airbyte-integrations/connectors/source-sftp/build.gradle index 9346d556d61b..410e4f1b8dfa 100644 --- a/airbyte-integrations/connectors/source-sftp/build.gradle +++ b/airbyte-integrations/connectors/source-sftp/build.gradle @@ -18,5 +18,5 @@ dependencies { integrationTestJavaImplementation project(':airbyte-integrations:bases:standard-source-test') integrationTestJavaImplementation project(':airbyte-integrations:connectors:source-sftp') - testImplementation libs.testcontainers + testImplementation libs.connectors.testcontainers } diff --git a/airbyte-integrations/connectors/source-stripe/integration_tests/configured_catalog.json b/airbyte-integrations/connectors/source-stripe/integration_tests/configured_catalog.json index 9169e2499f94..2816dd231484 100644 --- a/airbyte-integrations/connectors/source-stripe/integration_tests/configured_catalog.json +++ b/airbyte-integrations/connectors/source-stripe/integration_tests/configured_catalog.json @@ -11,7 +11,8 @@ }, "sync_mode": "incremental", "destination_sync_mode": "overwrite", - "cursor_field": ["created"] + "cursor_field": ["created"], + "primary_key": [["id"]] }, { "stream": { @@ -21,7 +22,8 @@ "source_defined_primary_key": [["id"]] }, "sync_mode": "full_refresh", - "destination_sync_mode": "overwrite" + "destination_sync_mode": "overwrite", + "primary_key": [["id"]] }, { "stream": { @@ -34,7 +36,8 @@ }, "sync_mode": "incremental", "destination_sync_mode": "overwrite", - "cursor_field": 
["created"] + "cursor_field": ["created"], + "primary_key": [["id"]] }, { "stream": { @@ -47,7 +50,8 @@ }, "sync_mode": "full_refresh", "destination_sync_mode": "overwrite", - "cursor_field": ["expires_at"] + "cursor_field": ["expires_at"], + "primary_key": [["id"]] }, { "stream": { @@ -60,7 +64,8 @@ }, "sync_mode": "full_refresh", "destination_sync_mode": "overwrite", - "cursor_field": ["checkout_session_expires_at"] + "cursor_field": ["checkout_session_expires_at"], + "primary_key": [["id"]] }, { "stream": { @@ -73,7 +78,8 @@ }, "sync_mode": "incremental", "destination_sync_mode": "overwrite", - "cursor_field": ["created"] + "cursor_field": ["created"], + "primary_key": [["id"]] }, { "stream": { @@ -83,7 +89,8 @@ "source_defined_primary_key": [["id"]] }, "sync_mode": "full_refresh", - "destination_sync_mode": "overwrite" + "destination_sync_mode": "overwrite", + "primary_key": [["id"]] }, { "stream": { @@ -96,7 +103,8 @@ }, "sync_mode": "incremental", "destination_sync_mode": "overwrite", - "cursor_field": ["created"] + "cursor_field": ["created"], + "primary_key": [["id"]] }, { "stream": { @@ -109,7 +117,8 @@ }, "sync_mode": "incremental", "destination_sync_mode": "overwrite", - "cursor_field": ["created"] + "cursor_field": ["created"], + "primary_key": [["id"]] }, { "stream": { @@ -122,7 +131,8 @@ }, "sync_mode": "incremental", "destination_sync_mode": "overwrite", - "cursor_field": ["created"] + "cursor_field": ["created"], + "primary_key": [["id"]] }, { "stream": { @@ -135,7 +145,8 @@ }, "sync_mode": "incremental", "destination_sync_mode": "overwrite", - "cursor_field": ["date"] + "cursor_field": ["date"], + "primary_key": [["id"]] }, { "stream": { @@ -145,7 +156,8 @@ "source_defined_primary_key": [["id"]] }, "sync_mode": "full_refresh", - "destination_sync_mode": "overwrite" + "destination_sync_mode": "overwrite", + "primary_key": [["id"]] }, { "stream": { @@ -158,7 +170,8 @@ }, "sync_mode": "incremental", "destination_sync_mode": "overwrite", - 
"cursor_field": ["created"] + "cursor_field": ["created"], + "primary_key": [["id"]] }, { "stream": { @@ -171,7 +184,8 @@ }, "sync_mode": "incremental", "destination_sync_mode": "overwrite", - "cursor_field": ["created"] + "cursor_field": ["created"], + "primary_key": [["id"]] }, { "stream": { @@ -184,7 +198,8 @@ }, "sync_mode": "incremental", "destination_sync_mode": "overwrite", - "cursor_field": ["created"] + "cursor_field": ["created"], + "primary_key": [["id"]] }, { "stream": { @@ -197,7 +212,8 @@ }, "sync_mode": "incremental", "destination_sync_mode": "overwrite", - "cursor_field": ["created"] + "cursor_field": ["created"], + "primary_key": [["id"]] }, { "stream": { @@ -210,7 +226,8 @@ }, "sync_mode": "incremental", "destination_sync_mode": "overwrite", - "cursor_field": ["created"] + "cursor_field": ["created"], + "primary_key": [["id"]] }, { "stream": { @@ -223,7 +240,8 @@ }, "sync_mode": "incremental", "destination_sync_mode": "overwrite", - "cursor_field": ["created"] + "cursor_field": ["created"], + "primary_key": [["id"]] }, { "stream": { @@ -236,7 +254,8 @@ }, "sync_mode": "incremental", "destination_sync_mode": "overwrite", - "cursor_field": ["created"] + "cursor_field": ["created"], + "primary_key": [["id"]] }, { "stream": { @@ -246,7 +265,8 @@ "source_defined_primary_key": [["id"]] }, "sync_mode": "full_refresh", - "destination_sync_mode": "overwrite" + "destination_sync_mode": "overwrite", + "primary_key": [["id"]] }, { "stream": { @@ -259,7 +279,8 @@ }, "sync_mode": "incremental", "destination_sync_mode": "overwrite", - "cursor_field": ["created"] + "cursor_field": ["created"], + "primary_key": [["id"]] }, { "stream": { @@ -272,7 +293,8 @@ }, "sync_mode": "incremental", "destination_sync_mode": "overwrite", - "cursor_field": ["created"] + "cursor_field": ["created"], + "primary_key": [["id"]] } ] } diff --git a/airbyte-integrations/connectors/source-stripe/integration_tests/connected_account_configured_catalog.json 
b/airbyte-integrations/connectors/source-stripe/integration_tests/connected_account_configured_catalog.json index eed53127e063..9305587c765f 100644 --- a/airbyte-integrations/connectors/source-stripe/integration_tests/connected_account_configured_catalog.json +++ b/airbyte-integrations/connectors/source-stripe/integration_tests/connected_account_configured_catalog.json @@ -11,7 +11,8 @@ }, "sync_mode": "incremental", "destination_sync_mode": "overwrite", - "cursor_field": ["created"] + "cursor_field": ["created"], + "primary_key": [["id"]] }, { "stream": { @@ -24,7 +25,8 @@ }, "sync_mode": "incremental", "destination_sync_mode": "overwrite", - "cursor_field": ["created"] + "cursor_field": ["created"], + "primary_key": [["id"]] }, { "stream": { @@ -36,7 +38,9 @@ "source_defined_primary_key": [["id"]] }, "sync_mode": "incremental", - "destination_sync_mode": "append" + "destination_sync_mode": "append", + "cursor_field": ["created"], + "primary_key": [["id"]] } ] } diff --git a/airbyte-integrations/connectors/source-stripe/integration_tests/full_refresh_configured_catalog.json b/airbyte-integrations/connectors/source-stripe/integration_tests/full_refresh_configured_catalog.json index 7c2994fd3ec4..a6fbc37c8e2d 100644 --- a/airbyte-integrations/connectors/source-stripe/integration_tests/full_refresh_configured_catalog.json +++ b/airbyte-integrations/connectors/source-stripe/integration_tests/full_refresh_configured_catalog.json @@ -17,7 +17,8 @@ "source_defined_primary_key": [["id"]] }, "sync_mode": "full_refresh", - "destination_sync_mode": "overwrite" + "destination_sync_mode": "overwrite", + "primary_key": [["id"]] }, { "stream": { @@ -27,7 +28,8 @@ "source_defined_primary_key": [["id"]] }, "sync_mode": "full_refresh", - "destination_sync_mode": "overwrite" + "destination_sync_mode": "overwrite", + "primary_key": [["id"]] }, { "stream": { @@ -40,7 +42,8 @@ }, "sync_mode": "full_refresh", "destination_sync_mode": "overwrite", - "cursor_field": 
["checkout_session_expires_at"] + "cursor_field": ["checkout_session_expires_at"], + "primary_key": [["id"]] } ] } diff --git a/airbyte-integrations/connectors/source-stripe/integration_tests/non_invoice_line_items_catalog.json b/airbyte-integrations/connectors/source-stripe/integration_tests/non_invoice_line_items_catalog.json index 1467d1f2242e..f9e9239038e4 100644 --- a/airbyte-integrations/connectors/source-stripe/integration_tests/non_invoice_line_items_catalog.json +++ b/airbyte-integrations/connectors/source-stripe/integration_tests/non_invoice_line_items_catalog.json @@ -11,7 +11,8 @@ }, "sync_mode": "incremental", "destination_sync_mode": "overwrite", - "cursor_field": ["created"] + "cursor_field": ["created"], + "primary_key": [["id"]] }, { "stream": { @@ -24,7 +25,8 @@ }, "sync_mode": "incremental", "destination_sync_mode": "overwrite", - "cursor_field": ["created"] + "cursor_field": ["created"], + "primary_key": [["id"]] }, { "stream": { @@ -37,7 +39,8 @@ }, "sync_mode": "incremental", "destination_sync_mode": "overwrite", - "cursor_field": ["created"] + "cursor_field": ["created"], + "primary_key": [["id"]] }, { "stream": { @@ -50,7 +53,8 @@ }, "sync_mode": "incremental", "destination_sync_mode": "overwrite", - "cursor_field": ["created"] + "cursor_field": ["created"], + "primary_key": [["id"]] }, { "stream": { @@ -63,7 +67,8 @@ }, "sync_mode": "incremental", "destination_sync_mode": "overwrite", - "cursor_field": ["created"] + "cursor_field": ["created"], + "primary_key": [["id"]] }, { "stream": { @@ -76,7 +81,8 @@ }, "sync_mode": "incremental", "destination_sync_mode": "overwrite", - "cursor_field": ["date"] + "cursor_field": ["date"], + "primary_key": [["id"]] }, { "stream": { @@ -89,7 +95,8 @@ }, "sync_mode": "incremental", "destination_sync_mode": "overwrite", - "cursor_field": ["created"] + "cursor_field": ["created"], + "primary_key": [["id"]] }, { "stream": { @@ -102,7 +109,8 @@ }, "sync_mode": "incremental", "destination_sync_mode": 
"overwrite", - "cursor_field": ["created"] + "cursor_field": ["created"], + "primary_key": [["id"]] }, { "stream": { @@ -115,7 +123,8 @@ }, "sync_mode": "incremental", "destination_sync_mode": "overwrite", - "cursor_field": ["created"] + "cursor_field": ["created"], + "primary_key": [["id"]] }, { "stream": { @@ -128,7 +137,8 @@ }, "sync_mode": "incremental", "destination_sync_mode": "overwrite", - "cursor_field": ["created"] + "cursor_field": ["created"], + "primary_key": [["id"]] }, { "stream": { @@ -141,7 +151,8 @@ }, "sync_mode": "incremental", "destination_sync_mode": "overwrite", - "cursor_field": ["created"] + "cursor_field": ["created"], + "primary_key": [["id"]] }, { "stream": { @@ -154,7 +165,8 @@ }, "sync_mode": "incremental", "destination_sync_mode": "overwrite", - "cursor_field": ["created"] + "cursor_field": ["created"], + "primary_key": [["id"]] } ] } diff --git a/airbyte-integrations/connectors/source-tidb/Dockerfile b/airbyte-integrations/connectors/source-tidb/Dockerfile index 6179f1f2b654..d322630a76e5 100755 --- a/airbyte-integrations/connectors/source-tidb/Dockerfile +++ b/airbyte-integrations/connectors/source-tidb/Dockerfile @@ -17,5 +17,5 @@ ENV APPLICATION source-tidb COPY --from=build /airbyte /airbyte # Airbyte's build system uses these labels to know what to name and tag the docker images produced by this Dockerfile. 
-LABEL io.airbyte.version=0.1.1 +LABEL io.airbyte.version=0.1.2 LABEL io.airbyte.name=airbyte/source-tidb diff --git a/airbyte-integrations/connectors/source-tidb/build.gradle b/airbyte-integrations/connectors/source-tidb/build.gradle index 93b915be4516..7676d78d77af 100755 --- a/airbyte-integrations/connectors/source-tidb/build.gradle +++ b/airbyte-integrations/connectors/source-tidb/build.gradle @@ -20,7 +20,7 @@ dependencies { implementation 'mysql:mysql-connector-java:8.0.22' // Add testcontainers and use GenericContainer for TiDB - implementation libs.testcontainers + implementation libs.connectors.testcontainers.tidb testImplementation testFixtures(project(':airbyte-integrations:connectors:source-jdbc')) @@ -29,7 +29,7 @@ dependencies { integrationTestJavaImplementation project(':airbyte-integrations:connectors:source-tidb') integrationTestJavaImplementation project(':airbyte-integrations:bases:standard-source-test') - integrationTestJavaImplementation libs.testcontainers + integrationTestJavaImplementation libs.connectors.testcontainers.tidb implementation files(project(':airbyte-integrations:bases:base-java').airbyteDocker.outputs) integrationTestJavaImplementation files(project(':airbyte-integrations:bases:base-java').airbyteDocker.outputs) diff --git a/airbyte-integrations/connectors/source-tidb/src/test-integration/java/io/airbyte/integrations/source/tidb/TiDBSourceAcceptanceTest.java b/airbyte-integrations/connectors/source-tidb/src/test-integration/java/io/airbyte/integrations/source/tidb/TiDBSourceAcceptanceTest.java index c966e2283da7..f4f5a262a7fe 100755 --- a/airbyte-integrations/connectors/source-tidb/src/test-integration/java/io/airbyte/integrations/source/tidb/TiDBSourceAcceptanceTest.java +++ b/airbyte-integrations/connectors/source-tidb/src/test-integration/java/io/airbyte/integrations/source/tidb/TiDBSourceAcceptanceTest.java @@ -14,6 +14,7 @@ import io.airbyte.integrations.base.ssh.SshHelpers; import 
io.airbyte.integrations.standardtest.source.SourceAcceptanceTest; import io.airbyte.integrations.standardtest.source.TestDestinationEnv; +import io.airbyte.integrations.util.HostPortResolver; import io.airbyte.protocol.models.CatalogHelpers; import io.airbyte.protocol.models.ConfiguredAirbyteCatalog; import io.airbyte.protocol.models.ConfiguredAirbyteStream; @@ -43,8 +44,8 @@ protected void setupEnvironment(final TestDestinationEnv testEnv) throws Excepti container.start(); config = Jsons.jsonNode(ImmutableMap.builder() - .put("host", "127.0.0.1") - .put("port", container.getFirstMappedPort()) + .put("host", HostPortResolver.resolveHost(container)) + .put("port", HostPortResolver.resolvePort(container)) .put("username", "root") .put("database", "test") .build()); diff --git a/airbyte-integrations/connectors/source-tiktok-marketing/Dockerfile b/airbyte-integrations/connectors/source-tiktok-marketing/Dockerfile index 04834b7eab4e..ce82afb0fa90 100644 --- a/airbyte-integrations/connectors/source-tiktok-marketing/Dockerfile +++ b/airbyte-integrations/connectors/source-tiktok-marketing/Dockerfile @@ -32,5 +32,5 @@ COPY source_tiktok_marketing ./source_tiktok_marketing ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] -LABEL io.airbyte.version=0.1.12 +LABEL io.airbyte.version=0.1.13 LABEL io.airbyte.name=airbyte/source-tiktok-marketing diff --git a/airbyte-integrations/connectors/source-tiktok-marketing/integration_tests/configured_catalog.json b/airbyte-integrations/connectors/source-tiktok-marketing/integration_tests/configured_catalog.json index 89d4d9679c38..3bfa7120fba3 100644 --- a/airbyte-integrations/connectors/source-tiktok-marketing/integration_tests/configured_catalog.json +++ b/airbyte-integrations/connectors/source-tiktok-marketing/integration_tests/configured_catalog.json @@ -21,6 +21,17 @@ "sync_mode": "full_refresh", "destination_sync_mode": "append" }, + { + "stream": { + "name": 
"advertisers_reports", + "json_schema": {}, + "supported_sync_modes": ["full_refresh", "incremental"], + "source_defined_cursor": true, + "default_cursor_field": ["dimensions", "stat_time_day"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "append" + }, { "stream": { "name": "advertisers", diff --git a/airbyte-integrations/connectors/source-tiktok-marketing/source_tiktok_marketing/streams.py b/airbyte-integrations/connectors/source-tiktok-marketing/source_tiktok_marketing/streams.py index fae6fad7b01b..31359e394634 100644 --- a/airbyte-integrations/connectors/source-tiktok-marketing/source_tiktok_marketing/streams.py +++ b/airbyte-integrations/connectors/source-tiktok-marketing/source_tiktok_marketing/streams.py @@ -32,6 +32,24 @@ "secondary_goal_result_rate", "cash_spend", "voucher_spend", + "video_play_actions", + "video_watched_2s", + "video_watched_6s", + "average_video_play", + "average_video_play_per_user", + "video_views_p25", + "video_views_p50", + "video_views_p75", + "video_views_p100", + "profile_visits", + "likes", + "comments", + "shares", + "follows", + "clicks_on_music_disc", + "real_time_app_install", + "real_time_app_install_cost", + "app_install", ] T = TypeVar("T") @@ -486,7 +504,35 @@ def _get_reporting_dimensions(self): def _get_metrics(self): # common metrics for all reporting levels - result = ["spend", "cpc", "cpm", "impressions", "clicks", "ctr", "reach", "cost_per_1000_reached", "frequency"] + result = [ + "spend", + "cpc", + "cpm", + "impressions", + "clicks", + "ctr", + "reach", + "cost_per_1000_reached", + "frequency", + "video_play_actions", + "video_watched_2s", + "video_watched_6s", + "average_video_play", + "average_video_play_per_user", + "video_views_p25", + "video_views_p50", + "video_views_p75", + "video_views_p100", + "profile_visits", + "likes", + "comments", + "shares", + "follows", + "clicks_on_music_disc", + "real_time_app_install", + "real_time_app_install_cost", + "app_install", + ] if self.report_level 
== ReportLevel.ADVERTISER and self.report_granularity == ReportGranularity.DAY: # https://ads.tiktok.com/marketing_api/docs?id=1707957200780290 diff --git a/airbyte-integrations/connectors/source-tiktok-marketing/unit_tests/streams_test.py b/airbyte-integrations/connectors/source-tiktok-marketing/unit_tests/streams_test.py index 73d74913a17c..420f67c1081b 100644 --- a/airbyte-integrations/connectors/source-tiktok-marketing/unit_tests/streams_test.py +++ b/airbyte-integrations/connectors/source-tiktok-marketing/unit_tests/streams_test.py @@ -123,10 +123,10 @@ def test_stream_slices_report(advertiser_ids, granularity, slices_expected, pend @pytest.mark.parametrize( "stream, metrics_number", [ - (AdsReports, 36), - (AdGroupsReports, 33), - (AdvertisersReports, 11), - (CampaignsReports, 10), + (AdsReports, 54), + (AdGroupsReports, 51), + (AdvertisersReports, 29), + (CampaignsReports, 28), (AdvertisersAudienceReports, 6), (AdsAudienceReports, 30), ], @@ -140,10 +140,10 @@ def test_basic_reports_get_metrics_day(stream, metrics_number): @pytest.mark.parametrize( "stream, metrics_number", [ - (AdsReports, 36), - (AdGroupsReports, 33), - (AdvertisersReports, 9), - (CampaignsReports, 10), + (AdsReports, 54), + (AdGroupsReports, 51), + (AdvertisersReports, 27), + (CampaignsReports, 28), (AdvertisersAudienceReports, 6), ], ) diff --git a/airbyte-integrations/connectors/source-twilio/Dockerfile b/airbyte-integrations/connectors/source-twilio/Dockerfile index 7b7dc90951e0..95c06a7fbd72 100644 --- a/airbyte-integrations/connectors/source-twilio/Dockerfile +++ b/airbyte-integrations/connectors/source-twilio/Dockerfile @@ -4,13 +4,13 @@ FROM python:3.9-slim RUN apt-get update && apt-get install -y bash && rm -rf /var/lib/apt/lists/* WORKDIR /airbyte/integration_code -COPY source_twilio ./source_twilio -COPY main.py ./ COPY setup.py ./ RUN pip install . 
+COPY source_twilio ./source_twilio +COPY main.py ./ ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] -LABEL io.airbyte.version=0.1.4 +LABEL io.airbyte.version=0.1.6 LABEL io.airbyte.name=airbyte/source-twilio diff --git a/airbyte-integrations/connectors/source-twilio/acceptance-test-config.yml b/airbyte-integrations/connectors/source-twilio/acceptance-test-config.yml index 656393e7b96a..eb3095a70237 100644 --- a/airbyte-integrations/connectors/source-twilio/acceptance-test-config.yml +++ b/airbyte-integrations/connectors/source-twilio/acceptance-test-config.yml @@ -13,18 +13,18 @@ tests: - config_path: "secrets/config.json" configured_catalog_path: "integration_tests/no_empty_streams_catalog.json" empty_streams: ["message_media", "conferences"] + expect_records: + path: "integration_tests/expected_records.txt" incremental: - config_path: "secrets/config.json" # usage records stream produces and error if cursor date gte than current date configured_catalog_path: "integration_tests/no_empty_streams_no_usage_records_catalog.json" future_state_path: "integration_tests/abnormal_state.json" - cursor_paths: - calls: ["end_time"] - conferences: ["date_updated"] - recordings: ["date_created"] - messages: ["date_sent"] - message_media: ["date_created"] - alerts: ["date_updated"] + - config_path: "secrets/config_with_lookback.json" + # usage records stream produces and error if cursor date gte than current date + configured_catalog_path: "integration_tests/no_empty_streams_no_usage_records_catalog.json" + future_state_path: "integration_tests/abnormal_state.json" + threshold_days: 30 full_refresh: - config_path: "secrets/config.json" # `constant_records_catalog.json` does not contain the available phone numbers streams, diff --git a/airbyte-integrations/connectors/source-twilio/acceptance-test-docker.sh b/airbyte-integrations/connectors/source-twilio/acceptance-test-docker.sh old mode 100644 new mode 
100755 diff --git a/airbyte-integrations/connectors/source-twilio/integration_tests/abnormal_state.json b/airbyte-integrations/connectors/source-twilio/integration_tests/abnormal_state.json index 1b2c80c8ba64..7da15e996072 100644 --- a/airbyte-integrations/connectors/source-twilio/integration_tests/abnormal_state.json +++ b/airbyte-integrations/connectors/source-twilio/integration_tests/abnormal_state.json @@ -1,20 +1,20 @@ { "calls": { - "end_time": "2220-10-01T00:00:00Z" + "end_time": "2030-10-01T00:00:00Z" }, "conferences": { - "date_updated": "2220-10-01T00:00:00Z" + "date_created": "2030-10-01T00:00:00Z" }, "recordings": { - "date_created": "2220-10-01T00:00:00Z" + "date_created": "2030-10-01T00:00:00Z" }, "messages": { - "date_sent": "2220-10-01T00:00:00Z" + "date_sent": "2030-10-01T00:00:00Z" }, "message_media": { - "date_created": "2220-10-01T00:00:00Z" + "date_created": "2030-10-01T00:00:00Z" }, "alerts": { - "date_updated": "2220-10-01T00:00:00Z" + "date_generated": "2030-10-01T00:00:00Z" } } diff --git a/airbyte-integrations/connectors/source-twilio/integration_tests/configured_catalog.json b/airbyte-integrations/connectors/source-twilio/integration_tests/configured_catalog.json index 6461c7b23cdd..accd97f69918 100644 --- a/airbyte-integrations/connectors/source-twilio/integration_tests/configured_catalog.json +++ b/airbyte-integrations/connectors/source-twilio/integration_tests/configured_catalog.json @@ -108,9 +108,9 @@ "json_schema": {}, "supported_sync_modes": ["incremental", "full_refresh"], "source_defined_cursor": true, - "default_cursor_field": ["date_updated"] + "default_cursor_field": ["date_created"] }, - "cursor_field": ["date_updated"], + "cursor_field": ["date_created"], "sync_mode": "incremental", "destination_sync_mode": "append" }, @@ -213,9 +213,9 @@ "json_schema": {}, "supported_sync_modes": ["incremental", "full_refresh"], "source_defined_cursor": true, - "default_cursor_field": ["date_updated"] + "default_cursor_field": 
["date_created"] }, - "cursor_field": ["date_updated"], + "cursor_field": ["date_created"], "sync_mode": "incremental", "destination_sync_mode": "append" } diff --git a/airbyte-integrations/connectors/source-twilio/integration_tests/constant_records_catalog.json b/airbyte-integrations/connectors/source-twilio/integration_tests/constant_records_catalog.json index 8b850c7bb567..724bef534596 100644 --- a/airbyte-integrations/connectors/source-twilio/integration_tests/constant_records_catalog.json +++ b/airbyte-integrations/connectors/source-twilio/integration_tests/constant_records_catalog.json @@ -63,9 +63,9 @@ "json_schema": {}, "supported_sync_modes": ["incremental", "full_refresh"], "source_defined_cursor": true, - "default_cursor_field": ["date_updated"] + "default_cursor_field": ["date_created"] }, - "cursor_field": ["date_updated"], + "cursor_field": ["date_created"], "sync_mode": "incremental", "destination_sync_mode": "append" }, @@ -159,9 +159,9 @@ "json_schema": {}, "supported_sync_modes": ["incremental", "full_refresh"], "source_defined_cursor": true, - "default_cursor_field": ["date_updated"] + "default_cursor_field": ["date_created"] }, - "cursor_field": ["date_updated"], + "cursor_field": ["date_created"], "sync_mode": "incremental", "destination_sync_mode": "append" } diff --git a/airbyte-integrations/connectors/source-twilio/integration_tests/expected_records.txt b/airbyte-integrations/connectors/source-twilio/integration_tests/expected_records.txt new file mode 100644 index 000000000000..74c6933cdef1 --- /dev/null +++ b/airbyte-integrations/connectors/source-twilio/integration_tests/expected_records.txt @@ -0,0 +1,534 @@ +{"stream": "addresses", "data": {"sid": "AD0164001bc0f84d9bc29e17378fe47c20", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "friendly_name": null, "customer_name": "test-customer_name_2", "street": "test-street_2", "street_secondary": null, "city": "test-city_2", "region": "test-region_2", "postal_code": "test-postal_code_2", 
"iso_country": "US", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Addresses/AD0164001bc0f84d9bc29e17378fe47c20.json", "date_created": "2020-11-25T09:41:48Z", "date_updated": "2020-11-25T09:41:48Z", "emergency_enabled": false, "validated": false, "verified": false}, "emitted_at": 1655893072016} +{"stream": "addresses", "data": {"sid": "AD12011c521c9991202e7d77d7d652b457", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "friendly_name": null, "customer_name": "test-customer_name", "street": "test-street", "street_secondary": null, "city": "test-city", "region": "test-region", "postal_code": "test-postal_code", "iso_country": "US", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Addresses/AD12011c521c9991202e7d77d7d652b457.json", "date_created": "2020-11-25T09:38:01Z", "date_updated": "2020-11-25T09:38:01Z", "emergency_enabled": false, "validated": false, "verified": false}, "emitted_at": 1655893072018} +{"stream": "addresses", "data": {"sid": "AD42931b949c0dedce94b2f93847fdcf95", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "friendly_name": null, "customer_name": "test-customer_name_5", "street": "test-street_5", "street_secondary": null, "city": "test-city_5", "region": "test-region_5", "postal_code": "test-postal_code_5", "iso_country": "US", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Addresses/AD42931b949c0dedce94b2f93847fdcf95.json", "date_created": "2020-11-25T09:41:49Z", "date_updated": "2020-11-25T09:41:49Z", "emergency_enabled": false, "validated": false, "verified": false}, "emitted_at": 1655893072020} +{"stream": "addresses", "data": {"sid": "AD824661054d24f09a92a4afa9d5ccc2cf", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "friendly_name": null, "customer_name": "test-customer_name_4", "street": "test-street_4", "street_secondary": null, "city": "test-city_4", "region": "test-region_4", "postal_code": "test-postal_code_4", "iso_country": "US", "uri": 
"/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Addresses/AD824661054d24f09a92a4afa9d5ccc2cf.json", "date_created": "2020-11-25T09:41:49Z", "date_updated": "2020-11-25T09:41:49Z", "emergency_enabled": false, "validated": false, "verified": false}, "emitted_at": 1655893072021} +{"stream": "addresses", "data": {"sid": "AD9cc2cc40dafe63c70e17ad3b8bfe9ffa", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "friendly_name": null, "customer_name": "test-customer_name_3", "street": "test-street_3", "street_secondary": null, "city": "test-city_3", "region": "test-region_3", "postal_code": "test-postal_code_3", "iso_country": "US", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Addresses/AD9cc2cc40dafe63c70e17ad3b8bfe9ffa.json", "date_created": "2020-11-25T09:41:49Z", "date_updated": "2020-11-25T09:41:49Z", "emergency_enabled": false, "validated": false, "verified": false}, "emitted_at": 1655893072022} +{"stream": "addresses", "data": {"sid": "ADa29b1ee20cf61d213f7d7f1a3298309a", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "friendly_name": null, "customer_name": "test-customer_name_1", "street": "test-street_1", "street_secondary": null, "city": "test-city_1", "region": "test-region_1", "postal_code": "test-postal_code_1", "iso_country": "US", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Addresses/ADa29b1ee20cf61d213f7d7f1a3298309a.json", "date_created": "2020-11-25T09:41:48Z", "date_updated": "2020-11-25T09:41:48Z", "emergency_enabled": false, "validated": false, "verified": false}, "emitted_at": 1655893072023} +{"stream": "applications", "data": {"sms_status_callback": null, "voice_caller_id_lookup": false, "voice_fallback_url": null, "date_updated": "2020-11-25T09:47:31Z", "sms_fallback_method": "POST", "friendly_name": "Test friendly name", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Applications/APd6232730849b51fb86fa20a8081fa27c.json", "sms_fallback_url": null, "account_sid": 
"ACdade166c12e160e9ed0a6088226718fb", "voice_method": "GET", "voice_url": "http://demo.twilio.com/docs/voice.xml", "sms_method": "POST", "status_callback_method": "POST", "sid": "APd6232730849b51fb86fa20a8081fa27c", "date_created": "2020-11-25T09:47:31Z", "sms_url": null, "status_callback": null, "voice_fallback_method": "POST", "api_version": "2010-04-01", "message_status_callback": null}, "emitted_at": 1655893073756} +{"stream": "applications", "data": {"sms_status_callback": null, "voice_caller_id_lookup": false, "voice_fallback_url": null, "date_updated": "2020-11-25T09:47:31Z", "sms_fallback_method": "POST", "friendly_name": "Test friendly name", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Applications/APe7ed98d5222e25db0938c1efc5c661b2.json", "sms_fallback_url": null, "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "voice_method": "GET", "voice_url": "http://demo.twilio.com/docs/voice.xml", "sms_method": "POST", "status_callback_method": "POST", "sid": "APe7ed98d5222e25db0938c1efc5c661b2", "date_created": "2020-11-25T09:47:31Z", "sms_url": null, "status_callback": null, "voice_fallback_method": "POST", "api_version": "2010-04-01", "message_status_callback": null}, "emitted_at": 1655893073767} +{"stream": "applications", "data": {"sms_status_callback": null, "voice_caller_id_lookup": false, "voice_fallback_url": null, "date_updated": "2020-11-25T09:47:31Z", "sms_fallback_method": "POST", "friendly_name": "Test friendly name", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Applications/AP731b039bbb9103a1ae2f0afbe85949d4.json", "sms_fallback_url": null, "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "voice_method": "GET", "voice_url": "http://demo.twilio.com/docs/voice.xml", "sms_method": "POST", "status_callback_method": "POST", "sid": "AP731b039bbb9103a1ae2f0afbe85949d4", "date_created": "2020-11-25T09:47:31Z", "sms_url": null, "status_callback": null, "voice_fallback_method": "POST", "api_version": "2010-04-01", 
"message_status_callback": null}, "emitted_at": 1655893073768} +{"stream": "applications", "data": {"sms_status_callback": null, "voice_caller_id_lookup": false, "voice_fallback_url": null, "date_updated": "2020-11-25T09:47:31Z", "sms_fallback_method": "POST", "friendly_name": "Test friendly name", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Applications/AP1c10c50172412d3a65dfd7395d11640f.json", "sms_fallback_url": null, "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "voice_method": "GET", "voice_url": "http://demo.twilio.com/docs/voice.xml", "sms_method": "POST", "status_callback_method": "POST", "sid": "AP1c10c50172412d3a65dfd7395d11640f", "date_created": "2020-11-25T09:47:31Z", "sms_url": null, "status_callback": null, "voice_fallback_method": "POST", "api_version": "2010-04-01", "message_status_callback": null}, "emitted_at": 1655893073769} +{"stream": "applications", "data": {"sms_status_callback": null, "voice_caller_id_lookup": false, "voice_fallback_url": null, "date_updated": "2020-11-25T09:47:31Z", "sms_fallback_method": "POST", "friendly_name": "Test friendly name", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Applications/AP9370b66dc53499e2459d82d75d21c6f8.json", "sms_fallback_url": null, "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "voice_method": "GET", "voice_url": "http://demo.twilio.com/docs/voice.xml", "sms_method": "POST", "status_callback_method": "POST", "sid": "AP9370b66dc53499e2459d82d75d21c6f8", "date_created": "2020-11-25T09:47:31Z", "sms_url": null, "status_callback": null, "voice_fallback_method": "POST", "api_version": "2010-04-01", "message_status_callback": null}, "emitted_at": 1655893073769} +{"stream": "available_phone_number_countries", "data": {"country_code": "PT", "country": "Portugal", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/PT.json", "beta": false, "subresource_uris": {"local": 
"/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/PT/Local.json", "mobile": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/PT/Mobile.json"}}, "emitted_at": 1655893076197} +{"stream": "available_phone_number_countries", "data": {"country_code": "SE", "country": "Sweden", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/SE.json", "beta": false, "subresource_uris": {"local": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/SE/Local.json", "mobile": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/SE/Mobile.json"}}, "emitted_at": 1655893076200} +{"stream": "available_phone_number_countries", "data": {"country_code": "IE", "country": "Ireland", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/IE.json", "beta": false, "subresource_uris": {"local": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/IE/Local.json", "mobile": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/IE/Mobile.json"}}, "emitted_at": 1655893076202} +{"stream": "available_phone_number_countries", "data": {"country_code": "RO", "country": "Romania", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/RO.json", "beta": false, "subresource_uris": {"local": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/RO/Local.json", "toll_free": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/RO/TollFree.json"}}, "emitted_at": 1655893076203} +{"stream": "available_phone_number_countries", "data": {"country_code": "AE", "country": "United Arab Emirates", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/AE.json", "beta": false, "subresource_uris": {"toll_free": 
"/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/AE/TollFree.json"}}, "emitted_at": 1655893076204} +{"stream": "available_phone_number_countries", "data": {"country_code": "FI", "country": "Finland", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/FI.json", "beta": false, "subresource_uris": {"toll_free": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/FI/TollFree.json", "mobile": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/FI/Mobile.json"}}, "emitted_at": 1655893076205} +{"stream": "available_phone_number_countries", "data": {"country_code": "GB", "country": "United Kingdom", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/GB.json", "beta": false, "subresource_uris": {"local": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/GB/Local.json", "toll_free": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/GB/TollFree.json", "mobile": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/GB/Mobile.json"}}, "emitted_at": 1655893076206} +{"stream": "available_phone_number_countries", "data": {"country_code": "PA", "country": "Panama", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/PA.json", "beta": false, "subresource_uris": {"local": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/PA/Local.json"}}, "emitted_at": 1655893076207} +{"stream": "available_phone_number_countries", "data": {"country_code": "PE", "country": "Peru", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/PE.json", "beta": false, "subresource_uris": {"toll_free": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/PE/TollFree.json"}}, "emitted_at": 1655893076208} +{"stream": "available_phone_number_countries", "data": 
{"country_code": "FR", "country": "France", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/FR.json", "beta": false, "subresource_uris": {"local": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/FR/Local.json"}}, "emitted_at": 1655893076209} +{"stream": "available_phone_number_countries", "data": {"country_code": "CZ", "country": "Czech Republic", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/CZ.json", "beta": false, "subresource_uris": {"local": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/CZ/Local.json", "toll_free": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/CZ/TollFree.json", "mobile": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/CZ/Mobile.json"}}, "emitted_at": 1655893076210} +{"stream": "available_phone_number_countries", "data": {"country_code": "BE", "country": "Belgium", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/BE.json", "beta": false, "subresource_uris": {"toll_free": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/BE/TollFree.json", "mobile": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/BE/Mobile.json"}}, "emitted_at": 1655893076211} +{"stream": "available_phone_number_countries", "data": {"country_code": "DE", "country": "Germany", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/DE.json", "beta": false, "subresource_uris": {"local": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/DE/Local.json", "mobile": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/DE/Mobile.json"}}, "emitted_at": 1655893076212} +{"stream": "available_phone_number_countries", "data": {"country_code": "CA", "country": "Canada", "uri": 
"/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/CA.json", "beta": false, "subresource_uris": {"local": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/CA/Local.json", "toll_free": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/CA/TollFree.json"}}, "emitted_at": 1655893076213} +{"stream": "available_phone_number_countries", "data": {"country_code": "GH", "country": "Ghana", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/GH.json", "beta": false, "subresource_uris": {"mobile": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/GH/Mobile.json"}}, "emitted_at": 1655893076215} +{"stream": "available_phone_number_countries", "data": {"country_code": "DK", "country": "Denmark", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/DK.json", "beta": false, "subresource_uris": {"toll_free": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/DK/TollFree.json", "mobile": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/DK/Mobile.json"}}, "emitted_at": 1655893076216} +{"stream": "available_phone_number_countries", "data": {"country_code": "UG", "country": "Uganda", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/UG.json", "beta": false, "subresource_uris": {"local": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/UG/Local.json", "toll_free": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/UG/TollFree.json"}}, "emitted_at": 1655893076217} +{"stream": "available_phone_number_countries", "data": {"country_code": "PL", "country": "Poland", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/PL.json", "beta": false, "subresource_uris": {"local": 
"/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/PL/Local.json", "toll_free": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/PL/TollFree.json", "mobile": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/PL/Mobile.json"}}, "emitted_at": 1655893076219} +{"stream": "available_phone_number_countries", "data": {"country_code": "MX", "country": "Mexico", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/MX.json", "beta": false, "subresource_uris": {"local": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/MX/Local.json", "toll_free": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/MX/TollFree.json"}}, "emitted_at": 1655893076220} +{"stream": "available_phone_number_countries", "data": {"country_code": "IS", "country": "Iceland", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/IS.json", "beta": false, "subresource_uris": {"local": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/IS/Local.json", "mobile": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/IS/Mobile.json"}}, "emitted_at": 1655893076221} +{"stream": "available_phone_number_countries", "data": {"country_code": "DZ", "country": "Algeria", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/DZ.json", "beta": false, "subresource_uris": {"local": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/DZ/Local.json"}}, "emitted_at": 1655893076222} +{"stream": "available_phone_number_countries", "data": {"country_code": "ZA", "country": "South Africa", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/ZA.json", "beta": false, "subresource_uris": {"local": 
"/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/ZA/Local.json", "mobile": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/ZA/Mobile.json"}}, "emitted_at": 1655893076223} +{"stream": "available_phone_number_countries", "data": {"country_code": "JP", "country": "Japan", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/JP.json", "beta": false, "subresource_uris": {"local": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/JP/Local.json", "toll_free": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/JP/TollFree.json"}}, "emitted_at": 1655893076223} +{"stream": "available_phone_number_countries", "data": {"country_code": "HR", "country": "Croatia", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/HR.json", "beta": false, "subresource_uris": {"local": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/HR/Local.json", "toll_free": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/HR/TollFree.json"}}, "emitted_at": 1655893076224} +{"stream": "available_phone_number_countries", "data": {"country_code": "ID", "country": "Indonesia", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/ID.json", "beta": false, "subresource_uris": {"local": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/ID/Local.json", "toll_free": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/ID/TollFree.json"}}, "emitted_at": 1655893076225} +{"stream": "available_phone_number_countries", "data": {"country_code": "BR", "country": "Brazil", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/BR.json", "beta": false, "subresource_uris": {"local": 
"/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/BR/Local.json", "toll_free": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/BR/TollFree.json"}}, "emitted_at": 1655893076225} +{"stream": "available_phone_number_countries", "data": {"country_code": "AT", "country": "Austria", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/AT.json", "beta": false, "subresource_uris": {"local": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/AT/Local.json", "toll_free": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/AT/TollFree.json", "mobile": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/AT/Mobile.json"}}, "emitted_at": 1655893076226} +{"stream": "available_phone_number_countries", "data": {"country_code": "US", "country": "United States", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/US.json", "beta": false, "subresource_uris": {"local": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/US/Local.json", "toll_free": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/US/TollFree.json"}}, "emitted_at": 1655893076227} +{"stream": "available_phone_number_countries", "data": {"country_code": "VI", "country": "Virgin Islands, U.S.", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/VI.json", "beta": false, "subresource_uris": {"local": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/VI/Local.json"}}, "emitted_at": 1655893076228} +{"stream": "available_phone_number_countries", "data": {"country_code": "EC", "country": "Ecuador", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/EC.json", "beta": false, "subresource_uris": {"local": 
"/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/EC/Local.json"}}, "emitted_at": 1655893076228} +{"stream": "available_phone_number_countries", "data": {"country_code": "KE", "country": "Kenya", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/KE.json", "beta": false, "subresource_uris": {"local": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/KE/Local.json"}}, "emitted_at": 1655893076229} +{"stream": "available_phone_number_countries", "data": {"country_code": "NL", "country": "Netherlands", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/NL.json", "beta": false, "subresource_uris": {"mobile": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/NL/Mobile.json"}}, "emitted_at": 1655893076229} +{"stream": "available_phone_number_countries", "data": {"country_code": "CL", "country": "Chile", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/CL.json", "beta": false, "subresource_uris": {"local": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/CL/Local.json", "mobile": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/CL/Mobile.json"}}, "emitted_at": 1655893076230} +{"stream": "available_phone_number_countries", "data": {"country_code": "CH", "country": "Switzerland", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/CH.json", "beta": false, "subresource_uris": {"local": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/CH/Local.json", "mobile": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/CH/Mobile.json"}}, "emitted_at": 1655893076230} +{"stream": "available_phone_number_countries", "data": {"country_code": "TN", "country": "Tunisia", "uri": 
"/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/TN.json", "beta": false, "subresource_uris": {"local": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/TN/Local.json"}}, "emitted_at": 1655893076231} +{"stream": "available_phone_number_countries", "data": {"country_code": "TT", "country": "Trinidad and Tobago", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/TT.json", "beta": false, "subresource_uris": {"local": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/TT/Local.json"}}, "emitted_at": 1655893076232} +{"stream": "available_phone_number_countries", "data": {"country_code": "TH", "country": "Thailand", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/TH.json", "beta": false, "subresource_uris": {"local": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/TH/Local.json", "toll_free": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/TH/TollFree.json"}}, "emitted_at": 1655893076232} +{"stream": "available_phone_number_countries", "data": {"country_code": "SI", "country": "Slovenia", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/SI.json", "beta": false, "subresource_uris": {"local": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/SI/Local.json"}}, "emitted_at": 1655893076233} +{"stream": "available_phone_number_countries", "data": {"country_code": "SK", "country": "Slovakia", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/SK.json", "beta": false, "subresource_uris": {"local": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/SK/Local.json", "toll_free": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/SK/TollFree.json"}}, "emitted_at": 1655893076233} +{"stream": 
"available_phone_number_countries", "data": {"country_code": "SG", "country": "Singapore", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/SG.json", "beta": false, "subresource_uris": {"local": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/SG/Local.json", "mobile": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/SG/Mobile.json"}}, "emitted_at": 1655893076233} +{"stream": "available_phone_number_countries", "data": {"country_code": "PR", "country": "Puerto Rico", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/PR.json", "beta": false, "subresource_uris": {"local": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/PR/Local.json"}}, "emitted_at": 1655893076234} +{"stream": "available_phone_number_countries", "data": {"country_code": "PH", "country": "Philippines", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/PH.json", "beta": false, "subresource_uris": {"local": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/PH/Local.json", "toll_free": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/PH/TollFree.json", "mobile": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/PH/Mobile.json"}}, "emitted_at": 1655893076234} +{"stream": "available_phone_number_countries", "data": {"country_code": "NZ", "country": "New Zealand", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/NZ.json", "beta": false, "subresource_uris": {"local": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/NZ/Local.json", "toll_free": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/NZ/TollFree.json"}}, "emitted_at": 1655893076235} +{"stream": "available_phone_number_countries", "data": {"country_code": "NA", "country": 
"Namibia", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/NA.json", "beta": false, "subresource_uris": {"local": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/NA/Local.json"}}, "emitted_at": 1655893076235} +{"stream": "available_phone_number_countries", "data": {"country_code": "MU", "country": "Mauritius", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/MU.json", "beta": false, "subresource_uris": {"mobile": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/MU/Mobile.json"}}, "emitted_at": 1655893076236} +{"stream": "available_phone_number_countries", "data": {"country_code": "ML", "country": "Mali", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/ML.json", "beta": false, "subresource_uris": {"local": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/ML/Local.json"}}, "emitted_at": 1655893076236} +{"stream": "available_phone_number_countries", "data": {"country_code": "MO", "country": "Macau", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/MO.json", "beta": false, "subresource_uris": {"mobile": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/MO/Mobile.json"}}, "emitted_at": 1655893076236} +{"stream": "available_phone_number_countries", "data": {"country_code": "LU", "country": "Luxembourg", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/LU.json", "beta": false, "subresource_uris": {"local": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/LU/Local.json"}}, "emitted_at": 1655893076237} +{"stream": "available_phone_number_countries", "data": {"country_code": "LT", "country": "Lithuania", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/LT.json", "beta": false, "subresource_uris": {"local": 
"/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/LT/Local.json", "mobile": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/LT/Mobile.json"}}, "emitted_at": 1655893076237} +{"stream": "available_phone_number_countries", "data": {"country_code": "JM", "country": "Jamaica", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/JM.json", "beta": false, "subresource_uris": {"local": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/JM/Local.json"}}, "emitted_at": 1655893076238} +{"stream": "available_phone_number_countries", "data": {"country_code": "IL", "country": "Israel", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/IL.json", "beta": false, "subresource_uris": {"local": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/IL/Local.json", "toll_free": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/IL/TollFree.json", "mobile": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/IL/Mobile.json"}}, "emitted_at": 1655893076238} +{"stream": "available_phone_number_countries", "data": {"country_code": "HU", "country": "Hungary", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/HU.json", "beta": false, "subresource_uris": {"local": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/HU/Local.json", "mobile": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/HU/Mobile.json"}}, "emitted_at": 1655893076238} +{"stream": "available_phone_number_countries", "data": {"country_code": "HK", "country": "Hong Kong", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/HK.json", "beta": false, "subresource_uris": {"local": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/HK/Local.json", 
"toll_free": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/HK/TollFree.json", "mobile": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/HK/Mobile.json"}}, "emitted_at": 1655893076239} +{"stream": "available_phone_number_countries", "data": {"country_code": "GN", "country": "Guinea", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/GN.json", "beta": false, "subresource_uris": {"mobile": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/GN/Mobile.json"}}, "emitted_at": 1655893076239} +{"stream": "available_phone_number_countries", "data": {"country_code": "GD", "country": "Grenada", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/GD.json", "beta": false, "subresource_uris": {"local": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/GD/Local.json"}}, "emitted_at": 1655893076240} +{"stream": "available_phone_number_countries", "data": {"country_code": "GR", "country": "Greece", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/GR.json", "beta": false, "subresource_uris": {"local": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/GR/Local.json"}}, "emitted_at": 1655893076241} +{"stream": "available_phone_number_countries", "data": {"country_code": "GE", "country": "Georgia", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/GE.json", "beta": false, "subresource_uris": {"local": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/GE/Local.json"}}, "emitted_at": 1655893076241} +{"stream": "available_phone_number_countries", "data": {"country_code": "EE", "country": "Estonia", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/EE.json", "beta": false, "subresource_uris": {"local": 
"/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/EE/Local.json", "mobile": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/EE/Mobile.json"}}, "emitted_at": 1655893076242} +{"stream": "available_phone_number_countries", "data": {"country_code": "SV", "country": "El Salvador", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/SV.json", "beta": false, "subresource_uris": {"local": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/SV/Local.json"}}, "emitted_at": 1655893076242} +{"stream": "available_phone_number_countries", "data": {"country_code": "DO", "country": "Dominican Republic", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/DO.json", "beta": false, "subresource_uris": {"local": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/DO/Local.json"}}, "emitted_at": 1655893076242} +{"stream": "available_phone_number_countries", "data": {"country_code": "CY", "country": "Cyprus", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/CY.json", "beta": false, "subresource_uris": {"local": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/CY/Local.json"}}, "emitted_at": 1655893076243} +{"stream": "available_phone_number_countries", "data": {"country_code": "CO", "country": "Colombia", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/CO.json", "beta": false, "subresource_uris": {"local": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/CO/Local.json", "toll_free": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/CO/TollFree.json"}}, "emitted_at": 1655893076243} +{"stream": "available_phone_number_countries", "data": {"country_code": "KY", "country": "Cayman Islands", "uri": 
"/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/KY.json", "beta": false, "subresource_uris": {"local": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/KY/Local.json"}}, "emitted_at": 1655893076243} +{"stream": "available_phone_number_countries", "data": {"country_code": "BG", "country": "Bulgaria", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/BG.json", "beta": false, "subresource_uris": {"local": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/BG/Local.json", "toll_free": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/BG/TollFree.json"}}, "emitted_at": 1655893076244} +{"stream": "available_phone_number_countries", "data": {"country_code": "BW", "country": "Botswana", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/BW.json", "beta": false, "subresource_uris": {"toll_free": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/BW/TollFree.json"}}, "emitted_at": 1655893076244} +{"stream": "available_phone_number_countries", "data": {"country_code": "BA", "country": "Bosnia and Herzegovina", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/BA.json", "beta": false, "subresource_uris": {"local": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/BA/Local.json"}}, "emitted_at": 1655893076244} +{"stream": "available_phone_number_countries", "data": {"country_code": "BJ", "country": "Benin", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/BJ.json", "beta": false, "subresource_uris": {"mobile": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/BJ/Mobile.json"}}, "emitted_at": 1655893076244} +{"stream": "available_phone_number_countries", "data": {"country_code": "BB", "country": "Barbados", "uri": 
"/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/BB.json", "beta": false, "subresource_uris": {"local": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/BB/Local.json"}}, "emitted_at": 1655893076245} +{"stream": "available_phone_number_countries", "data": {"country_code": "AU", "country": "Australia", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/AU.json", "beta": false, "subresource_uris": {"local": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/AU/Local.json", "toll_free": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/AU/TollFree.json", "mobile": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/AU/Mobile.json"}}, "emitted_at": 1655893076245} +{"stream": "available_phone_number_countries", "data": {"country_code": "AR", "country": "Argentina", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/AR.json", "beta": false, "subresource_uris": {"local": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/AR/Local.json", "toll_free": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/AR/TollFree.json"}}, "emitted_at": 1655893076245} +{"stream": "available_phone_number_countries", "data": {"country_code": "PT", "country": "Portugal", "uri": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/PT.json", "beta": false, "subresource_uris": {"local": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/PT/Local.json", "mobile": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/PT/Mobile.json"}}, "emitted_at": 1655893077076} +{"stream": "available_phone_number_countries", "data": {"country_code": "SE", "country": "Sweden", "uri": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/SE.json", 
"beta": false, "subresource_uris": {"local": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/SE/Local.json", "mobile": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/SE/Mobile.json"}}, "emitted_at": 1655893077077} +{"stream": "available_phone_number_countries", "data": {"country_code": "IE", "country": "Ireland", "uri": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/IE.json", "beta": false, "subresource_uris": {"local": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/IE/Local.json", "mobile": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/IE/Mobile.json"}}, "emitted_at": 1655893077078} +{"stream": "available_phone_number_countries", "data": {"country_code": "RO", "country": "Romania", "uri": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/RO.json", "beta": false, "subresource_uris": {"local": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/RO/Local.json", "toll_free": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/RO/TollFree.json"}}, "emitted_at": 1655893077079} +{"stream": "available_phone_number_countries", "data": {"country_code": "AE", "country": "United Arab Emirates", "uri": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/AE.json", "beta": false, "subresource_uris": {"toll_free": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/AE/TollFree.json"}}, "emitted_at": 1655893077080} +{"stream": "available_phone_number_countries", "data": {"country_code": "FI", "country": "Finland", "uri": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/FI.json", "beta": false, "subresource_uris": {"toll_free": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/FI/TollFree.json", "mobile": 
"/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/FI/Mobile.json"}}, "emitted_at": 1655893077081} +{"stream": "available_phone_number_countries", "data": {"country_code": "GB", "country": "United Kingdom", "uri": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/GB.json", "beta": false, "subresource_uris": {"local": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/GB/Local.json", "toll_free": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/GB/TollFree.json", "mobile": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/GB/Mobile.json"}}, "emitted_at": 1655893077083} +{"stream": "available_phone_number_countries", "data": {"country_code": "PA", "country": "Panama", "uri": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/PA.json", "beta": false, "subresource_uris": {"local": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/PA/Local.json"}}, "emitted_at": 1655893077084} +{"stream": "available_phone_number_countries", "data": {"country_code": "PE", "country": "Peru", "uri": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/PE.json", "beta": false, "subresource_uris": {"toll_free": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/PE/TollFree.json"}}, "emitted_at": 1655893077085} +{"stream": "available_phone_number_countries", "data": {"country_code": "FR", "country": "France", "uri": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/FR.json", "beta": false, "subresource_uris": {"local": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/FR/Local.json"}}, "emitted_at": 1655893077086} +{"stream": "available_phone_number_countries", "data": {"country_code": "CZ", "country": "Czech Republic", "uri": 
"/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/CZ.json", "beta": false, "subresource_uris": {"local": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/CZ/Local.json", "toll_free": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/CZ/TollFree.json", "mobile": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/CZ/Mobile.json"}}, "emitted_at": 1655893077087} +{"stream": "available_phone_number_countries", "data": {"country_code": "BE", "country": "Belgium", "uri": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/BE.json", "beta": false, "subresource_uris": {"toll_free": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/BE/TollFree.json", "mobile": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/BE/Mobile.json"}}, "emitted_at": 1655893077088} +{"stream": "available_phone_number_countries", "data": {"country_code": "DE", "country": "Germany", "uri": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/DE.json", "beta": false, "subresource_uris": {"local": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/DE/Local.json", "mobile": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/DE/Mobile.json"}}, "emitted_at": 1655893077089} +{"stream": "available_phone_number_countries", "data": {"country_code": "CA", "country": "Canada", "uri": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/CA.json", "beta": false, "subresource_uris": {"local": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/CA/Local.json", "toll_free": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/CA/TollFree.json"}}, "emitted_at": 1655893077090} +{"stream": "available_phone_number_countries", "data": {"country_code": "GH", "country": 
"Ghana", "uri": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/GH.json", "beta": false, "subresource_uris": {"mobile": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/GH/Mobile.json"}}, "emitted_at": 1655893077091} +{"stream": "available_phone_number_countries", "data": {"country_code": "DK", "country": "Denmark", "uri": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/DK.json", "beta": false, "subresource_uris": {"toll_free": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/DK/TollFree.json", "mobile": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/DK/Mobile.json"}}, "emitted_at": 1655893077092} +{"stream": "available_phone_number_countries", "data": {"country_code": "UG", "country": "Uganda", "uri": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/UG.json", "beta": false, "subresource_uris": {"local": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/UG/Local.json", "toll_free": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/UG/TollFree.json"}}, "emitted_at": 1655893077093} +{"stream": "available_phone_number_countries", "data": {"country_code": "PL", "country": "Poland", "uri": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/PL.json", "beta": false, "subresource_uris": {"local": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/PL/Local.json", "toll_free": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/PL/TollFree.json", "mobile": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/PL/Mobile.json"}}, "emitted_at": 1655893077094} +{"stream": "available_phone_number_countries", "data": {"country_code": "MX", "country": "Mexico", "uri": 
"/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/MX.json", "beta": false, "subresource_uris": {"local": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/MX/Local.json", "toll_free": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/MX/TollFree.json"}}, "emitted_at": 1655893077095} +{"stream": "available_phone_number_countries", "data": {"country_code": "IS", "country": "Iceland", "uri": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/IS.json", "beta": false, "subresource_uris": {"local": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/IS/Local.json", "mobile": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/IS/Mobile.json"}}, "emitted_at": 1655893077096} +{"stream": "available_phone_number_countries", "data": {"country_code": "DZ", "country": "Algeria", "uri": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/DZ.json", "beta": false, "subresource_uris": {"local": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/DZ/Local.json"}}, "emitted_at": 1655893077098} +{"stream": "available_phone_number_countries", "data": {"country_code": "ZA", "country": "South Africa", "uri": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/ZA.json", "beta": false, "subresource_uris": {"local": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/ZA/Local.json", "mobile": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/ZA/Mobile.json"}}, "emitted_at": 1655893077099} +{"stream": "available_phone_number_countries", "data": {"country_code": "JP", "country": "Japan", "uri": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/JP.json", "beta": false, "subresource_uris": {"local": 
"/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/JP/Local.json", "toll_free": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/JP/TollFree.json"}}, "emitted_at": 1655893077100} +{"stream": "available_phone_number_countries", "data": {"country_code": "HR", "country": "Croatia", "uri": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/HR.json", "beta": false, "subresource_uris": {"local": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/HR/Local.json", "toll_free": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/HR/TollFree.json"}}, "emitted_at": 1655893077101} +{"stream": "available_phone_number_countries", "data": {"country_code": "ID", "country": "Indonesia", "uri": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/ID.json", "beta": false, "subresource_uris": {"local": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/ID/Local.json", "toll_free": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/ID/TollFree.json"}}, "emitted_at": 1655893077102} +{"stream": "available_phone_number_countries", "data": {"country_code": "BR", "country": "Brazil", "uri": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/BR.json", "beta": false, "subresource_uris": {"local": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/BR/Local.json", "toll_free": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/BR/TollFree.json"}}, "emitted_at": 1655893077103} +{"stream": "available_phone_number_countries", "data": {"country_code": "AT", "country": "Austria", "uri": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/AT.json", "beta": false, "subresource_uris": {"local": 
"/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/AT/Local.json", "toll_free": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/AT/TollFree.json", "mobile": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/AT/Mobile.json"}}, "emitted_at": 1655893077104} +{"stream": "available_phone_number_countries", "data": {"country_code": "US", "country": "United States", "uri": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/US.json", "beta": false, "subresource_uris": {"local": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/US/Local.json", "toll_free": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/US/TollFree.json"}}, "emitted_at": 1655893077105} +{"stream": "available_phone_number_countries", "data": {"country_code": "VI", "country": "Virgin Islands, U.S.", "uri": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/VI.json", "beta": false, "subresource_uris": {"local": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/VI/Local.json"}}, "emitted_at": 1655893077106} +{"stream": "available_phone_number_countries", "data": {"country_code": "EC", "country": "Ecuador", "uri": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/EC.json", "beta": false, "subresource_uris": {"local": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/EC/Local.json"}}, "emitted_at": 1655893077106} +{"stream": "available_phone_number_countries", "data": {"country_code": "KE", "country": "Kenya", "uri": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/KE.json", "beta": false, "subresource_uris": {"local": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/KE/Local.json"}}, "emitted_at": 1655893077107} +{"stream": "available_phone_number_countries", "data": 
{"country_code": "NL", "country": "Netherlands", "uri": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/NL.json", "beta": false, "subresource_uris": {"mobile": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/NL/Mobile.json"}}, "emitted_at": 1655893077108} +{"stream": "available_phone_number_countries", "data": {"country_code": "CL", "country": "Chile", "uri": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/CL.json", "beta": false, "subresource_uris": {"local": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/CL/Local.json", "mobile": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/CL/Mobile.json"}}, "emitted_at": 1655893077108} +{"stream": "available_phone_number_countries", "data": {"country_code": "CH", "country": "Switzerland", "uri": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/CH.json", "beta": false, "subresource_uris": {"local": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/CH/Local.json", "mobile": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/CH/Mobile.json"}}, "emitted_at": 1655893077109} +{"stream": "available_phone_number_countries", "data": {"country_code": "TN", "country": "Tunisia", "uri": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/TN.json", "beta": false, "subresource_uris": {"local": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/TN/Local.json"}}, "emitted_at": 1655893077110} +{"stream": "available_phone_number_countries", "data": {"country_code": "TT", "country": "Trinidad and Tobago", "uri": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/TT.json", "beta": false, "subresource_uris": {"local": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/TT/Local.json"}}, 
"emitted_at": 1655893077110} +{"stream": "available_phone_number_countries", "data": {"country_code": "TH", "country": "Thailand", "uri": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/TH.json", "beta": false, "subresource_uris": {"local": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/TH/Local.json", "toll_free": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/TH/TollFree.json"}}, "emitted_at": 1655893077111} +{"stream": "available_phone_number_countries", "data": {"country_code": "SI", "country": "Slovenia", "uri": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/SI.json", "beta": false, "subresource_uris": {"local": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/SI/Local.json"}}, "emitted_at": 1655893077111} +{"stream": "available_phone_number_countries", "data": {"country_code": "SK", "country": "Slovakia", "uri": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/SK.json", "beta": false, "subresource_uris": {"local": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/SK/Local.json", "toll_free": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/SK/TollFree.json"}}, "emitted_at": 1655893077112} +{"stream": "available_phone_number_countries", "data": {"country_code": "SG", "country": "Singapore", "uri": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/SG.json", "beta": false, "subresource_uris": {"local": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/SG/Local.json", "mobile": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/SG/Mobile.json"}}, "emitted_at": 1655893077112} +{"stream": "available_phone_number_countries", "data": {"country_code": "PR", "country": "Puerto Rico", "uri": 
"/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/PR.json", "beta": false, "subresource_uris": {"local": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/PR/Local.json"}}, "emitted_at": 1655893077113} +{"stream": "available_phone_number_countries", "data": {"country_code": "PH", "country": "Philippines", "uri": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/PH.json", "beta": false, "subresource_uris": {"local": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/PH/Local.json", "toll_free": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/PH/TollFree.json", "mobile": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/PH/Mobile.json"}}, "emitted_at": 1655893077113} +{"stream": "available_phone_number_countries", "data": {"country_code": "NZ", "country": "New Zealand", "uri": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/NZ.json", "beta": false, "subresource_uris": {"local": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/NZ/Local.json", "toll_free": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/NZ/TollFree.json"}}, "emitted_at": 1655893077114} +{"stream": "available_phone_number_countries", "data": {"country_code": "NA", "country": "Namibia", "uri": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/NA.json", "beta": false, "subresource_uris": {"local": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/NA/Local.json"}}, "emitted_at": 1655893077114} +{"stream": "available_phone_number_countries", "data": {"country_code": "MU", "country": "Mauritius", "uri": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/MU.json", "beta": false, "subresource_uris": {"mobile": 
"/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/MU/Mobile.json"}}, "emitted_at": 1655893077115} +{"stream": "available_phone_number_countries", "data": {"country_code": "ML", "country": "Mali", "uri": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/ML.json", "beta": false, "subresource_uris": {"local": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/ML/Local.json"}}, "emitted_at": 1655893077115} +{"stream": "available_phone_number_countries", "data": {"country_code": "MO", "country": "Macau", "uri": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/MO.json", "beta": false, "subresource_uris": {"mobile": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/MO/Mobile.json"}}, "emitted_at": 1655893077115} +{"stream": "available_phone_number_countries", "data": {"country_code": "LU", "country": "Luxembourg", "uri": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/LU.json", "beta": false, "subresource_uris": {"local": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/LU/Local.json"}}, "emitted_at": 1655893077116} +{"stream": "available_phone_number_countries", "data": {"country_code": "LT", "country": "Lithuania", "uri": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/LT.json", "beta": false, "subresource_uris": {"local": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/LT/Local.json", "mobile": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/LT/Mobile.json"}}, "emitted_at": 1655893077116} +{"stream": "available_phone_number_countries", "data": {"country_code": "JM", "country": "Jamaica", "uri": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/JM.json", "beta": false, "subresource_uris": {"local": 
"/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/JM/Local.json"}}, "emitted_at": 1655893077117} +{"stream": "available_phone_number_countries", "data": {"country_code": "IL", "country": "Israel", "uri": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/IL.json", "beta": false, "subresource_uris": {"local": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/IL/Local.json", "toll_free": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/IL/TollFree.json", "mobile": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/IL/Mobile.json"}}, "emitted_at": 1655893077117} +{"stream": "available_phone_number_countries", "data": {"country_code": "HU", "country": "Hungary", "uri": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/HU.json", "beta": false, "subresource_uris": {"local": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/HU/Local.json", "mobile": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/HU/Mobile.json"}}, "emitted_at": 1655893077117} +{"stream": "available_phone_number_countries", "data": {"country_code": "HK", "country": "Hong Kong", "uri": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/HK.json", "beta": false, "subresource_uris": {"local": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/HK/Local.json", "toll_free": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/HK/TollFree.json", "mobile": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/HK/Mobile.json"}}, "emitted_at": 1655893077118} +{"stream": "available_phone_number_countries", "data": {"country_code": "GN", "country": "Guinea", "uri": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/GN.json", "beta": false, 
"subresource_uris": {"mobile": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/GN/Mobile.json"}}, "emitted_at": 1655893077118} +{"stream": "available_phone_number_countries", "data": {"country_code": "GD", "country": "Grenada", "uri": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/GD.json", "beta": false, "subresource_uris": {"local": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/GD/Local.json"}}, "emitted_at": 1655893077119} +{"stream": "available_phone_number_countries", "data": {"country_code": "GR", "country": "Greece", "uri": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/GR.json", "beta": false, "subresource_uris": {"local": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/GR/Local.json"}}, "emitted_at": 1655893077119} +{"stream": "available_phone_number_countries", "data": {"country_code": "GE", "country": "Georgia", "uri": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/GE.json", "beta": false, "subresource_uris": {"local": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/GE/Local.json"}}, "emitted_at": 1655893077119} +{"stream": "available_phone_number_countries", "data": {"country_code": "EE", "country": "Estonia", "uri": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/EE.json", "beta": false, "subresource_uris": {"local": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/EE/Local.json", "mobile": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/EE/Mobile.json"}}, "emitted_at": 1655893077120} +{"stream": "available_phone_number_countries", "data": {"country_code": "SV", "country": "El Salvador", "uri": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/SV.json", "beta": false, "subresource_uris": {"local": 
"/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/SV/Local.json"}}, "emitted_at": 1655893077120} +{"stream": "available_phone_number_countries", "data": {"country_code": "DO", "country": "Dominican Republic", "uri": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/DO.json", "beta": false, "subresource_uris": {"local": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/DO/Local.json"}}, "emitted_at": 1655893077120} +{"stream": "available_phone_number_countries", "data": {"country_code": "CY", "country": "Cyprus", "uri": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/CY.json", "beta": false, "subresource_uris": {"local": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/CY/Local.json"}}, "emitted_at": 1655893077121} +{"stream": "available_phone_number_countries", "data": {"country_code": "CO", "country": "Colombia", "uri": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/CO.json", "beta": false, "subresource_uris": {"local": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/CO/Local.json", "toll_free": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/CO/TollFree.json"}}, "emitted_at": 1655893077121} +{"stream": "available_phone_number_countries", "data": {"country_code": "KY", "country": "Cayman Islands", "uri": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/KY.json", "beta": false, "subresource_uris": {"local": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/KY/Local.json"}}, "emitted_at": 1655893077122} +{"stream": "available_phone_number_countries", "data": {"country_code": "BG", "country": "Bulgaria", "uri": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/BG.json", "beta": false, "subresource_uris": {"local": 
"/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/BG/Local.json", "toll_free": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/BG/TollFree.json"}}, "emitted_at": 1655893077122} +{"stream": "available_phone_number_countries", "data": {"country_code": "BW", "country": "Botswana", "uri": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/BW.json", "beta": false, "subresource_uris": {"toll_free": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/BW/TollFree.json"}}, "emitted_at": 1655893077122} +{"stream": "available_phone_number_countries", "data": {"country_code": "BA", "country": "Bosnia and Herzegovina", "uri": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/BA.json", "beta": false, "subresource_uris": {"local": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/BA/Local.json"}}, "emitted_at": 1655893077123} +{"stream": "available_phone_number_countries", "data": {"country_code": "BJ", "country": "Benin", "uri": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/BJ.json", "beta": false, "subresource_uris": {"mobile": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/BJ/Mobile.json"}}, "emitted_at": 1655893077123} +{"stream": "available_phone_number_countries", "data": {"country_code": "BB", "country": "Barbados", "uri": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/BB.json", "beta": false, "subresource_uris": {"local": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/BB/Local.json"}}, "emitted_at": 1655893077123} +{"stream": "available_phone_number_countries", "data": {"country_code": "AU", "country": "Australia", "uri": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/AU.json", "beta": false, "subresource_uris": {"local": 
"/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/AU/Local.json", "toll_free": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/AU/TollFree.json", "mobile": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/AU/Mobile.json"}}, "emitted_at": 1655893077124} +{"stream": "available_phone_number_countries", "data": {"country_code": "AR", "country": "Argentina", "uri": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/AR.json", "beta": false, "subresource_uris": {"local": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/AR/Local.json", "toll_free": "/2010-04-01/Accounts/AC4cac489c46197c9ebc91c840120a4dee/AvailablePhoneNumbers/AR/TollFree.json"}}, "emitted_at": 1655893077124} +{"stream": "incoming_phone_numbers", "data": {"sid": "PNe40bd7f3ac343b32fd51275d2d5b3dcc", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "friendly_name": "2FA Number - PLEASE DO NOT TOUCH. 
Use another number for anythin", "phone_number": "+12056561170", "voice_url": "https://handler.twilio.com/twiml/EH7af811843f38093d724a5c2e80b3eabe", "voice_method": "POST", "voice_fallback_url": "", "voice_fallback_method": "POST", "voice_caller_id_lookup": false, "date_created": "2020-12-11T04:28:40Z", "date_updated": "2021-06-23T23:05:37Z", "sms_url": "https://webhooks.twilio.com/v1/Accounts/ACdade166c12e160e9ed0a6088226718fb/Flows/FWbd726b7110b21294a9f27a47f4ab0080", "sms_method": "POST", "sms_fallback_url": "", "sms_fallback_method": "POST", "address_requirements": "none", "beta": false, "capabilities": {"voice": true, "sms": true, "mms": true}, "status_callback": "", "status_callback_method": "POST", "api_version": "2010-04-01", "voice_application_sid": "", "sms_application_sid": "", "origin": "twilio", "trunk_sid": null, "emergency_status": "Active", "emergency_address_sid": null, "emergency_address_status": "unregistered", "address_sid": null, "identity_sid": null, "bundle_sid": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/IncomingPhoneNumbers/PNe40bd7f3ac343b32fd51275d2d5b3dcc.json", "status": "in-use"}, "emitted_at": 1655893245291} +{"stream": "keys", "data": {"date_updated": "2021-02-01T07:30:21Z", "date_created": "2021-02-01T07:30:21Z", "friendly_name": "Studio API Key", "sid": "SK60085e9cfc3d94aa1b987b25c78067a9"}, "emitted_at": 1655893247168} +{"stream": "calls", "data": {"date_updated": "2022-06-17T22:28:34Z", "price_unit": "USD", "parent_call_sid": null, "caller_name": null, "duration": 61, "from": "+15312726629", "to": "+12056561170", "annotation": null, "answered_by": null, "sid": "CAe71d3c7533543b5c81b1be3fc5affa2b", "queue_time": 0, "price": -0.017, "api_version": "2010-04-01", "status": "completed", "direction": "inbound", "start_time": "2022-06-17T22:27:33Z", "date_created": "2022-06-17T22:27:32Z", "from_formatted": "(531) 272-6629", "group_sid": null, "trunk_sid": "", "forwarded_from": "+12056561170", "uri": 
"/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CAe71d3c7533543b5c81b1be3fc5affa2b.json", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "end_time": "2022-06-17T22:28:34Z", "to_formatted": "(205) 656-1170", "phone_number_sid": "PNe40bd7f3ac343b32fd51275d2d5b3dcc", "subresource_uris": {"feedback": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CAe71d3c7533543b5c81b1be3fc5affa2b/Feedback.json", "notifications": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CAe71d3c7533543b5c81b1be3fc5affa2b/Notifications.json", "recordings": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CAe71d3c7533543b5c81b1be3fc5affa2b/Recordings.json", "streams": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CAe71d3c7533543b5c81b1be3fc5affa2b/Streams.json", "payments": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CAe71d3c7533543b5c81b1be3fc5affa2b/Payments.json", "siprec": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CAe71d3c7533543b5c81b1be3fc5affa2b/Siprec.json", "events": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CAe71d3c7533543b5c81b1be3fc5affa2b/Events.json", "feedback_summaries": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/FeedbackSummary.json"}}, "emitted_at": 1655893249727} +{"stream": "calls", "data": {"date_updated": "2022-06-17T13:36:17Z", "price_unit": "USD", "parent_call_sid": null, "caller_name": null, "duration": 96, "from": "+17372040136", "to": "+12056561170", "annotation": null, "answered_by": null, "sid": "CA0a47223735162e1a7df2738327bda2ab", "queue_time": 0, "price": -0.017, "api_version": "2010-04-01", "status": "completed", "direction": "inbound", "start_time": "2022-06-17T13:34:41Z", "date_created": "2022-06-17T13:34:41Z", "from_formatted": "(737) 204-0136", "group_sid": null, "trunk_sid": "", "forwarded_from": "+12056561170", "uri": 
"/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CA0a47223735162e1a7df2738327bda2ab.json", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "end_time": "2022-06-17T13:36:17Z", "to_formatted": "(205) 656-1170", "phone_number_sid": "PNe40bd7f3ac343b32fd51275d2d5b3dcc", "subresource_uris": {"feedback": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CA0a47223735162e1a7df2738327bda2ab/Feedback.json", "notifications": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CA0a47223735162e1a7df2738327bda2ab/Notifications.json", "recordings": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CA0a47223735162e1a7df2738327bda2ab/Recordings.json", "streams": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CA0a47223735162e1a7df2738327bda2ab/Streams.json", "payments": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CA0a47223735162e1a7df2738327bda2ab/Payments.json", "siprec": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CA0a47223735162e1a7df2738327bda2ab/Siprec.json", "events": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CA0a47223735162e1a7df2738327bda2ab/Events.json", "feedback_summaries": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/FeedbackSummary.json"}}, "emitted_at": 1655893249739} +{"stream": "calls", "data": {"date_updated": "2022-06-16T20:02:43Z", "price_unit": "USD", "parent_call_sid": null, "caller_name": null, "duration": 124, "from": "+17372040136", "to": "+12056561170", "annotation": null, "answered_by": null, "sid": "CAace5c8813c499253bbbff29ad0da0ccb", "queue_time": 0, "price": -0.0255, "api_version": "2010-04-01", "status": "completed", "direction": "inbound", "start_time": "2022-06-16T20:00:39Z", "date_created": "2022-06-16T20:00:39Z", "from_formatted": "(737) 204-0136", "group_sid": null, "trunk_sid": "", "forwarded_from": "+12056561170", "uri": 
"/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CAace5c8813c499253bbbff29ad0da0ccb.json", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "end_time": "2022-06-16T20:02:43Z", "to_formatted": "(205) 656-1170", "phone_number_sid": "PNe40bd7f3ac343b32fd51275d2d5b3dcc", "subresource_uris": {"feedback": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CAace5c8813c499253bbbff29ad0da0ccb/Feedback.json", "notifications": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CAace5c8813c499253bbbff29ad0da0ccb/Notifications.json", "recordings": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CAace5c8813c499253bbbff29ad0da0ccb/Recordings.json", "streams": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CAace5c8813c499253bbbff29ad0da0ccb/Streams.json", "payments": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CAace5c8813c499253bbbff29ad0da0ccb/Payments.json", "siprec": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CAace5c8813c499253bbbff29ad0da0ccb/Siprec.json", "events": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CAace5c8813c499253bbbff29ad0da0ccb/Events.json", "feedback_summaries": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/FeedbackSummary.json"}}, "emitted_at": 1655893249745} +{"stream": "calls", "data": {"date_updated": "2022-06-02T12:54:05Z", "price_unit": "USD", "parent_call_sid": null, "caller_name": null, "duration": 5, "from": "+12059675338", "to": "+12056561170", "annotation": null, "answered_by": null, "sid": "CAa24e9fbcb6eba3c8cfefc248a3c0b5b4", "queue_time": 0, "price": -0.0085, "api_version": "2010-04-01", "status": "completed", "direction": "inbound", "start_time": "2022-06-02T12:54:00Z", "date_created": "2022-06-02T12:54:00Z", "from_formatted": "(205) 967-5338", "group_sid": null, "trunk_sid": "", "forwarded_from": "+12056561170", "uri": 
"/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CAa24e9fbcb6eba3c8cfefc248a3c0b5b4.json", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "end_time": "2022-06-02T12:54:05Z", "to_formatted": "(205) 656-1170", "phone_number_sid": "PNe40bd7f3ac343b32fd51275d2d5b3dcc", "subresource_uris": {"feedback": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CAa24e9fbcb6eba3c8cfefc248a3c0b5b4/Feedback.json", "notifications": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CAa24e9fbcb6eba3c8cfefc248a3c0b5b4/Notifications.json", "recordings": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CAa24e9fbcb6eba3c8cfefc248a3c0b5b4/Recordings.json", "streams": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CAa24e9fbcb6eba3c8cfefc248a3c0b5b4/Streams.json", "payments": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CAa24e9fbcb6eba3c8cfefc248a3c0b5b4/Payments.json", "siprec": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CAa24e9fbcb6eba3c8cfefc248a3c0b5b4/Siprec.json", "events": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CAa24e9fbcb6eba3c8cfefc248a3c0b5b4/Events.json", "feedback_summaries": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/FeedbackSummary.json"}}, "emitted_at": 1655893249752} +{"stream": "calls", "data": {"date_updated": "2022-05-26T22:14:18Z", "price_unit": "USD", "parent_call_sid": null, "caller_name": null, "duration": 69, "from": "+13343585579", "to": "+12056561170", "annotation": null, "answered_by": null, "sid": "CA65f8d6ee9f8783233750f2b0f99cf1b3", "queue_time": 0, "price": -0.017, "api_version": "2010-04-01", "status": "completed", "direction": "inbound", "start_time": "2022-05-26T22:13:09Z", "date_created": "2022-05-26T22:13:09Z", "from_formatted": "(334) 358-5579", "group_sid": null, "trunk_sid": "", "forwarded_from": "+12056561170", "uri": 
"/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CA65f8d6ee9f8783233750f2b0f99cf1b3.json", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "end_time": "2022-05-26T22:14:18Z", "to_formatted": "(205) 656-1170", "phone_number_sid": "PNe40bd7f3ac343b32fd51275d2d5b3dcc", "subresource_uris": {"feedback": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CA65f8d6ee9f8783233750f2b0f99cf1b3/Feedback.json", "notifications": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CA65f8d6ee9f8783233750f2b0f99cf1b3/Notifications.json", "recordings": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CA65f8d6ee9f8783233750f2b0f99cf1b3/Recordings.json", "streams": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CA65f8d6ee9f8783233750f2b0f99cf1b3/Streams.json", "payments": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CA65f8d6ee9f8783233750f2b0f99cf1b3/Payments.json", "siprec": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CA65f8d6ee9f8783233750f2b0f99cf1b3/Siprec.json", "events": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CA65f8d6ee9f8783233750f2b0f99cf1b3/Events.json", "feedback_summaries": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/FeedbackSummary.json"}}, "emitted_at": 1655893249756} +{"stream": "calls", "data": {"date_updated": "2022-05-24T23:00:40Z", "price_unit": "USD", "parent_call_sid": null, "caller_name": null, "duration": 31, "from": "+14156896198", "to": "+12056561170", "annotation": null, "answered_by": null, "sid": "CA5b6907d5ebca072c9bd0f46952b886b6", "queue_time": 0, "price": -0.0085, "api_version": "2010-04-01", "status": "completed", "direction": "inbound", "start_time": "2022-05-24T23:00:09Z", "date_created": "2022-05-24T23:00:09Z", "from_formatted": "(415) 689-6198", "group_sid": null, "trunk_sid": "", "forwarded_from": "+12056561170", "uri": 
"/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CA5b6907d5ebca072c9bd0f46952b886b6.json", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "end_time": "2022-05-24T23:00:40Z", "to_formatted": "(205) 656-1170", "phone_number_sid": "PNe40bd7f3ac343b32fd51275d2d5b3dcc", "subresource_uris": {"feedback": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CA5b6907d5ebca072c9bd0f46952b886b6/Feedback.json", "notifications": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CA5b6907d5ebca072c9bd0f46952b886b6/Notifications.json", "recordings": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CA5b6907d5ebca072c9bd0f46952b886b6/Recordings.json", "streams": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CA5b6907d5ebca072c9bd0f46952b886b6/Streams.json", "payments": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CA5b6907d5ebca072c9bd0f46952b886b6/Payments.json", "siprec": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CA5b6907d5ebca072c9bd0f46952b886b6/Siprec.json", "events": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CA5b6907d5ebca072c9bd0f46952b886b6/Events.json", "feedback_summaries": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/FeedbackSummary.json"}}, "emitted_at": 1655893249759} +{"stream": "calls", "data": {"date_updated": "2022-05-11T18:21:15Z", "price_unit": "USD", "parent_call_sid": null, "caller_name": null, "duration": 23, "from": "+12137661124", "to": "+12056561170", "annotation": null, "answered_by": null, "sid": "CA696bd2d2e37ef8501f443807dce444a9", "queue_time": 0, "price": -0.0085, "api_version": "2010-04-01", "status": "completed", "direction": "inbound", "start_time": "2022-05-11T18:20:52Z", "date_created": "2022-05-11T18:20:52Z", "from_formatted": "(213) 766-1124", "group_sid": null, "trunk_sid": "", "forwarded_from": "+12056561170", "uri": 
"/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CA696bd2d2e37ef8501f443807dce444a9.json", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "end_time": "2022-05-11T18:21:15Z", "to_formatted": "(205) 656-1170", "phone_number_sid": "PNe40bd7f3ac343b32fd51275d2d5b3dcc", "subresource_uris": {"feedback": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CA696bd2d2e37ef8501f443807dce444a9/Feedback.json", "notifications": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CA696bd2d2e37ef8501f443807dce444a9/Notifications.json", "recordings": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CA696bd2d2e37ef8501f443807dce444a9/Recordings.json", "streams": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CA696bd2d2e37ef8501f443807dce444a9/Streams.json", "payments": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CA696bd2d2e37ef8501f443807dce444a9/Payments.json", "siprec": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CA696bd2d2e37ef8501f443807dce444a9/Siprec.json", "events": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CA696bd2d2e37ef8501f443807dce444a9/Events.json", "feedback_summaries": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/FeedbackSummary.json"}}, "emitted_at": 1655893249762} +{"stream": "calls", "data": {"date_updated": "2022-04-20T17:33:25Z", "price_unit": "USD", "parent_call_sid": null, "caller_name": null, "duration": 5, "from": "+12059736828", "to": "+12056561170", "annotation": null, "answered_by": null, "sid": "CAe86d27d7aba7c857135b46f52f578d0b", "queue_time": 0, "price": -0.0085, "api_version": "2010-04-01", "status": "completed", "direction": "inbound", "start_time": "2022-04-20T17:33:20Z", "date_created": "2022-04-20T17:33:20Z", "from_formatted": "(205) 973-6828", "group_sid": null, "trunk_sid": "", "forwarded_from": "+12056561170", "uri": 
"/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CAe86d27d7aba7c857135b46f52f578d0b.json", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "end_time": "2022-04-20T17:33:25Z", "to_formatted": "(205) 656-1170", "phone_number_sid": "PNe40bd7f3ac343b32fd51275d2d5b3dcc", "subresource_uris": {"feedback": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CAe86d27d7aba7c857135b46f52f578d0b/Feedback.json", "notifications": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CAe86d27d7aba7c857135b46f52f578d0b/Notifications.json", "recordings": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CAe86d27d7aba7c857135b46f52f578d0b/Recordings.json", "streams": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CAe86d27d7aba7c857135b46f52f578d0b/Streams.json", "payments": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CAe86d27d7aba7c857135b46f52f578d0b/Payments.json", "siprec": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CAe86d27d7aba7c857135b46f52f578d0b/Siprec.json", "events": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CAe86d27d7aba7c857135b46f52f578d0b/Events.json", "feedback_summaries": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/FeedbackSummary.json"}}, "emitted_at": 1655893249765} +{"stream": "calls", "data": {"date_updated": "2022-04-06T21:01:01Z", "price_unit": "USD", "parent_call_sid": null, "caller_name": null, "duration": 6, "from": "+13017951000", "to": "+12056561170", "annotation": null, "answered_by": null, "sid": "CAade9599c9cf53091c1787898093e2675", "queue_time": 0, "price": -0.0085, "api_version": "2010-04-01", "status": "completed", "direction": "inbound", "start_time": "2022-04-06T21:00:55Z", "date_created": "2022-04-06T21:00:55Z", "from_formatted": "(301) 795-1000", "group_sid": null, "trunk_sid": "", "forwarded_from": "+12056561170", "uri": 
"/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CAade9599c9cf53091c1787898093e2675.json", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "end_time": "2022-04-06T21:01:01Z", "to_formatted": "(205) 656-1170", "phone_number_sid": "PNe40bd7f3ac343b32fd51275d2d5b3dcc", "subresource_uris": {"feedback": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CAade9599c9cf53091c1787898093e2675/Feedback.json", "notifications": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CAade9599c9cf53091c1787898093e2675/Notifications.json", "recordings": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CAade9599c9cf53091c1787898093e2675/Recordings.json", "streams": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CAade9599c9cf53091c1787898093e2675/Streams.json", "payments": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CAade9599c9cf53091c1787898093e2675/Payments.json", "siprec": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CAade9599c9cf53091c1787898093e2675/Siprec.json", "events": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CAade9599c9cf53091c1787898093e2675/Events.json", "feedback_summaries": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/FeedbackSummary.json"}}, "emitted_at": 1655893249767} +{"stream": "calls", "data": {"date_updated": "2022-04-06T20:57:37Z", "price_unit": "USD", "parent_call_sid": null, "caller_name": null, "duration": 6, "from": "+13017951000", "to": "+12056561170", "annotation": null, "answered_by": null, "sid": "CAa3887d4de4849a630bc369351f300171", "queue_time": 0, "price": -0.0085, "api_version": "2010-04-01", "status": "completed", "direction": "inbound", "start_time": "2022-04-06T20:57:31Z", "date_created": "2022-04-06T20:57:31Z", "from_formatted": "(301) 795-1000", "group_sid": null, "trunk_sid": "", "forwarded_from": "+12056561170", "uri": 
"/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CAa3887d4de4849a630bc369351f300171.json", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "end_time": "2022-04-06T20:57:37Z", "to_formatted": "(205) 656-1170", "phone_number_sid": "PNe40bd7f3ac343b32fd51275d2d5b3dcc", "subresource_uris": {"feedback": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CAa3887d4de4849a630bc369351f300171/Feedback.json", "notifications": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CAa3887d4de4849a630bc369351f300171/Notifications.json", "recordings": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CAa3887d4de4849a630bc369351f300171/Recordings.json", "streams": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CAa3887d4de4849a630bc369351f300171/Streams.json", "payments": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CAa3887d4de4849a630bc369351f300171/Payments.json", "siprec": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CAa3887d4de4849a630bc369351f300171/Siprec.json", "events": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CAa3887d4de4849a630bc369351f300171/Events.json", "feedback_summaries": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/FeedbackSummary.json"}}, "emitted_at": 1655893249769} +{"stream": "calls", "data": {"date_updated": "2022-03-13T23:56:37Z", "price_unit": "USD", "parent_call_sid": null, "caller_name": null, "duration": 13, "from": "+12059203962", "to": "+12056561170", "annotation": null, "answered_by": null, "sid": "CA78611ecf5e7f101b1a59be31b8f520f7", "queue_time": 0, "price": -0.0085, "api_version": "2010-04-01", "status": "completed", "direction": "inbound", "start_time": "2022-03-13T23:56:24Z", "date_created": "2022-03-13T23:56:24Z", "from_formatted": "(205) 920-3962", "group_sid": null, "trunk_sid": "", "forwarded_from": "+12056561170", "uri": 
"/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CA78611ecf5e7f101b1a59be31b8f520f7.json", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "end_time": "2022-03-13T23:56:37Z", "to_formatted": "(205) 656-1170", "phone_number_sid": "PNe40bd7f3ac343b32fd51275d2d5b3dcc", "subresource_uris": {"feedback": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CA78611ecf5e7f101b1a59be31b8f520f7/Feedback.json", "notifications": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CA78611ecf5e7f101b1a59be31b8f520f7/Notifications.json", "recordings": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CA78611ecf5e7f101b1a59be31b8f520f7/Recordings.json", "streams": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CA78611ecf5e7f101b1a59be31b8f520f7/Streams.json", "payments": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CA78611ecf5e7f101b1a59be31b8f520f7/Payments.json", "siprec": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CA78611ecf5e7f101b1a59be31b8f520f7/Siprec.json", "events": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CA78611ecf5e7f101b1a59be31b8f520f7/Events.json", "feedback_summaries": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/FeedbackSummary.json"}}, "emitted_at": 1655893249771} +{"stream": "outgoing_caller_ids", "data": {"phone_number": "+14153597503", "date_updated": "2020-11-17T04:17:37Z", "friendly_name": "(415) 359-7503", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/OutgoingCallerIds/PN16ba111c0df5756cfe37044ed0ee3136.json", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "sid": "PN16ba111c0df5756cfe37044ed0ee3136", "date_created": "2020-11-17T04:17:37Z"}, "emitted_at": 1655893253929} +{"stream": "outgoing_caller_ids", "data": {"phone_number": "+18023494963", "date_updated": "2020-12-11T04:28:02Z", "friendly_name": "(802) 349-4963", "uri": 
"/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/OutgoingCallerIds/PN726d635f970c30193cd12e7b994510a1.json", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "sid": "PN726d635f970c30193cd12e7b994510a1", "date_created": "2020-12-11T04:28:02Z"}, "emitted_at": 1655893253943} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CAe71d3c7533543b5c81b1be3fc5affa2b", "conference_sid": null, "date_created": "2022-06-17T22:28:34Z", "date_updated": "2022-06-17T22:28:34Z", "start_time": "2022-06-17T22:28:34Z", "duration": 1, "sid": "REa8c057eb787b1de63c92eaef1dd93451", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REa8c057eb787b1de63c92eaef1dd93451.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REa8c057eb787b1de63c92eaef1dd93451/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REa8c057eb787b1de63c92eaef1dd93451/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REa8c057eb787b1de63c92eaef1dd93451"}, "emitted_at": 1655893266498} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CAe71d3c7533543b5c81b1be3fc5affa2b", "conference_sid": null, "date_created": "2022-06-17T22:28:32Z", "date_updated": "2022-06-17T22:28:33Z", "start_time": "2022-06-17T22:28:32Z", "duration": 1, "sid": "REf3305256d56b0ee4c37ee883ccfca6ff", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": 
"/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REf3305256d56b0ee4c37ee883ccfca6ff.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REf3305256d56b0ee4c37ee883ccfca6ff/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REf3305256d56b0ee4c37ee883ccfca6ff/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REf3305256d56b0ee4c37ee883ccfca6ff"}, "emitted_at": 1655893266500} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CAe71d3c7533543b5c81b1be3fc5affa2b", "conference_sid": null, "date_created": "2022-06-17T22:28:31Z", "date_updated": "2022-06-17T22:28:31Z", "start_time": "2022-06-17T22:28:30Z", "duration": 1, "sid": "REc898baa5689053d0c3520079c09e69e0", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REc898baa5689053d0c3520079c09e69e0.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REc898baa5689053d0c3520079c09e69e0/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REc898baa5689053d0c3520079c09e69e0/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REc898baa5689053d0c3520079c09e69e0"}, "emitted_at": 1655893266501} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CAe71d3c7533543b5c81b1be3fc5affa2b", "conference_sid": null, "date_created": "2022-06-17T22:28:29Z", "date_updated": "2022-06-17T22:28:30Z", 
"start_time": "2022-06-17T22:28:29Z", "duration": 1, "sid": "RE8fde6c8bce0d0b91f1f52ae7dd9b587f", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE8fde6c8bce0d0b91f1f52ae7dd9b587f.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE8fde6c8bce0d0b91f1f52ae7dd9b587f/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE8fde6c8bce0d0b91f1f52ae7dd9b587f/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE8fde6c8bce0d0b91f1f52ae7dd9b587f"}, "emitted_at": 1655893266503} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CAe71d3c7533543b5c81b1be3fc5affa2b", "conference_sid": null, "date_created": "2022-06-17T22:28:28Z", "date_updated": "2022-06-17T22:28:28Z", "start_time": "2022-06-17T22:28:27Z", "duration": 1, "sid": "REee43502d9bf49f767055e46177bbe5f1", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REee43502d9bf49f767055e46177bbe5f1.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REee43502d9bf49f767055e46177bbe5f1/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REee43502d9bf49f767055e46177bbe5f1/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REee43502d9bf49f767055e46177bbe5f1"}, "emitted_at": 1655893266504} +{"stream": "recordings", "data": 
{"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CAe71d3c7533543b5c81b1be3fc5affa2b", "conference_sid": null, "date_created": "2022-06-17T22:28:26Z", "date_updated": "2022-06-17T22:28:26Z", "start_time": "2022-06-17T22:28:26Z", "duration": 1, "sid": "REd8e883b1548347064e8eb97f7300950a", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REd8e883b1548347064e8eb97f7300950a.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REd8e883b1548347064e8eb97f7300950a/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REd8e883b1548347064e8eb97f7300950a/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REd8e883b1548347064e8eb97f7300950a"}, "emitted_at": 1655893266505} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CAe71d3c7533543b5c81b1be3fc5affa2b", "conference_sid": null, "date_created": "2022-06-17T22:28:24Z", "date_updated": "2022-06-17T22:28:25Z", "start_time": "2022-06-17T22:28:24Z", "duration": 1, "sid": "RE34d8c1d3a60ceb4e371d08bb31cb1ee5", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE34d8c1d3a60ceb4e371d08bb31cb1ee5.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE34d8c1d3a60ceb4e371d08bb31cb1ee5/AddOnResults.json", "transcriptions": 
"/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE34d8c1d3a60ceb4e371d08bb31cb1ee5/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE34d8c1d3a60ceb4e371d08bb31cb1ee5"}, "emitted_at": 1655893266506} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CAe71d3c7533543b5c81b1be3fc5affa2b", "conference_sid": null, "date_created": "2022-06-17T22:28:23Z", "date_updated": "2022-06-17T22:28:23Z", "start_time": "2022-06-17T22:28:22Z", "duration": 1, "sid": "RE063d90ddefe99041febf577b3d5654b2", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE063d90ddefe99041febf577b3d5654b2.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE063d90ddefe99041febf577b3d5654b2/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE063d90ddefe99041febf577b3d5654b2/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE063d90ddefe99041febf577b3d5654b2"}, "emitted_at": 1655893266507} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CAe71d3c7533543b5c81b1be3fc5affa2b", "conference_sid": null, "date_created": "2022-06-17T22:28:21Z", "date_updated": "2022-06-17T22:28:22Z", "start_time": "2022-06-17T22:28:21Z", "duration": 1, "sid": "REa0dee82b8d4ad3400cf148587d080645", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": 
"/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REa0dee82b8d4ad3400cf148587d080645.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REa0dee82b8d4ad3400cf148587d080645/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REa0dee82b8d4ad3400cf148587d080645/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REa0dee82b8d4ad3400cf148587d080645"}, "emitted_at": 1655893266508} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CAe71d3c7533543b5c81b1be3fc5affa2b", "conference_sid": null, "date_created": "2022-06-17T22:28:20Z", "date_updated": "2022-06-17T22:28:20Z", "start_time": "2022-06-17T22:28:19Z", "duration": 1, "sid": "RE3ad8cef5ad8e6dd56922050a5abc1aee", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE3ad8cef5ad8e6dd56922050a5abc1aee.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE3ad8cef5ad8e6dd56922050a5abc1aee/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE3ad8cef5ad8e6dd56922050a5abc1aee/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE3ad8cef5ad8e6dd56922050a5abc1aee"}, "emitted_at": 1655893266509} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CAe71d3c7533543b5c81b1be3fc5affa2b", "conference_sid": null, "date_created": "2022-06-17T22:28:18Z", "date_updated": "2022-06-17T22:28:19Z", 
"start_time": "2022-06-17T22:28:18Z", "duration": 1, "sid": "REa1a1072c42fa1cdd4facd6bbda01e690", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REa1a1072c42fa1cdd4facd6bbda01e690.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REa1a1072c42fa1cdd4facd6bbda01e690/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REa1a1072c42fa1cdd4facd6bbda01e690/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REa1a1072c42fa1cdd4facd6bbda01e690"}, "emitted_at": 1655893266510} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CAe71d3c7533543b5c81b1be3fc5affa2b", "conference_sid": null, "date_created": "2022-06-17T22:28:17Z", "date_updated": "2022-06-17T22:28:17Z", "start_time": "2022-06-17T22:28:16Z", "duration": 1, "sid": "RE57cfe6ce955d565f16a6f264c4650210", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE57cfe6ce955d565f16a6f264c4650210.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE57cfe6ce955d565f16a6f264c4650210/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE57cfe6ce955d565f16a6f264c4650210/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE57cfe6ce955d565f16a6f264c4650210"}, "emitted_at": 1655893266511} +{"stream": "recordings", "data": 
{"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CAe71d3c7533543b5c81b1be3fc5affa2b", "conference_sid": null, "date_created": "2022-06-17T22:28:15Z", "date_updated": "2022-06-17T22:28:15Z", "start_time": "2022-06-17T22:28:15Z", "duration": 1, "sid": "REe1a53985caed4d2200ab3cca069d5209", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REe1a53985caed4d2200ab3cca069d5209.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REe1a53985caed4d2200ab3cca069d5209/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REe1a53985caed4d2200ab3cca069d5209/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REe1a53985caed4d2200ab3cca069d5209"}, "emitted_at": 1655893266512} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CAe71d3c7533543b5c81b1be3fc5affa2b", "conference_sid": null, "date_created": "2022-06-17T22:28:13Z", "date_updated": "2022-06-17T22:28:14Z", "start_time": "2022-06-17T22:28:13Z", "duration": 1, "sid": "RE72aaed1be07ae09af3b722d9fc9181f5", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE72aaed1be07ae09af3b722d9fc9181f5.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE72aaed1be07ae09af3b722d9fc9181f5/AddOnResults.json", "transcriptions": 
"/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE72aaed1be07ae09af3b722d9fc9181f5/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE72aaed1be07ae09af3b722d9fc9181f5"}, "emitted_at": 1655893266513} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CAe71d3c7533543b5c81b1be3fc5affa2b", "conference_sid": null, "date_created": "2022-06-17T22:28:12Z", "date_updated": "2022-06-17T22:28:12Z", "start_time": "2022-06-17T22:28:11Z", "duration": 1, "sid": "REed0bf01d1fadd1b4b3d2dbca3ecf6fc4", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REed0bf01d1fadd1b4b3d2dbca3ecf6fc4.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REed0bf01d1fadd1b4b3d2dbca3ecf6fc4/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REed0bf01d1fadd1b4b3d2dbca3ecf6fc4/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REed0bf01d1fadd1b4b3d2dbca3ecf6fc4"}, "emitted_at": 1655893266514} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CAe71d3c7533543b5c81b1be3fc5affa2b", "conference_sid": null, "date_created": "2022-06-17T22:28:10Z", "date_updated": "2022-06-17T22:28:10Z", "start_time": "2022-06-17T22:28:10Z", "duration": 1, "sid": "RE8d780d9fe16adbcae260a16fb5f0059a", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": 
"/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE8d780d9fe16adbcae260a16fb5f0059a.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE8d780d9fe16adbcae260a16fb5f0059a/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE8d780d9fe16adbcae260a16fb5f0059a/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE8d780d9fe16adbcae260a16fb5f0059a"}, "emitted_at": 1655893266515} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CAe71d3c7533543b5c81b1be3fc5affa2b", "conference_sid": null, "date_created": "2022-06-17T22:28:09Z", "date_updated": "2022-06-17T22:28:09Z", "start_time": "2022-06-17T22:28:08Z", "duration": 1, "sid": "REe294fcd0bd7346a2c0642f79cda4a588", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REe294fcd0bd7346a2c0642f79cda4a588.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REe294fcd0bd7346a2c0642f79cda4a588/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REe294fcd0bd7346a2c0642f79cda4a588/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REe294fcd0bd7346a2c0642f79cda4a588"}, "emitted_at": 1655893266517} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CAe71d3c7533543b5c81b1be3fc5affa2b", "conference_sid": null, "date_created": "2022-06-17T22:28:07Z", "date_updated": "2022-06-17T22:28:07Z", 
"start_time": "2022-06-17T22:28:07Z", "duration": 1, "sid": "RE8b2ae55921bcde2e2939e0084bb15c1d", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE8b2ae55921bcde2e2939e0084bb15c1d.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE8b2ae55921bcde2e2939e0084bb15c1d/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE8b2ae55921bcde2e2939e0084bb15c1d/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE8b2ae55921bcde2e2939e0084bb15c1d"}, "emitted_at": 1655893266518} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CAe71d3c7533543b5c81b1be3fc5affa2b", "conference_sid": null, "date_created": "2022-06-17T22:28:06Z", "date_updated": "2022-06-17T22:28:06Z", "start_time": "2022-06-17T22:28:05Z", "duration": 1, "sid": "RE10820d30261ac06aba22796a429ca228", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE10820d30261ac06aba22796a429ca228.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE10820d30261ac06aba22796a429ca228/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE10820d30261ac06aba22796a429ca228/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE10820d30261ac06aba22796a429ca228"}, "emitted_at": 1655893266519} +{"stream": "recordings", "data": 
{"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CAe71d3c7533543b5c81b1be3fc5affa2b", "conference_sid": null, "date_created": "2022-06-17T22:28:04Z", "date_updated": "2022-06-17T22:28:04Z", "start_time": "2022-06-17T22:28:04Z", "duration": 1, "sid": "REa62714d099d5c53ea859851fa928d1e6", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REa62714d099d5c53ea859851fa928d1e6.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REa62714d099d5c53ea859851fa928d1e6/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REa62714d099d5c53ea859851fa928d1e6/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REa62714d099d5c53ea859851fa928d1e6"}, "emitted_at": 1655893266520} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CAe71d3c7533543b5c81b1be3fc5affa2b", "conference_sid": null, "date_created": "2022-06-17T22:28:02Z", "date_updated": "2022-06-17T22:28:03Z", "start_time": "2022-06-17T22:28:02Z", "duration": 1, "sid": "RE27b0159a84e9508a3ab4993b5bb14395", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE27b0159a84e9508a3ab4993b5bb14395.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE27b0159a84e9508a3ab4993b5bb14395/AddOnResults.json", "transcriptions": 
"/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE27b0159a84e9508a3ab4993b5bb14395/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE27b0159a84e9508a3ab4993b5bb14395"}, "emitted_at": 1655893266521} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CAe71d3c7533543b5c81b1be3fc5affa2b", "conference_sid": null, "date_created": "2022-06-17T22:28:01Z", "date_updated": "2022-06-17T22:28:01Z", "start_time": "2022-06-17T22:28:00Z", "duration": 1, "sid": "RE8803088187cd2f307573d336fb4a8f87", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE8803088187cd2f307573d336fb4a8f87.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE8803088187cd2f307573d336fb4a8f87/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE8803088187cd2f307573d336fb4a8f87/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE8803088187cd2f307573d336fb4a8f87"}, "emitted_at": 1655893266522} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CAe71d3c7533543b5c81b1be3fc5affa2b", "conference_sid": null, "date_created": "2022-06-17T22:27:59Z", "date_updated": "2022-06-17T22:28:00Z", "start_time": "2022-06-17T22:27:59Z", "duration": 1, "sid": "REa2d28f35df1892fb26f7c6001d034b0c", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": 
"/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REa2d28f35df1892fb26f7c6001d034b0c.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REa2d28f35df1892fb26f7c6001d034b0c/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REa2d28f35df1892fb26f7c6001d034b0c/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REa2d28f35df1892fb26f7c6001d034b0c"}, "emitted_at": 1655893266523} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CAe71d3c7533543b5c81b1be3fc5affa2b", "conference_sid": null, "date_created": "2022-06-17T22:27:58Z", "date_updated": "2022-06-17T22:27:58Z", "start_time": "2022-06-17T22:27:57Z", "duration": 1, "sid": "RE3bd54ed8375d1bcbd97ad64c2113fae2", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE3bd54ed8375d1bcbd97ad64c2113fae2.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE3bd54ed8375d1bcbd97ad64c2113fae2/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE3bd54ed8375d1bcbd97ad64c2113fae2/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE3bd54ed8375d1bcbd97ad64c2113fae2"}, "emitted_at": 1655893266524} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CAe71d3c7533543b5c81b1be3fc5affa2b", "conference_sid": null, "date_created": "2022-06-17T22:27:52Z", "date_updated": "2022-06-17T22:27:56Z", 
"start_time": "2022-06-17T22:27:51Z", "duration": 4, "sid": "RE71fc6f69d0b58d97fa2e0e94a6b28d39", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE71fc6f69d0b58d97fa2e0e94a6b28d39.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE71fc6f69d0b58d97fa2e0e94a6b28d39/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE71fc6f69d0b58d97fa2e0e94a6b28d39/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE71fc6f69d0b58d97fa2e0e94a6b28d39"}, "emitted_at": 1655893266525} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CAe71d3c7533543b5c81b1be3fc5affa2b", "conference_sid": null, "date_created": "2022-06-17T22:27:50Z", "date_updated": "2022-06-17T22:27:51Z", "start_time": "2022-06-17T22:27:50Z", "duration": 1, "sid": "REcd19023c475c7736baa6f58331a7e88f", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REcd19023c475c7736baa6f58331a7e88f.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REcd19023c475c7736baa6f58331a7e88f/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REcd19023c475c7736baa6f58331a7e88f/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REcd19023c475c7736baa6f58331a7e88f"}, "emitted_at": 1655893266526} +{"stream": "recordings", "data": 
{"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CAe71d3c7533543b5c81b1be3fc5affa2b", "conference_sid": null, "date_created": "2022-06-17T22:27:48Z", "date_updated": "2022-06-17T22:27:49Z", "start_time": "2022-06-17T22:27:48Z", "duration": 1, "sid": "RE06f66a445d2ce0a9ee81a1ee837f6295", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE06f66a445d2ce0a9ee81a1ee837f6295.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE06f66a445d2ce0a9ee81a1ee837f6295/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE06f66a445d2ce0a9ee81a1ee837f6295/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE06f66a445d2ce0a9ee81a1ee837f6295"}, "emitted_at": 1655893266527} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CAe71d3c7533543b5c81b1be3fc5affa2b", "conference_sid": null, "date_created": "2022-06-17T22:27:38Z", "date_updated": "2022-06-17T22:27:47Z", "start_time": "2022-06-17T22:27:38Z", "duration": 9, "sid": "RE23c1f81c23a44f80ccb984129db33a10", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE23c1f81c23a44f80ccb984129db33a10.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE23c1f81c23a44f80ccb984129db33a10/AddOnResults.json", "transcriptions": 
"/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE23c1f81c23a44f80ccb984129db33a10/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE23c1f81c23a44f80ccb984129db33a10"}, "emitted_at": 1655893266528} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CAe71d3c7533543b5c81b1be3fc5affa2b", "conference_sid": null, "date_created": "2022-06-17T22:27:37Z", "date_updated": "2022-06-17T22:27:37Z", "start_time": "2022-06-17T22:27:36Z", "duration": 1, "sid": "RE98ff4be60f6e1ec2a70ddc03510a7b65", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE98ff4be60f6e1ec2a70ddc03510a7b65.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE98ff4be60f6e1ec2a70ddc03510a7b65/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE98ff4be60f6e1ec2a70ddc03510a7b65/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE98ff4be60f6e1ec2a70ddc03510a7b65"}, "emitted_at": 1655893266529} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CAe71d3c7533543b5c81b1be3fc5affa2b", "conference_sid": null, "date_created": "2022-06-17T22:27:35Z", "date_updated": "2022-06-17T22:27:35Z", "start_time": "2022-06-17T22:27:35Z", "duration": 1, "sid": "RE167fc5a62a8f8fe28e24095164cb86f1", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": 
"/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE167fc5a62a8f8fe28e24095164cb86f1.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE167fc5a62a8f8fe28e24095164cb86f1/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE167fc5a62a8f8fe28e24095164cb86f1/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE167fc5a62a8f8fe28e24095164cb86f1"}, "emitted_at": 1655893266530} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CA0a47223735162e1a7df2738327bda2ab", "conference_sid": null, "date_created": "2022-06-17T13:36:16Z", "date_updated": "2022-06-17T13:36:16Z", "start_time": "2022-06-17T13:36:16Z", "duration": 1, "sid": "REc859d7bb28ee8235200f3be49c4a9fc1", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REc859d7bb28ee8235200f3be49c4a9fc1.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REc859d7bb28ee8235200f3be49c4a9fc1/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REc859d7bb28ee8235200f3be49c4a9fc1/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REc859d7bb28ee8235200f3be49c4a9fc1"}, "emitted_at": 1655893266532} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CA0a47223735162e1a7df2738327bda2ab", "conference_sid": null, "date_created": "2022-06-17T13:36:15Z", "date_updated": "2022-06-17T13:36:15Z", 
"start_time": "2022-06-17T13:36:14Z", "duration": 1, "sid": "REc0887a4b2facff1137d503aea20b21e6", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REc0887a4b2facff1137d503aea20b21e6.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REc0887a4b2facff1137d503aea20b21e6/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REc0887a4b2facff1137d503aea20b21e6/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REc0887a4b2facff1137d503aea20b21e6"}, "emitted_at": 1655893266533} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CA0a47223735162e1a7df2738327bda2ab", "conference_sid": null, "date_created": "2022-06-17T13:36:13Z", "date_updated": "2022-06-17T13:36:13Z", "start_time": "2022-06-17T13:36:13Z", "duration": 1, "sid": "RE5b1fecd25e5a36b7b401c242ea48ec4d", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE5b1fecd25e5a36b7b401c242ea48ec4d.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE5b1fecd25e5a36b7b401c242ea48ec4d/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE5b1fecd25e5a36b7b401c242ea48ec4d/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE5b1fecd25e5a36b7b401c242ea48ec4d"}, "emitted_at": 1655893266534} +{"stream": "recordings", "data": 
{"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CA0a47223735162e1a7df2738327bda2ab", "conference_sid": null, "date_created": "2022-06-17T13:36:11Z", "date_updated": "2022-06-17T13:36:12Z", "start_time": "2022-06-17T13:36:11Z", "duration": 1, "sid": "REcb96f6a7c31844b3b32086227a7413fd", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REcb96f6a7c31844b3b32086227a7413fd.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REcb96f6a7c31844b3b32086227a7413fd/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REcb96f6a7c31844b3b32086227a7413fd/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REcb96f6a7c31844b3b32086227a7413fd"}, "emitted_at": 1655893266535} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CA0a47223735162e1a7df2738327bda2ab", "conference_sid": null, "date_created": "2022-06-17T13:36:10Z", "date_updated": "2022-06-17T13:36:10Z", "start_time": "2022-06-17T13:36:09Z", "duration": 1, "sid": "RE2dcb6894878e884c30aa4d5de079369a", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE2dcb6894878e884c30aa4d5de079369a.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE2dcb6894878e884c30aa4d5de079369a/AddOnResults.json", "transcriptions": 
"/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE2dcb6894878e884c30aa4d5de079369a/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE2dcb6894878e884c30aa4d5de079369a"}, "emitted_at": 1655893266536} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CA0a47223735162e1a7df2738327bda2ab", "conference_sid": null, "date_created": "2022-06-17T13:36:08Z", "date_updated": "2022-06-17T13:36:08Z", "start_time": "2022-06-17T13:36:08Z", "duration": 1, "sid": "RE71a04bc7ffdc164ac56e53ef1bb3192a", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE71a04bc7ffdc164ac56e53ef1bb3192a.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE71a04bc7ffdc164ac56e53ef1bb3192a/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE71a04bc7ffdc164ac56e53ef1bb3192a/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE71a04bc7ffdc164ac56e53ef1bb3192a"}, "emitted_at": 1655893266537} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CA0a47223735162e1a7df2738327bda2ab", "conference_sid": null, "date_created": "2022-06-17T13:36:07Z", "date_updated": "2022-06-17T13:36:07Z", "start_time": "2022-06-17T13:36:06Z", "duration": 1, "sid": "RE5799f1cbe2040e0f4feffabdf4205e0f", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": 
"/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE5799f1cbe2040e0f4feffabdf4205e0f.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE5799f1cbe2040e0f4feffabdf4205e0f/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE5799f1cbe2040e0f4feffabdf4205e0f/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE5799f1cbe2040e0f4feffabdf4205e0f"}, "emitted_at": 1655893266538} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CA0a47223735162e1a7df2738327bda2ab", "conference_sid": null, "date_created": "2022-06-17T13:36:05Z", "date_updated": "2022-06-17T13:36:05Z", "start_time": "2022-06-17T13:36:04Z", "duration": 1, "sid": "REf9da8f2ee24dd48ec0d65efb7b046713", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REf9da8f2ee24dd48ec0d65efb7b046713.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REf9da8f2ee24dd48ec0d65efb7b046713/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REf9da8f2ee24dd48ec0d65efb7b046713/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REf9da8f2ee24dd48ec0d65efb7b046713"}, "emitted_at": 1655893266539} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CA0a47223735162e1a7df2738327bda2ab", "conference_sid": null, "date_created": "2022-06-17T13:36:03Z", "date_updated": "2022-06-17T13:36:03Z", 
"start_time": "2022-06-17T13:36:03Z", "duration": 1, "sid": "RE82a084056088352813ef188cbd1bfc94", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE82a084056088352813ef188cbd1bfc94.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE82a084056088352813ef188cbd1bfc94/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE82a084056088352813ef188cbd1bfc94/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE82a084056088352813ef188cbd1bfc94"}, "emitted_at": 1655893266540} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CA0a47223735162e1a7df2738327bda2ab", "conference_sid": null, "date_created": "2022-06-17T13:36:02Z", "date_updated": "2022-06-17T13:36:02Z", "start_time": "2022-06-17T13:36:01Z", "duration": 1, "sid": "REfdbd40d53da501f4899db3cb0603079c", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REfdbd40d53da501f4899db3cb0603079c.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REfdbd40d53da501f4899db3cb0603079c/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REfdbd40d53da501f4899db3cb0603079c/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REfdbd40d53da501f4899db3cb0603079c"}, "emitted_at": 1655893266541} +{"stream": "recordings", "data": 
{"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CA0a47223735162e1a7df2738327bda2ab", "conference_sid": null, "date_created": "2022-06-17T13:36:00Z", "date_updated": "2022-06-17T13:36:00Z", "start_time": "2022-06-17T13:36:00Z", "duration": 1, "sid": "REd0bc1aa266edaef6b7609b0e53183188", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REd0bc1aa266edaef6b7609b0e53183188.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REd0bc1aa266edaef6b7609b0e53183188/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REd0bc1aa266edaef6b7609b0e53183188/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REd0bc1aa266edaef6b7609b0e53183188"}, "emitted_at": 1655893266542} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CA0a47223735162e1a7df2738327bda2ab", "conference_sid": null, "date_created": "2022-06-17T13:35:58Z", "date_updated": "2022-06-17T13:35:59Z", "start_time": "2022-06-17T13:35:58Z", "duration": 1, "sid": "REfe32fa293192aa93ce2ca0eeada6d040", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REfe32fa293192aa93ce2ca0eeada6d040.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REfe32fa293192aa93ce2ca0eeada6d040/AddOnResults.json", "transcriptions": 
"/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REfe32fa293192aa93ce2ca0eeada6d040/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REfe32fa293192aa93ce2ca0eeada6d040"}, "emitted_at": 1655893266543} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CA0a47223735162e1a7df2738327bda2ab", "conference_sid": null, "date_created": "2022-06-17T13:35:57Z", "date_updated": "2022-06-17T13:35:57Z", "start_time": "2022-06-17T13:35:56Z", "duration": 1, "sid": "RE9625b764f6a22bcad8f26af9b3785ef6", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE9625b764f6a22bcad8f26af9b3785ef6.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE9625b764f6a22bcad8f26af9b3785ef6/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE9625b764f6a22bcad8f26af9b3785ef6/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE9625b764f6a22bcad8f26af9b3785ef6"}, "emitted_at": 1655893266544} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CA0a47223735162e1a7df2738327bda2ab", "conference_sid": null, "date_created": "2022-06-17T13:35:55Z", "date_updated": "2022-06-17T13:35:55Z", "start_time": "2022-06-17T13:35:55Z", "duration": 1, "sid": "REf9c06319d1022419a85ec8a5e1aa1cea", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": 
"/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REf9c06319d1022419a85ec8a5e1aa1cea.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REf9c06319d1022419a85ec8a5e1aa1cea/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REf9c06319d1022419a85ec8a5e1aa1cea/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REf9c06319d1022419a85ec8a5e1aa1cea"}, "emitted_at": 1655893266545} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CA0a47223735162e1a7df2738327bda2ab", "conference_sid": null, "date_created": "2022-06-17T13:35:54Z", "date_updated": "2022-06-17T13:35:54Z", "start_time": "2022-06-17T13:35:53Z", "duration": 1, "sid": "RE27a5335892861948409828a125956a9c", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE27a5335892861948409828a125956a9c.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE27a5335892861948409828a125956a9c/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE27a5335892861948409828a125956a9c/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE27a5335892861948409828a125956a9c"}, "emitted_at": 1655893266546} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CA0a47223735162e1a7df2738327bda2ab", "conference_sid": null, "date_created": "2022-06-17T13:35:52Z", "date_updated": "2022-06-17T13:35:52Z", 
"start_time": "2022-06-17T13:35:52Z", "duration": 1, "sid": "RE3a4e53f46f576807ffe2507f4927a3f5", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE3a4e53f46f576807ffe2507f4927a3f5.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE3a4e53f46f576807ffe2507f4927a3f5/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE3a4e53f46f576807ffe2507f4927a3f5/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE3a4e53f46f576807ffe2507f4927a3f5"}, "emitted_at": 1655893266547} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CA0a47223735162e1a7df2738327bda2ab", "conference_sid": null, "date_created": "2022-06-17T13:35:51Z", "date_updated": "2022-06-17T13:35:51Z", "start_time": "2022-06-17T13:35:50Z", "duration": 1, "sid": "REdd96a152c36888e0d3d24dda821c3fc7", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REdd96a152c36888e0d3d24dda821c3fc7.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REdd96a152c36888e0d3d24dda821c3fc7/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REdd96a152c36888e0d3d24dda821c3fc7/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REdd96a152c36888e0d3d24dda821c3fc7"}, "emitted_at": 1655893266548} +{"stream": "recordings", "data": 
{"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CA0a47223735162e1a7df2738327bda2ab", "conference_sid": null, "date_created": "2022-06-17T13:35:49Z", "date_updated": "2022-06-17T13:35:49Z", "start_time": "2022-06-17T13:35:48Z", "duration": 1, "sid": "RE84f67f2c99bd95e58fdfcf2ad74021ef", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE84f67f2c99bd95e58fdfcf2ad74021ef.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE84f67f2c99bd95e58fdfcf2ad74021ef/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE84f67f2c99bd95e58fdfcf2ad74021ef/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE84f67f2c99bd95e58fdfcf2ad74021ef"}, "emitted_at": 1655893266549} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CA0a47223735162e1a7df2738327bda2ab", "conference_sid": null, "date_created": "2022-06-17T13:35:47Z", "date_updated": "2022-06-17T13:35:48Z", "start_time": "2022-06-17T13:35:47Z", "duration": 1, "sid": "REb8ed060d4e2bb41409e06129db59e98f", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REb8ed060d4e2bb41409e06129db59e98f.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REb8ed060d4e2bb41409e06129db59e98f/AddOnResults.json", "transcriptions": 
"/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REb8ed060d4e2bb41409e06129db59e98f/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REb8ed060d4e2bb41409e06129db59e98f"}, "emitted_at": 1655893266550} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CA0a47223735162e1a7df2738327bda2ab", "conference_sid": null, "date_created": "2022-06-17T13:35:46Z", "date_updated": "2022-06-17T13:35:46Z", "start_time": "2022-06-17T13:35:45Z", "duration": 1, "sid": "RE560bc49437eecec7f3dd450e3fccce65", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE560bc49437eecec7f3dd450e3fccce65.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE560bc49437eecec7f3dd450e3fccce65/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE560bc49437eecec7f3dd450e3fccce65/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE560bc49437eecec7f3dd450e3fccce65"}, "emitted_at": 1655893266551} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CA0a47223735162e1a7df2738327bda2ab", "conference_sid": null, "date_created": "2022-06-17T13:35:44Z", "date_updated": "2022-06-17T13:35:45Z", "start_time": "2022-06-17T13:35:44Z", "duration": 1, "sid": "REe86da9156c119eccc0f788ab97bc8bac", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": 
"/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REe86da9156c119eccc0f788ab97bc8bac.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REe86da9156c119eccc0f788ab97bc8bac/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REe86da9156c119eccc0f788ab97bc8bac/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REe86da9156c119eccc0f788ab97bc8bac"}, "emitted_at": 1655893266552} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CA0a47223735162e1a7df2738327bda2ab", "conference_sid": null, "date_created": "2022-06-17T13:35:43Z", "date_updated": "2022-06-17T13:35:43Z", "start_time": "2022-06-17T13:35:42Z", "duration": 1, "sid": "REac0bc35987b2ab8ef338c3095bd5f889", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REac0bc35987b2ab8ef338c3095bd5f889.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REac0bc35987b2ab8ef338c3095bd5f889/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REac0bc35987b2ab8ef338c3095bd5f889/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REac0bc35987b2ab8ef338c3095bd5f889"}, "emitted_at": 1655893266553} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CA0a47223735162e1a7df2738327bda2ab", "conference_sid": null, "date_created": "2022-06-17T13:35:41Z", "date_updated": "2022-06-17T13:35:41Z", 
"start_time": "2022-06-17T13:35:40Z", "duration": 1, "sid": "REb7089180a8a23ab4e3d6dda41c4eddc9", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REb7089180a8a23ab4e3d6dda41c4eddc9.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REb7089180a8a23ab4e3d6dda41c4eddc9/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REb7089180a8a23ab4e3d6dda41c4eddc9/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REb7089180a8a23ab4e3d6dda41c4eddc9"}, "emitted_at": 1655893266554} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CA0a47223735162e1a7df2738327bda2ab", "conference_sid": null, "date_created": "2022-06-17T13:35:40Z", "date_updated": "2022-06-17T13:35:40Z", "start_time": "2022-06-17T13:35:39Z", "duration": 1, "sid": "REe9d545c94659b5e0540897c90ee93750", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REe9d545c94659b5e0540897c90ee93750.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REe9d545c94659b5e0540897c90ee93750/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REe9d545c94659b5e0540897c90ee93750/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REe9d545c94659b5e0540897c90ee93750"}, "emitted_at": 1655893266555} +{"stream": "recordings", "data": 
{"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CA0a47223735162e1a7df2738327bda2ab", "conference_sid": null, "date_created": "2022-06-17T13:35:38Z", "date_updated": "2022-06-17T13:35:38Z", "start_time": "2022-06-17T13:35:37Z", "duration": 1, "sid": "REd9db8615d297d34f1338ea17db4920cf", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REd9db8615d297d34f1338ea17db4920cf.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REd9db8615d297d34f1338ea17db4920cf/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REd9db8615d297d34f1338ea17db4920cf/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REd9db8615d297d34f1338ea17db4920cf"}, "emitted_at": 1655893266556} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CA0a47223735162e1a7df2738327bda2ab", "conference_sid": null, "date_created": "2022-06-17T13:35:36Z", "date_updated": "2022-06-17T13:35:37Z", "start_time": "2022-06-17T13:35:36Z", "duration": 1, "sid": "REa43b05f02613287758265bea1f694e2d", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REa43b05f02613287758265bea1f694e2d.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REa43b05f02613287758265bea1f694e2d/AddOnResults.json", "transcriptions": 
"/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REa43b05f02613287758265bea1f694e2d/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REa43b05f02613287758265bea1f694e2d"}, "emitted_at": 1655893266557} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CA0a47223735162e1a7df2738327bda2ab", "conference_sid": null, "date_created": "2022-06-17T13:35:35Z", "date_updated": "2022-06-17T13:35:35Z", "start_time": "2022-06-17T13:35:34Z", "duration": 1, "sid": "RE02a60572ccee575fccbfd02efced705c", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE02a60572ccee575fccbfd02efced705c.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE02a60572ccee575fccbfd02efced705c/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE02a60572ccee575fccbfd02efced705c/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE02a60572ccee575fccbfd02efced705c"}, "emitted_at": 1655893266558} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CA0a47223735162e1a7df2738327bda2ab", "conference_sid": null, "date_created": "2022-06-17T13:35:33Z", "date_updated": "2022-06-17T13:35:33Z", "start_time": "2022-06-17T13:35:33Z", "duration": 1, "sid": "REb9d31f7cb7e9adc461e9816e44f2978a", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": 
"/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REb9d31f7cb7e9adc461e9816e44f2978a.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REb9d31f7cb7e9adc461e9816e44f2978a/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REb9d31f7cb7e9adc461e9816e44f2978a/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REb9d31f7cb7e9adc461e9816e44f2978a"}, "emitted_at": 1655893266559} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CA0a47223735162e1a7df2738327bda2ab", "conference_sid": null, "date_created": "2022-06-17T13:35:31Z", "date_updated": "2022-06-17T13:35:32Z", "start_time": "2022-06-17T13:35:31Z", "duration": 1, "sid": "RE46fd287f56f5912da3e6ef275d51299b", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE46fd287f56f5912da3e6ef275d51299b.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE46fd287f56f5912da3e6ef275d51299b/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE46fd287f56f5912da3e6ef275d51299b/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE46fd287f56f5912da3e6ef275d51299b"}, "emitted_at": 1655893266560} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CA0a47223735162e1a7df2738327bda2ab", "conference_sid": null, "date_created": "2022-06-17T13:35:30Z", "date_updated": "2022-06-17T13:35:30Z", 
"start_time": "2022-06-17T13:35:29Z", "duration": 1, "sid": "RE36dd041b437725fd2baef501a4c26cf3", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE36dd041b437725fd2baef501a4c26cf3.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE36dd041b437725fd2baef501a4c26cf3/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE36dd041b437725fd2baef501a4c26cf3/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE36dd041b437725fd2baef501a4c26cf3"}, "emitted_at": 1655893266561} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CA0a47223735162e1a7df2738327bda2ab", "conference_sid": null, "date_created": "2022-06-17T13:35:28Z", "date_updated": "2022-06-17T13:35:29Z", "start_time": "2022-06-17T13:35:28Z", "duration": 1, "sid": "REcf3c319bdef8ba2dd5945347a907a117", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REcf3c319bdef8ba2dd5945347a907a117.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REcf3c319bdef8ba2dd5945347a907a117/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REcf3c319bdef8ba2dd5945347a907a117/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REcf3c319bdef8ba2dd5945347a907a117"}, "emitted_at": 1655893266562} +{"stream": "recordings", "data": 
{"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CA0a47223735162e1a7df2738327bda2ab", "conference_sid": null, "date_created": "2022-06-17T13:35:27Z", "date_updated": "2022-06-17T13:35:27Z", "start_time": "2022-06-17T13:35:26Z", "duration": 1, "sid": "RE115efa4c06f53e04cf7fd54438474659", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE115efa4c06f53e04cf7fd54438474659.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE115efa4c06f53e04cf7fd54438474659/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE115efa4c06f53e04cf7fd54438474659/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE115efa4c06f53e04cf7fd54438474659"}, "emitted_at": 1655893266563} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CA0a47223735162e1a7df2738327bda2ab", "conference_sid": null, "date_created": "2022-06-17T13:35:25Z", "date_updated": "2022-06-17T13:35:25Z", "start_time": "2022-06-17T13:35:25Z", "duration": 1, "sid": "REba2b65355ef209c0f31a7a8dcbf3d6d2", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REba2b65355ef209c0f31a7a8dcbf3d6d2.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REba2b65355ef209c0f31a7a8dcbf3d6d2/AddOnResults.json", "transcriptions": 
"/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REba2b65355ef209c0f31a7a8dcbf3d6d2/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REba2b65355ef209c0f31a7a8dcbf3d6d2"}, "emitted_at": 1655893266564} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CA0a47223735162e1a7df2738327bda2ab", "conference_sid": null, "date_created": "2022-06-17T13:35:24Z", "date_updated": "2022-06-17T13:35:24Z", "start_time": "2022-06-17T13:35:23Z", "duration": 1, "sid": "RE9a617be4dbac683ddfc59e36fba9b263", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE9a617be4dbac683ddfc59e36fba9b263.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE9a617be4dbac683ddfc59e36fba9b263/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE9a617be4dbac683ddfc59e36fba9b263/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE9a617be4dbac683ddfc59e36fba9b263"}, "emitted_at": 1655893266565} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CA0a47223735162e1a7df2738327bda2ab", "conference_sid": null, "date_created": "2022-06-17T13:35:22Z", "date_updated": "2022-06-17T13:35:22Z", "start_time": "2022-06-17T13:35:21Z", "duration": 1, "sid": "RE3255c00c8836585b0d851669212bfd33", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": 
"/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE3255c00c8836585b0d851669212bfd33.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE3255c00c8836585b0d851669212bfd33/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE3255c00c8836585b0d851669212bfd33/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE3255c00c8836585b0d851669212bfd33"}, "emitted_at": 1655893266566} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CA0a47223735162e1a7df2738327bda2ab", "conference_sid": null, "date_created": "2022-06-17T13:35:20Z", "date_updated": "2022-06-17T13:35:21Z", "start_time": "2022-06-17T13:35:20Z", "duration": 1, "sid": "RE9b4a4c607f1d5dced4aa3b3d43027f6c", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE9b4a4c607f1d5dced4aa3b3d43027f6c.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE9b4a4c607f1d5dced4aa3b3d43027f6c/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE9b4a4c607f1d5dced4aa3b3d43027f6c/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE9b4a4c607f1d5dced4aa3b3d43027f6c"}, "emitted_at": 1655893266566} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CA0a47223735162e1a7df2738327bda2ab", "conference_sid": null, "date_created": "2022-06-17T13:35:19Z", "date_updated": "2022-06-17T13:35:19Z", 
"start_time": "2022-06-17T13:35:18Z", "duration": 1, "sid": "RE9783d51eb481bb328ce2585f029a1774", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE9783d51eb481bb328ce2585f029a1774.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE9783d51eb481bb328ce2585f029a1774/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE9783d51eb481bb328ce2585f029a1774/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE9783d51eb481bb328ce2585f029a1774"}, "emitted_at": 1655893266567} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CA0a47223735162e1a7df2738327bda2ab", "conference_sid": null, "date_created": "2022-06-17T13:35:17Z", "date_updated": "2022-06-17T13:35:17Z", "start_time": "2022-06-17T13:35:17Z", "duration": 1, "sid": "REd1846952e3c3581d5ad63fffa61552db", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REd1846952e3c3581d5ad63fffa61552db.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REd1846952e3c3581d5ad63fffa61552db/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REd1846952e3c3581d5ad63fffa61552db/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REd1846952e3c3581d5ad63fffa61552db"}, "emitted_at": 1655893266568} +{"stream": "recordings", "data": 
{"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CA0a47223735162e1a7df2738327bda2ab", "conference_sid": null, "date_created": "2022-06-17T13:35:16Z", "date_updated": "2022-06-17T13:35:16Z", "start_time": "2022-06-17T13:35:15Z", "duration": 1, "sid": "RE8cf53159431ecb6f89d943bc45c9d0c8", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE8cf53159431ecb6f89d943bc45c9d0c8.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE8cf53159431ecb6f89d943bc45c9d0c8/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE8cf53159431ecb6f89d943bc45c9d0c8/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE8cf53159431ecb6f89d943bc45c9d0c8"}, "emitted_at": 1655893266569} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CA0a47223735162e1a7df2738327bda2ab", "conference_sid": null, "date_created": "2022-06-17T13:35:14Z", "date_updated": "2022-06-17T13:35:14Z", "start_time": "2022-06-17T13:35:14Z", "duration": 1, "sid": "REcf039af0921151d81ffb1ecb55715b5d", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REcf039af0921151d81ffb1ecb55715b5d.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REcf039af0921151d81ffb1ecb55715b5d/AddOnResults.json", "transcriptions": 
"/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REcf039af0921151d81ffb1ecb55715b5d/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REcf039af0921151d81ffb1ecb55715b5d"}, "emitted_at": 1655893266570} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CA0a47223735162e1a7df2738327bda2ab", "conference_sid": null, "date_created": "2022-06-17T13:35:12Z", "date_updated": "2022-06-17T13:35:13Z", "start_time": "2022-06-17T13:35:12Z", "duration": 1, "sid": "REffd1f140fe1a9cb16fee50b3ea30edde", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REffd1f140fe1a9cb16fee50b3ea30edde.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REffd1f140fe1a9cb16fee50b3ea30edde/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REffd1f140fe1a9cb16fee50b3ea30edde/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REffd1f140fe1a9cb16fee50b3ea30edde"}, "emitted_at": 1655893266571} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CA0a47223735162e1a7df2738327bda2ab", "conference_sid": null, "date_created": "2022-06-17T13:35:11Z", "date_updated": "2022-06-17T13:35:11Z", "start_time": "2022-06-17T13:35:10Z", "duration": 1, "sid": "RE9276f81d5d59312058d61aaec4213d3e", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": 
"/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE9276f81d5d59312058d61aaec4213d3e.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE9276f81d5d59312058d61aaec4213d3e/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE9276f81d5d59312058d61aaec4213d3e/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE9276f81d5d59312058d61aaec4213d3e"}, "emitted_at": 1655893266572} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CA0a47223735162e1a7df2738327bda2ab", "conference_sid": null, "date_created": "2022-06-17T13:35:09Z", "date_updated": "2022-06-17T13:35:10Z", "start_time": "2022-06-17T13:35:09Z", "duration": 1, "sid": "RE59db114c487ae28d7ed31e813bf960a4", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE59db114c487ae28d7ed31e813bf960a4.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE59db114c487ae28d7ed31e813bf960a4/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE59db114c487ae28d7ed31e813bf960a4/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE59db114c487ae28d7ed31e813bf960a4"}, "emitted_at": 1655893266573} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CA0a47223735162e1a7df2738327bda2ab", "conference_sid": null, "date_created": "2022-06-17T13:35:08Z", "date_updated": "2022-06-17T13:35:08Z", 
"start_time": "2022-06-17T13:35:07Z", "duration": 1, "sid": "REbf7867b95517e64863cc925d002f045f", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REbf7867b95517e64863cc925d002f045f.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REbf7867b95517e64863cc925d002f045f/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REbf7867b95517e64863cc925d002f045f/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REbf7867b95517e64863cc925d002f045f"}, "emitted_at": 1655893266574} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CA0a47223735162e1a7df2738327bda2ab", "conference_sid": null, "date_created": "2022-06-17T13:35:06Z", "date_updated": "2022-06-17T13:35:06Z", "start_time": "2022-06-17T13:35:06Z", "duration": 1, "sid": "REacefd37adb483ae47662954c9e9f6adb", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REacefd37adb483ae47662954c9e9f6adb.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REacefd37adb483ae47662954c9e9f6adb/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REacefd37adb483ae47662954c9e9f6adb/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REacefd37adb483ae47662954c9e9f6adb"}, "emitted_at": 1655893266575} +{"stream": "recordings", "data": 
{"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CA0a47223735162e1a7df2738327bda2ab", "conference_sid": null, "date_created": "2022-06-17T13:35:05Z", "date_updated": "2022-06-17T13:35:05Z", "start_time": "2022-06-17T13:35:04Z", "duration": 1, "sid": "RE8885ba402515dcd547596d8207fa0b09", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE8885ba402515dcd547596d8207fa0b09.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE8885ba402515dcd547596d8207fa0b09/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE8885ba402515dcd547596d8207fa0b09/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE8885ba402515dcd547596d8207fa0b09"}, "emitted_at": 1655893266576} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CA0a47223735162e1a7df2738327bda2ab", "conference_sid": null, "date_created": "2022-06-17T13:35:03Z", "date_updated": "2022-06-17T13:35:03Z", "start_time": "2022-06-17T13:35:03Z", "duration": 1, "sid": "REf19c68e4517ef4fbca8faa960a44df9a", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REf19c68e4517ef4fbca8faa960a44df9a.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REf19c68e4517ef4fbca8faa960a44df9a/AddOnResults.json", "transcriptions": 
"/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REf19c68e4517ef4fbca8faa960a44df9a/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REf19c68e4517ef4fbca8faa960a44df9a"}, "emitted_at": 1655893266577} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CA0a47223735162e1a7df2738327bda2ab", "conference_sid": null, "date_created": "2022-06-17T13:35:01Z", "date_updated": "2022-06-17T13:35:02Z", "start_time": "2022-06-17T13:35:01Z", "duration": 1, "sid": "RE70b3661407eb1317eec63ce9ca78e570", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE70b3661407eb1317eec63ce9ca78e570.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE70b3661407eb1317eec63ce9ca78e570/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE70b3661407eb1317eec63ce9ca78e570/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE70b3661407eb1317eec63ce9ca78e570"}, "emitted_at": 1655893266578} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CA0a47223735162e1a7df2738327bda2ab", "conference_sid": null, "date_created": "2022-06-17T13:35:00Z", "date_updated": "2022-06-17T13:35:00Z", "start_time": "2022-06-17T13:34:59Z", "duration": 1, "sid": "RE5249b434c9f99d2b5d940886ea9cccdf", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": 
"/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE5249b434c9f99d2b5d940886ea9cccdf.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE5249b434c9f99d2b5d940886ea9cccdf/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE5249b434c9f99d2b5d940886ea9cccdf/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE5249b434c9f99d2b5d940886ea9cccdf"}, "emitted_at": 1655893266579} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CA0a47223735162e1a7df2738327bda2ab", "conference_sid": null, "date_created": "2022-06-17T13:34:58Z", "date_updated": "2022-06-17T13:34:59Z", "start_time": "2022-06-17T13:34:58Z", "duration": 1, "sid": "REdfe7711802ffbfc278619b7beb5f5e0d", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REdfe7711802ffbfc278619b7beb5f5e0d.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REdfe7711802ffbfc278619b7beb5f5e0d/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REdfe7711802ffbfc278619b7beb5f5e0d/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REdfe7711802ffbfc278619b7beb5f5e0d"}, "emitted_at": 1655893266580} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CA0a47223735162e1a7df2738327bda2ab", "conference_sid": null, "date_created": "2022-06-17T13:34:57Z", "date_updated": "2022-06-17T13:34:57Z", 
"start_time": "2022-06-17T13:34:56Z", "duration": 1, "sid": "RE5105310556bf50aa176587abda779511", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE5105310556bf50aa176587abda779511.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE5105310556bf50aa176587abda779511/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE5105310556bf50aa176587abda779511/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE5105310556bf50aa176587abda779511"}, "emitted_at": 1655893266581} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CA0a47223735162e1a7df2738327bda2ab", "conference_sid": null, "date_created": "2022-06-17T13:34:55Z", "date_updated": "2022-06-17T13:34:55Z", "start_time": "2022-06-17T13:34:55Z", "duration": 1, "sid": "RE7feaa50a0d102ccc86296fa350bfed03", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE7feaa50a0d102ccc86296fa350bfed03.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE7feaa50a0d102ccc86296fa350bfed03/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE7feaa50a0d102ccc86296fa350bfed03/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE7feaa50a0d102ccc86296fa350bfed03"}, "emitted_at": 1655893266582} +{"stream": "recordings", "data": 
{"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CA0a47223735162e1a7df2738327bda2ab", "conference_sid": null, "date_created": "2022-06-17T13:34:54Z", "date_updated": "2022-06-17T13:34:54Z", "start_time": "2022-06-17T13:34:53Z", "duration": 1, "sid": "RE03bc0d83f0615ca23143c895658288ae", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE03bc0d83f0615ca23143c895658288ae.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE03bc0d83f0615ca23143c895658288ae/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE03bc0d83f0615ca23143c895658288ae/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE03bc0d83f0615ca23143c895658288ae"}, "emitted_at": 1655893266583} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CA0a47223735162e1a7df2738327bda2ab", "conference_sid": null, "date_created": "2022-06-17T13:34:52Z", "date_updated": "2022-06-17T13:34:52Z", "start_time": "2022-06-17T13:34:52Z", "duration": 1, "sid": "REe6ddf4a94df9232ebcdac6e12a5159e1", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REe6ddf4a94df9232ebcdac6e12a5159e1.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REe6ddf4a94df9232ebcdac6e12a5159e1/AddOnResults.json", "transcriptions": 
"/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REe6ddf4a94df9232ebcdac6e12a5159e1/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REe6ddf4a94df9232ebcdac6e12a5159e1"}, "emitted_at": 1655893266584} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CA0a47223735162e1a7df2738327bda2ab", "conference_sid": null, "date_created": "2022-06-17T13:34:51Z", "date_updated": "2022-06-17T13:34:51Z", "start_time": "2022-06-17T13:34:50Z", "duration": 1, "sid": "RE2ccfbf804218a00c3631ecad25c7fc91", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE2ccfbf804218a00c3631ecad25c7fc91.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE2ccfbf804218a00c3631ecad25c7fc91/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE2ccfbf804218a00c3631ecad25c7fc91/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE2ccfbf804218a00c3631ecad25c7fc91"}, "emitted_at": 1655893266585} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CA0a47223735162e1a7df2738327bda2ab", "conference_sid": null, "date_created": "2022-06-17T13:34:49Z", "date_updated": "2022-06-17T13:34:49Z", "start_time": "2022-06-17T13:34:48Z", "duration": 1, "sid": "RE561a990c9196bc9f5e60ace007eaaf68", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": 
"/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE561a990c9196bc9f5e60ace007eaaf68.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE561a990c9196bc9f5e60ace007eaaf68/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE561a990c9196bc9f5e60ace007eaaf68/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE561a990c9196bc9f5e60ace007eaaf68"}, "emitted_at": 1655893266586} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CA0a47223735162e1a7df2738327bda2ab", "conference_sid": null, "date_created": "2022-06-17T13:34:47Z", "date_updated": "2022-06-17T13:34:47Z", "start_time": "2022-06-17T13:34:46Z", "duration": 1, "sid": "RE6cbec07d46b8d8cb48d1c9df1b077eb2", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE6cbec07d46b8d8cb48d1c9df1b077eb2.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE6cbec07d46b8d8cb48d1c9df1b077eb2/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE6cbec07d46b8d8cb48d1c9df1b077eb2/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE6cbec07d46b8d8cb48d1c9df1b077eb2"}, "emitted_at": 1655893266587} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CA0a47223735162e1a7df2738327bda2ab", "conference_sid": null, "date_created": "2022-06-17T13:34:45Z", "date_updated": "2022-06-17T13:34:46Z", 
"start_time": "2022-06-17T13:34:45Z", "duration": 1, "sid": "RE4564507482cdf4ecac5296d54ecf67f0", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE4564507482cdf4ecac5296d54ecf67f0.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE4564507482cdf4ecac5296d54ecf67f0/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE4564507482cdf4ecac5296d54ecf67f0/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE4564507482cdf4ecac5296d54ecf67f0"}, "emitted_at": 1655893266588} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CA0a47223735162e1a7df2738327bda2ab", "conference_sid": null, "date_created": "2022-06-17T13:34:44Z", "date_updated": "2022-06-17T13:34:44Z", "start_time": "2022-06-17T13:34:43Z", "duration": 1, "sid": "RE8b6b55f1f6cc12ad012b497a9a3e5942", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE8b6b55f1f6cc12ad012b497a9a3e5942.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE8b6b55f1f6cc12ad012b497a9a3e5942/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE8b6b55f1f6cc12ad012b497a9a3e5942/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE8b6b55f1f6cc12ad012b497a9a3e5942"}, "emitted_at": 1655893266589} +{"stream": "recordings", "data": 
{"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CAace5c8813c499253bbbff29ad0da0ccb", "conference_sid": null, "date_created": "2022-06-16T20:02:43Z", "date_updated": "2022-06-16T20:02:43Z", "start_time": "2022-06-16T20:02:42Z", "duration": 1, "sid": "RE8fcb57e3b9216adfb9abcb0ff0cbc3e3", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE8fcb57e3b9216adfb9abcb0ff0cbc3e3.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE8fcb57e3b9216adfb9abcb0ff0cbc3e3/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE8fcb57e3b9216adfb9abcb0ff0cbc3e3/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE8fcb57e3b9216adfb9abcb0ff0cbc3e3"}, "emitted_at": 1655893266590} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CAace5c8813c499253bbbff29ad0da0ccb", "conference_sid": null, "date_created": "2022-06-16T20:02:41Z", "date_updated": "2022-06-16T20:02:41Z", "start_time": "2022-06-16T20:02:41Z", "duration": 1, "sid": "REe207f874da1e80c1f83bc2819dd7b641", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REe207f874da1e80c1f83bc2819dd7b641.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REe207f874da1e80c1f83bc2819dd7b641/AddOnResults.json", "transcriptions": 
"/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REe207f874da1e80c1f83bc2819dd7b641/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REe207f874da1e80c1f83bc2819dd7b641"}, "emitted_at": 1655893266591} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CAace5c8813c499253bbbff29ad0da0ccb", "conference_sid": null, "date_created": "2022-06-16T20:02:40Z", "date_updated": "2022-06-16T20:02:40Z", "start_time": "2022-06-16T20:02:39Z", "duration": 1, "sid": "REfc5f11ffc7e5c949f1eaec45f82c2262", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REfc5f11ffc7e5c949f1eaec45f82c2262.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REfc5f11ffc7e5c949f1eaec45f82c2262/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REfc5f11ffc7e5c949f1eaec45f82c2262/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REfc5f11ffc7e5c949f1eaec45f82c2262"}, "emitted_at": 1655893266592} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CAace5c8813c499253bbbff29ad0da0ccb", "conference_sid": null, "date_created": "2022-06-16T20:02:38Z", "date_updated": "2022-06-16T20:02:38Z", "start_time": "2022-06-16T20:02:38Z", "duration": 1, "sid": "RE9254344850897a2eb582a6496449c989", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": 
"/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE9254344850897a2eb582a6496449c989.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE9254344850897a2eb582a6496449c989/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE9254344850897a2eb582a6496449c989/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE9254344850897a2eb582a6496449c989"}, "emitted_at": 1655893266593} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CAace5c8813c499253bbbff29ad0da0ccb", "conference_sid": null, "date_created": "2022-06-16T20:02:36Z", "date_updated": "2022-06-16T20:02:37Z", "start_time": "2022-06-16T20:02:36Z", "duration": 1, "sid": "REe4bc08352b2c9c6f72a3d2a4d9e03dfb", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REe4bc08352b2c9c6f72a3d2a4d9e03dfb.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REe4bc08352b2c9c6f72a3d2a4d9e03dfb/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REe4bc08352b2c9c6f72a3d2a4d9e03dfb/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REe4bc08352b2c9c6f72a3d2a4d9e03dfb"}, "emitted_at": 1655893266594} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CAace5c8813c499253bbbff29ad0da0ccb", "conference_sid": null, "date_created": "2022-06-16T20:02:35Z", "date_updated": "2022-06-16T20:02:35Z", 
"start_time": "2022-06-16T20:02:34Z", "duration": 1, "sid": "RE7df69170d2c286cb6f88ec4b57854baa", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE7df69170d2c286cb6f88ec4b57854baa.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE7df69170d2c286cb6f88ec4b57854baa/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE7df69170d2c286cb6f88ec4b57854baa/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE7df69170d2c286cb6f88ec4b57854baa"}, "emitted_at": 1655893266595} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CAace5c8813c499253bbbff29ad0da0ccb", "conference_sid": null, "date_created": "2022-06-16T20:02:33Z", "date_updated": "2022-06-16T20:02:33Z", "start_time": "2022-06-16T20:02:33Z", "duration": 1, "sid": "RE7f3820a754aa4594b586fb1bd3558da3", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE7f3820a754aa4594b586fb1bd3558da3.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE7f3820a754aa4594b586fb1bd3558da3/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE7f3820a754aa4594b586fb1bd3558da3/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE7f3820a754aa4594b586fb1bd3558da3"}, "emitted_at": 1655893266596} +{"stream": "recordings", "data": 
{"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CAace5c8813c499253bbbff29ad0da0ccb", "conference_sid": null, "date_created": "2022-06-16T20:02:32Z", "date_updated": "2022-06-16T20:02:32Z", "start_time": "2022-06-16T20:02:31Z", "duration": 1, "sid": "RE0f6a49b976c5523ab536ee200a27b1ae", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE0f6a49b976c5523ab536ee200a27b1ae.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE0f6a49b976c5523ab536ee200a27b1ae/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE0f6a49b976c5523ab536ee200a27b1ae/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE0f6a49b976c5523ab536ee200a27b1ae"}, "emitted_at": 1655893266597} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CAace5c8813c499253bbbff29ad0da0ccb", "conference_sid": null, "date_created": "2022-06-16T20:02:30Z", "date_updated": "2022-06-16T20:02:30Z", "start_time": "2022-06-16T20:02:30Z", "duration": 1, "sid": "RE428fa0eafa86a55a09890a898344dff8", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE428fa0eafa86a55a09890a898344dff8.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE428fa0eafa86a55a09890a898344dff8/AddOnResults.json", "transcriptions": 
"/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE428fa0eafa86a55a09890a898344dff8/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE428fa0eafa86a55a09890a898344dff8"}, "emitted_at": 1655893266597} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CAace5c8813c499253bbbff29ad0da0ccb", "conference_sid": null, "date_created": "2022-06-16T20:02:29Z", "date_updated": "2022-06-16T20:02:29Z", "start_time": "2022-06-16T20:02:28Z", "duration": 1, "sid": "RE8360da2751799fb5ca76d9b6803ce97e", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE8360da2751799fb5ca76d9b6803ce97e.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE8360da2751799fb5ca76d9b6803ce97e/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE8360da2751799fb5ca76d9b6803ce97e/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE8360da2751799fb5ca76d9b6803ce97e"}, "emitted_at": 1655893266598} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CAace5c8813c499253bbbff29ad0da0ccb", "conference_sid": null, "date_created": "2022-06-16T20:02:27Z", "date_updated": "2022-06-16T20:02:27Z", "start_time": "2022-06-16T20:02:27Z", "duration": 1, "sid": "RE97e5747be3f0eee0a50cdfa074984e59", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": 
"/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE97e5747be3f0eee0a50cdfa074984e59.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE97e5747be3f0eee0a50cdfa074984e59/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE97e5747be3f0eee0a50cdfa074984e59/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE97e5747be3f0eee0a50cdfa074984e59"}, "emitted_at": 1655893266599} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CAace5c8813c499253bbbff29ad0da0ccb", "conference_sid": null, "date_created": "2022-06-16T20:02:25Z", "date_updated": "2022-06-16T20:02:26Z", "start_time": "2022-06-16T20:02:25Z", "duration": 1, "sid": "RE46168f5772673e6919d770ef582b64c9", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE46168f5772673e6919d770ef582b64c9.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE46168f5772673e6919d770ef582b64c9/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE46168f5772673e6919d770ef582b64c9/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE46168f5772673e6919d770ef582b64c9"}, "emitted_at": 1655893266885} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CAace5c8813c499253bbbff29ad0da0ccb", "conference_sid": null, "date_created": "2022-06-16T20:02:24Z", "date_updated": "2022-06-16T20:02:24Z", 
"start_time": "2022-06-16T20:02:23Z", "duration": 1, "sid": "RE22ea150bebe92d557b63edcd7eef1152", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE22ea150bebe92d557b63edcd7eef1152.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE22ea150bebe92d557b63edcd7eef1152/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE22ea150bebe92d557b63edcd7eef1152/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE22ea150bebe92d557b63edcd7eef1152"}, "emitted_at": 1655893266890} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CAace5c8813c499253bbbff29ad0da0ccb", "conference_sid": null, "date_created": "2022-06-16T20:02:22Z", "date_updated": "2022-06-16T20:02:23Z", "start_time": "2022-06-16T20:02:22Z", "duration": 1, "sid": "RE69bb14ef1d1402eba60424f425a11837", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE69bb14ef1d1402eba60424f425a11837.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE69bb14ef1d1402eba60424f425a11837/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE69bb14ef1d1402eba60424f425a11837/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE69bb14ef1d1402eba60424f425a11837"}, "emitted_at": 1655893266894} +{"stream": "recordings", "data": 
{"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CAace5c8813c499253bbbff29ad0da0ccb", "conference_sid": null, "date_created": "2022-06-16T20:02:21Z", "date_updated": "2022-06-16T20:02:21Z", "start_time": "2022-06-16T20:02:20Z", "duration": 1, "sid": "RE0ab113ea0232f99b50795f8be31f16e6", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE0ab113ea0232f99b50795f8be31f16e6.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE0ab113ea0232f99b50795f8be31f16e6/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE0ab113ea0232f99b50795f8be31f16e6/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE0ab113ea0232f99b50795f8be31f16e6"}, "emitted_at": 1655893266899} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CAace5c8813c499253bbbff29ad0da0ccb", "conference_sid": null, "date_created": "2022-06-16T20:02:19Z", "date_updated": "2022-06-16T20:02:20Z", "start_time": "2022-06-16T20:02:19Z", "duration": 1, "sid": "RE038407befb8fe42e562b0a59badfeb4d", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE038407befb8fe42e562b0a59badfeb4d.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE038407befb8fe42e562b0a59badfeb4d/AddOnResults.json", "transcriptions": 
"/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE038407befb8fe42e562b0a59badfeb4d/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE038407befb8fe42e562b0a59badfeb4d"}, "emitted_at": 1655893266904} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CAace5c8813c499253bbbff29ad0da0ccb", "conference_sid": null, "date_created": "2022-06-16T20:02:18Z", "date_updated": "2022-06-16T20:02:18Z", "start_time": "2022-06-16T20:02:17Z", "duration": 1, "sid": "RE9cc3aba5c16bdebfbb0793d10635f097", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE9cc3aba5c16bdebfbb0793d10635f097.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE9cc3aba5c16bdebfbb0793d10635f097/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE9cc3aba5c16bdebfbb0793d10635f097/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE9cc3aba5c16bdebfbb0793d10635f097"}, "emitted_at": 1655893266908} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CAace5c8813c499253bbbff29ad0da0ccb", "conference_sid": null, "date_created": "2022-06-16T20:02:16Z", "date_updated": "2022-06-16T20:02:16Z", "start_time": "2022-06-16T20:02:16Z", "duration": 1, "sid": "RE24e51d000e182afb4e25f0a557ef2a10", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": 
"/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE24e51d000e182afb4e25f0a557ef2a10.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE24e51d000e182afb4e25f0a557ef2a10/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE24e51d000e182afb4e25f0a557ef2a10/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE24e51d000e182afb4e25f0a557ef2a10"}, "emitted_at": 1655893266911} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CAace5c8813c499253bbbff29ad0da0ccb", "conference_sid": null, "date_created": "2022-06-16T20:02:14Z", "date_updated": "2022-06-16T20:02:15Z", "start_time": "2022-06-16T20:02:14Z", "duration": 1, "sid": "REf35522025a8e7a7c767d16b245bb6ba6", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REf35522025a8e7a7c767d16b245bb6ba6.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REf35522025a8e7a7c767d16b245bb6ba6/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REf35522025a8e7a7c767d16b245bb6ba6/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REf35522025a8e7a7c767d16b245bb6ba6"}, "emitted_at": 1655893266914} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CAace5c8813c499253bbbff29ad0da0ccb", "conference_sid": null, "date_created": "2022-06-16T20:02:13Z", "date_updated": "2022-06-16T20:02:13Z", 
"start_time": "2022-06-16T20:02:12Z", "duration": 1, "sid": "RE812153c30dad6e033cc5e45dea98839c", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE812153c30dad6e033cc5e45dea98839c.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE812153c30dad6e033cc5e45dea98839c/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE812153c30dad6e033cc5e45dea98839c/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE812153c30dad6e033cc5e45dea98839c"}, "emitted_at": 1655893266917} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CAace5c8813c499253bbbff29ad0da0ccb", "conference_sid": null, "date_created": "2022-06-16T20:02:11Z", "date_updated": "2022-06-16T20:02:11Z", "start_time": "2022-06-16T20:02:11Z", "duration": 1, "sid": "REefba4018cccb7046d4469934b640c545", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REefba4018cccb7046d4469934b640c545.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REefba4018cccb7046d4469934b640c545/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REefba4018cccb7046d4469934b640c545/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REefba4018cccb7046d4469934b640c545"}, "emitted_at": 1655893266919} +{"stream": "recordings", "data": 
{"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CAace5c8813c499253bbbff29ad0da0ccb", "conference_sid": null, "date_created": "2022-06-16T20:02:10Z", "date_updated": "2022-06-16T20:02:10Z", "start_time": "2022-06-16T20:02:09Z", "duration": 1, "sid": "RE4202b6ddd98dd819274a81557f8a21f4", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE4202b6ddd98dd819274a81557f8a21f4.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE4202b6ddd98dd819274a81557f8a21f4/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE4202b6ddd98dd819274a81557f8a21f4/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE4202b6ddd98dd819274a81557f8a21f4"}, "emitted_at": 1655893266921} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CAace5c8813c499253bbbff29ad0da0ccb", "conference_sid": null, "date_created": "2022-06-16T20:02:08Z", "date_updated": "2022-06-16T20:02:08Z", "start_time": "2022-06-16T20:02:08Z", "duration": 1, "sid": "RE20d6d0eba503d05db6218229bf53d716", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE20d6d0eba503d05db6218229bf53d716.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE20d6d0eba503d05db6218229bf53d716/AddOnResults.json", "transcriptions": 
"/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE20d6d0eba503d05db6218229bf53d716/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE20d6d0eba503d05db6218229bf53d716"}, "emitted_at": 1655893266923} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CAace5c8813c499253bbbff29ad0da0ccb", "conference_sid": null, "date_created": "2022-06-16T20:02:07Z", "date_updated": "2022-06-16T20:02:07Z", "start_time": "2022-06-16T20:02:06Z", "duration": 1, "sid": "REe7ed48d41170651cfa867f7eeb838a00", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REe7ed48d41170651cfa867f7eeb838a00.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REe7ed48d41170651cfa867f7eeb838a00/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REe7ed48d41170651cfa867f7eeb838a00/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REe7ed48d41170651cfa867f7eeb838a00"}, "emitted_at": 1655893266925} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CAace5c8813c499253bbbff29ad0da0ccb", "conference_sid": null, "date_created": "2022-06-16T20:02:05Z", "date_updated": "2022-06-16T20:02:05Z", "start_time": "2022-06-16T20:02:05Z", "duration": 1, "sid": "REcb36de1db255c2ae771e2c0479dca682", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": 
"/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REcb36de1db255c2ae771e2c0479dca682.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REcb36de1db255c2ae771e2c0479dca682/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REcb36de1db255c2ae771e2c0479dca682/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REcb36de1db255c2ae771e2c0479dca682"}, "emitted_at": 1655893266927} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CAace5c8813c499253bbbff29ad0da0ccb", "conference_sid": null, "date_created": "2022-06-16T20:02:04Z", "date_updated": "2022-06-16T20:02:04Z", "start_time": "2022-06-16T20:02:03Z", "duration": 1, "sid": "REc23a1d0163fd970962766564968b3317", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REc23a1d0163fd970962766564968b3317.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REc23a1d0163fd970962766564968b3317/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REc23a1d0163fd970962766564968b3317/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REc23a1d0163fd970962766564968b3317"}, "emitted_at": 1655893266928} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CAace5c8813c499253bbbff29ad0da0ccb", "conference_sid": null, "date_created": "2022-06-16T20:02:02Z", "date_updated": "2022-06-16T20:02:02Z", 
"start_time": "2022-06-16T20:02:01Z", "duration": 1, "sid": "RE239a6c2c968948bcb0c6cf4118f0141a", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE239a6c2c968948bcb0c6cf4118f0141a.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE239a6c2c968948bcb0c6cf4118f0141a/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE239a6c2c968948bcb0c6cf4118f0141a/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE239a6c2c968948bcb0c6cf4118f0141a"}, "emitted_at": 1655893266930} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CAace5c8813c499253bbbff29ad0da0ccb", "conference_sid": null, "date_created": "2022-06-16T20:02:00Z", "date_updated": "2022-06-16T20:02:01Z", "start_time": "2022-06-16T20:02:00Z", "duration": 1, "sid": "RE442936bf41a0c250840b3d4c8491e679", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE442936bf41a0c250840b3d4c8491e679.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE442936bf41a0c250840b3d4c8491e679/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE442936bf41a0c250840b3d4c8491e679/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE442936bf41a0c250840b3d4c8491e679"}, "emitted_at": 1655893266931} +{"stream": "recordings", "data": 
{"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CAace5c8813c499253bbbff29ad0da0ccb", "conference_sid": null, "date_created": "2022-06-16T20:01:58Z", "date_updated": "2022-06-16T20:01:59Z", "start_time": "2022-06-16T20:01:58Z", "duration": 1, "sid": "REfda4af62f9bd0cbafc338a8bd5365247", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REfda4af62f9bd0cbafc338a8bd5365247.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REfda4af62f9bd0cbafc338a8bd5365247/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REfda4af62f9bd0cbafc338a8bd5365247/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REfda4af62f9bd0cbafc338a8bd5365247"}, "emitted_at": 1655893266933} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CAace5c8813c499253bbbff29ad0da0ccb", "conference_sid": null, "date_created": "2022-06-16T20:01:57Z", "date_updated": "2022-06-16T20:01:57Z", "start_time": "2022-06-16T20:01:56Z", "duration": 1, "sid": "RE67c1a624f0d21b9f5c5e8d057c73cbb9", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE67c1a624f0d21b9f5c5e8d057c73cbb9.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE67c1a624f0d21b9f5c5e8d057c73cbb9/AddOnResults.json", "transcriptions": 
"/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE67c1a624f0d21b9f5c5e8d057c73cbb9/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE67c1a624f0d21b9f5c5e8d057c73cbb9"}, "emitted_at": 1655893266934} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CAace5c8813c499253bbbff29ad0da0ccb", "conference_sid": null, "date_created": "2022-06-16T20:01:55Z", "date_updated": "2022-06-16T20:01:55Z", "start_time": "2022-06-16T20:01:55Z", "duration": 1, "sid": "RE4486241ce6c1a1087d1a4d0d34beb4f3", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE4486241ce6c1a1087d1a4d0d34beb4f3.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE4486241ce6c1a1087d1a4d0d34beb4f3/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE4486241ce6c1a1087d1a4d0d34beb4f3/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE4486241ce6c1a1087d1a4d0d34beb4f3"}, "emitted_at": 1655893266935} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CAace5c8813c499253bbbff29ad0da0ccb", "conference_sid": null, "date_created": "2022-06-16T20:01:54Z", "date_updated": "2022-06-16T20:01:54Z", "start_time": "2022-06-16T20:01:53Z", "duration": 1, "sid": "REa5a8a32a2f35d0a45fc6454fcf85473e", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": 
"/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REa5a8a32a2f35d0a45fc6454fcf85473e.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REa5a8a32a2f35d0a45fc6454fcf85473e/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REa5a8a32a2f35d0a45fc6454fcf85473e/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REa5a8a32a2f35d0a45fc6454fcf85473e"}, "emitted_at": 1655893266936} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CAace5c8813c499253bbbff29ad0da0ccb", "conference_sid": null, "date_created": "2022-06-16T20:01:52Z", "date_updated": "2022-06-16T20:01:52Z", "start_time": "2022-06-16T20:01:52Z", "duration": 1, "sid": "RE3e10756fca2b0cd0de866727b02d8851", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE3e10756fca2b0cd0de866727b02d8851.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE3e10756fca2b0cd0de866727b02d8851/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE3e10756fca2b0cd0de866727b02d8851/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE3e10756fca2b0cd0de866727b02d8851"}, "emitted_at": 1655893266938} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CAace5c8813c499253bbbff29ad0da0ccb", "conference_sid": null, "date_created": "2022-06-16T20:01:51Z", "date_updated": "2022-06-16T20:01:51Z", 
"start_time": "2022-06-16T20:01:50Z", "duration": 1, "sid": "RE8847d820416a2d9867618419ef426aed", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE8847d820416a2d9867618419ef426aed.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE8847d820416a2d9867618419ef426aed/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE8847d820416a2d9867618419ef426aed/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE8847d820416a2d9867618419ef426aed"}, "emitted_at": 1655893266939} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CAace5c8813c499253bbbff29ad0da0ccb", "conference_sid": null, "date_created": "2022-06-16T20:01:49Z", "date_updated": "2022-06-16T20:01:49Z", "start_time": "2022-06-16T20:01:48Z", "duration": 1, "sid": "REc4daf843d81cab7e9e30aa5fbd307b03", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REc4daf843d81cab7e9e30aa5fbd307b03.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REc4daf843d81cab7e9e30aa5fbd307b03/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REc4daf843d81cab7e9e30aa5fbd307b03/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REc4daf843d81cab7e9e30aa5fbd307b03"}, "emitted_at": 1655893266940} +{"stream": "recordings", "data": 
{"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CAace5c8813c499253bbbff29ad0da0ccb", "conference_sid": null, "date_created": "2022-06-16T20:01:47Z", "date_updated": "2022-06-16T20:01:48Z", "start_time": "2022-06-16T20:01:47Z", "duration": 1, "sid": "RE50da64dbf6291c0e4f088eff5e2a5f9c", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE50da64dbf6291c0e4f088eff5e2a5f9c.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE50da64dbf6291c0e4f088eff5e2a5f9c/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE50da64dbf6291c0e4f088eff5e2a5f9c/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE50da64dbf6291c0e4f088eff5e2a5f9c"}, "emitted_at": 1655893266941} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CAace5c8813c499253bbbff29ad0da0ccb", "conference_sid": null, "date_created": "2022-06-16T20:01:46Z", "date_updated": "2022-06-16T20:01:46Z", "start_time": "2022-06-16T20:01:45Z", "duration": 1, "sid": "REb8ccce85a8af0510ffe72d6d7f2e7511", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REb8ccce85a8af0510ffe72d6d7f2e7511.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REb8ccce85a8af0510ffe72d6d7f2e7511/AddOnResults.json", "transcriptions": 
"/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REb8ccce85a8af0510ffe72d6d7f2e7511/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REb8ccce85a8af0510ffe72d6d7f2e7511"}, "emitted_at": 1655893266942} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CAace5c8813c499253bbbff29ad0da0ccb", "conference_sid": null, "date_created": "2022-06-16T20:01:44Z", "date_updated": "2022-06-16T20:01:44Z", "start_time": "2022-06-16T20:01:44Z", "duration": 1, "sid": "RE4bafe32f34293f283df99842213bf347", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE4bafe32f34293f283df99842213bf347.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE4bafe32f34293f283df99842213bf347/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE4bafe32f34293f283df99842213bf347/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE4bafe32f34293f283df99842213bf347"}, "emitted_at": 1655893266943} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CAace5c8813c499253bbbff29ad0da0ccb", "conference_sid": null, "date_created": "2022-06-16T20:01:43Z", "date_updated": "2022-06-16T20:01:43Z", "start_time": "2022-06-16T20:01:42Z", "duration": 1, "sid": "REc520b1f3f7c8983423c6bf49c52ebb63", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": 
"/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REc520b1f3f7c8983423c6bf49c52ebb63.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REc520b1f3f7c8983423c6bf49c52ebb63/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REc520b1f3f7c8983423c6bf49c52ebb63/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REc520b1f3f7c8983423c6bf49c52ebb63"}, "emitted_at": 1655893266944} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CAace5c8813c499253bbbff29ad0da0ccb", "conference_sid": null, "date_created": "2022-06-16T20:01:41Z", "date_updated": "2022-06-16T20:01:41Z", "start_time": "2022-06-16T20:01:40Z", "duration": 1, "sid": "REf47bfb827f8a7eb26f27ea49e3e5d35e", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REf47bfb827f8a7eb26f27ea49e3e5d35e.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REf47bfb827f8a7eb26f27ea49e3e5d35e/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REf47bfb827f8a7eb26f27ea49e3e5d35e/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REf47bfb827f8a7eb26f27ea49e3e5d35e"}, "emitted_at": 1655893266945} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CAace5c8813c499253bbbff29ad0da0ccb", "conference_sid": null, "date_created": "2022-06-16T20:01:40Z", "date_updated": "2022-06-16T20:01:40Z", 
"start_time": "2022-06-16T20:01:39Z", "duration": 1, "sid": "RE06c44f89b595813862e46199ab63ffbf", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE06c44f89b595813862e46199ab63ffbf.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE06c44f89b595813862e46199ab63ffbf/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE06c44f89b595813862e46199ab63ffbf/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE06c44f89b595813862e46199ab63ffbf"}, "emitted_at": 1655893266946} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CAace5c8813c499253bbbff29ad0da0ccb", "conference_sid": null, "date_created": "2022-06-16T20:01:38Z", "date_updated": "2022-06-16T20:01:38Z", "start_time": "2022-06-16T20:01:37Z", "duration": 1, "sid": "RE9589c724924632088c0a48024e0625e8", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE9589c724924632088c0a48024e0625e8.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE9589c724924632088c0a48024e0625e8/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE9589c724924632088c0a48024e0625e8/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE9589c724924632088c0a48024e0625e8"}, "emitted_at": 1655893266947} +{"stream": "recordings", "data": 
{"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CAace5c8813c499253bbbff29ad0da0ccb", "conference_sid": null, "date_created": "2022-06-16T20:01:36Z", "date_updated": "2022-06-16T20:01:36Z", "start_time": "2022-06-16T20:01:36Z", "duration": 1, "sid": "RE98f4f11f6b401b25003c778f5a6a84cf", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE98f4f11f6b401b25003c778f5a6a84cf.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE98f4f11f6b401b25003c778f5a6a84cf/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE98f4f11f6b401b25003c778f5a6a84cf/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE98f4f11f6b401b25003c778f5a6a84cf"}, "emitted_at": 1655893266948} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CAace5c8813c499253bbbff29ad0da0ccb", "conference_sid": null, "date_created": "2022-06-16T20:01:35Z", "date_updated": "2022-06-16T20:01:35Z", "start_time": "2022-06-16T20:01:34Z", "duration": 1, "sid": "REfea1fd60331fd295d104927d5692b237", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REfea1fd60331fd295d104927d5692b237.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REfea1fd60331fd295d104927d5692b237/AddOnResults.json", "transcriptions": 
"/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REfea1fd60331fd295d104927d5692b237/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REfea1fd60331fd295d104927d5692b237"}, "emitted_at": 1655893266949} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CAace5c8813c499253bbbff29ad0da0ccb", "conference_sid": null, "date_created": "2022-06-16T20:01:33Z", "date_updated": "2022-06-16T20:01:33Z", "start_time": "2022-06-16T20:01:33Z", "duration": 1, "sid": "RE5f3ccb9a8bf774ba13c02ccb52678b7a", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE5f3ccb9a8bf774ba13c02ccb52678b7a.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE5f3ccb9a8bf774ba13c02ccb52678b7a/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE5f3ccb9a8bf774ba13c02ccb52678b7a/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE5f3ccb9a8bf774ba13c02ccb52678b7a"}, "emitted_at": 1655893266950} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CAace5c8813c499253bbbff29ad0da0ccb", "conference_sid": null, "date_created": "2022-06-16T20:01:32Z", "date_updated": "2022-06-16T20:01:32Z", "start_time": "2022-06-16T20:01:31Z", "duration": 1, "sid": "RE99d837b85affa798811ccf1a3e1088d4", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": 
"/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE99d837b85affa798811ccf1a3e1088d4.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE99d837b85affa798811ccf1a3e1088d4/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE99d837b85affa798811ccf1a3e1088d4/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE99d837b85affa798811ccf1a3e1088d4"}, "emitted_at": 1655893266951} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CAace5c8813c499253bbbff29ad0da0ccb", "conference_sid": null, "date_created": "2022-06-16T20:01:30Z", "date_updated": "2022-06-16T20:01:30Z", "start_time": "2022-06-16T20:01:29Z", "duration": 1, "sid": "RE6d9899f1224db4679c6505a1838b49e4", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE6d9899f1224db4679c6505a1838b49e4.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE6d9899f1224db4679c6505a1838b49e4/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE6d9899f1224db4679c6505a1838b49e4/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE6d9899f1224db4679c6505a1838b49e4"}, "emitted_at": 1655893266952} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CAace5c8813c499253bbbff29ad0da0ccb", "conference_sid": null, "date_created": "2022-06-16T20:01:26Z", "date_updated": "2022-06-16T20:01:29Z", 
"start_time": "2022-06-16T20:01:26Z", "duration": 2, "sid": "RE8bf14aab39ee9656698be4f3c116b2a8", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE8bf14aab39ee9656698be4f3c116b2a8.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE8bf14aab39ee9656698be4f3c116b2a8/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE8bf14aab39ee9656698be4f3c116b2a8/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE8bf14aab39ee9656698be4f3c116b2a8"}, "emitted_at": 1655893266953} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CAace5c8813c499253bbbff29ad0da0ccb", "conference_sid": null, "date_created": "2022-06-16T20:01:24Z", "date_updated": "2022-06-16T20:01:25Z", "start_time": "2022-06-16T20:01:23Z", "duration": 2, "sid": "RE1cc63beca47ac74ed79dd3beb32ea684", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE1cc63beca47ac74ed79dd3beb32ea684.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE1cc63beca47ac74ed79dd3beb32ea684/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE1cc63beca47ac74ed79dd3beb32ea684/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE1cc63beca47ac74ed79dd3beb32ea684"}, "emitted_at": 1655893266954} +{"stream": "recordings", "data": 
{"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CAace5c8813c499253bbbff29ad0da0ccb", "conference_sid": null, "date_created": "2022-06-16T20:01:20Z", "date_updated": "2022-06-16T20:01:22Z", "start_time": "2022-06-16T20:01:20Z", "duration": 2, "sid": "RE291950779d909ba4260a9d253e9a280b", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE291950779d909ba4260a9d253e9a280b.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE291950779d909ba4260a9d253e9a280b/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE291950779d909ba4260a9d253e9a280b/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE291950779d909ba4260a9d253e9a280b"}, "emitted_at": 1655893266955} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CAace5c8813c499253bbbff29ad0da0ccb", "conference_sid": null, "date_created": "2022-06-16T20:01:19Z", "date_updated": "2022-06-16T20:01:19Z", "start_time": "2022-06-16T20:01:18Z", "duration": 1, "sid": "RE16c64652f34cb37d135993dcbcea4132", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE16c64652f34cb37d135993dcbcea4132.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE16c64652f34cb37d135993dcbcea4132/AddOnResults.json", "transcriptions": 
"/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE16c64652f34cb37d135993dcbcea4132/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE16c64652f34cb37d135993dcbcea4132"}, "emitted_at": 1655893266956} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CAace5c8813c499253bbbff29ad0da0ccb", "conference_sid": null, "date_created": "2022-06-16T20:01:17Z", "date_updated": "2022-06-16T20:01:17Z", "start_time": "2022-06-16T20:01:17Z", "duration": 1, "sid": "RE3105dc4b4835d67d9e3b80ac719c2586", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE3105dc4b4835d67d9e3b80ac719c2586.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE3105dc4b4835d67d9e3b80ac719c2586/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE3105dc4b4835d67d9e3b80ac719c2586/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE3105dc4b4835d67d9e3b80ac719c2586"}, "emitted_at": 1655893266957} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CAace5c8813c499253bbbff29ad0da0ccb", "conference_sid": null, "date_created": "2022-06-16T20:01:16Z", "date_updated": "2022-06-16T20:01:16Z", "start_time": "2022-06-16T20:01:15Z", "duration": 1, "sid": "RE9398a30e673674dc27899cbd7ad82079", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": 
"/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE9398a30e673674dc27899cbd7ad82079.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE9398a30e673674dc27899cbd7ad82079/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE9398a30e673674dc27899cbd7ad82079/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE9398a30e673674dc27899cbd7ad82079"}, "emitted_at": 1655893266958} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CAace5c8813c499253bbbff29ad0da0ccb", "conference_sid": null, "date_created": "2022-06-16T20:01:14Z", "date_updated": "2022-06-16T20:01:14Z", "start_time": "2022-06-16T20:01:13Z", "duration": 1, "sid": "RE93879ff75c565fcb76789fcac32132d8", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE93879ff75c565fcb76789fcac32132d8.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE93879ff75c565fcb76789fcac32132d8/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE93879ff75c565fcb76789fcac32132d8/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE93879ff75c565fcb76789fcac32132d8"}, "emitted_at": 1655893266959} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CAace5c8813c499253bbbff29ad0da0ccb", "conference_sid": null, "date_created": "2022-06-16T20:01:12Z", "date_updated": "2022-06-16T20:01:13Z", 
"start_time": "2022-06-16T20:01:12Z", "duration": 1, "sid": "RE7e3d0868eb8c3687f7ecf366d548c1b5", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE7e3d0868eb8c3687f7ecf366d548c1b5.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE7e3d0868eb8c3687f7ecf366d548c1b5/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE7e3d0868eb8c3687f7ecf366d548c1b5/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE7e3d0868eb8c3687f7ecf366d548c1b5"}, "emitted_at": 1655893266960} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CAace5c8813c499253bbbff29ad0da0ccb", "conference_sid": null, "date_created": "2022-06-16T20:01:11Z", "date_updated": "2022-06-16T20:01:11Z", "start_time": "2022-06-16T20:01:10Z", "duration": 1, "sid": "RE9bdcb8ed3c46c2c0657e6ca2db2081f3", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE9bdcb8ed3c46c2c0657e6ca2db2081f3.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE9bdcb8ed3c46c2c0657e6ca2db2081f3/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE9bdcb8ed3c46c2c0657e6ca2db2081f3/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE9bdcb8ed3c46c2c0657e6ca2db2081f3"}, "emitted_at": 1655893266961} +{"stream": "recordings", "data": 
{"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CAace5c8813c499253bbbff29ad0da0ccb", "conference_sid": null, "date_created": "2022-06-16T20:01:09Z", "date_updated": "2022-06-16T20:01:09Z", "start_time": "2022-06-16T20:01:09Z", "duration": 1, "sid": "REa9ff43679b37ab913b96cd6c5648bed5", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REa9ff43679b37ab913b96cd6c5648bed5.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REa9ff43679b37ab913b96cd6c5648bed5/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REa9ff43679b37ab913b96cd6c5648bed5/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REa9ff43679b37ab913b96cd6c5648bed5"}, "emitted_at": 1655893266962} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CAace5c8813c499253bbbff29ad0da0ccb", "conference_sid": null, "date_created": "2022-06-16T20:01:08Z", "date_updated": "2022-06-16T20:01:08Z", "start_time": "2022-06-16T20:01:07Z", "duration": 1, "sid": "REbbdf9380e3460130b9ff04d051aa3696", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REbbdf9380e3460130b9ff04d051aa3696.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REbbdf9380e3460130b9ff04d051aa3696/AddOnResults.json", "transcriptions": 
"/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REbbdf9380e3460130b9ff04d051aa3696/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REbbdf9380e3460130b9ff04d051aa3696"}, "emitted_at": 1655893266963} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CAace5c8813c499253bbbff29ad0da0ccb", "conference_sid": null, "date_created": "2022-06-16T20:01:06Z", "date_updated": "2022-06-16T20:01:06Z", "start_time": "2022-06-16T20:01:06Z", "duration": 1, "sid": "REf7ccc7730fc0c7fe6817c96c6b1a04f4", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REf7ccc7730fc0c7fe6817c96c6b1a04f4.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REf7ccc7730fc0c7fe6817c96c6b1a04f4/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REf7ccc7730fc0c7fe6817c96c6b1a04f4/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REf7ccc7730fc0c7fe6817c96c6b1a04f4"}, "emitted_at": 1655893266964} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CAace5c8813c499253bbbff29ad0da0ccb", "conference_sid": null, "date_created": "2022-06-16T20:01:04Z", "date_updated": "2022-06-16T20:01:05Z", "start_time": "2022-06-16T20:01:04Z", "duration": 1, "sid": "RE981ee4b0819774245797a32fec44dac2", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": 
"/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE981ee4b0819774245797a32fec44dac2.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE981ee4b0819774245797a32fec44dac2/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE981ee4b0819774245797a32fec44dac2/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE981ee4b0819774245797a32fec44dac2"}, "emitted_at": 1655893266965} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CAace5c8813c499253bbbff29ad0da0ccb", "conference_sid": null, "date_created": "2022-06-16T20:01:03Z", "date_updated": "2022-06-16T20:01:03Z", "start_time": "2022-06-16T20:01:02Z", "duration": 1, "sid": "RE4a66f23048ad35aef2c53435c3dfa766", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE4a66f23048ad35aef2c53435c3dfa766.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE4a66f23048ad35aef2c53435c3dfa766/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE4a66f23048ad35aef2c53435c3dfa766/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE4a66f23048ad35aef2c53435c3dfa766"}, "emitted_at": 1655893266966} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CAace5c8813c499253bbbff29ad0da0ccb", "conference_sid": null, "date_created": "2022-06-16T20:01:01Z", "date_updated": "2022-06-16T20:01:02Z", 
"start_time": "2022-06-16T20:01:01Z", "duration": 1, "sid": "RE02b7bc2061b98a05d0a26b602ba42b0e", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE02b7bc2061b98a05d0a26b602ba42b0e.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE02b7bc2061b98a05d0a26b602ba42b0e/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE02b7bc2061b98a05d0a26b602ba42b0e/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE02b7bc2061b98a05d0a26b602ba42b0e"}, "emitted_at": 1655893266967} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CAace5c8813c499253bbbff29ad0da0ccb", "conference_sid": null, "date_created": "2022-06-16T20:00:59Z", "date_updated": "2022-06-16T20:01:00Z", "start_time": "2022-06-16T20:00:59Z", "duration": 1, "sid": "RE19fa8999a6c62d0a88b6dfa352ff04fb", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE19fa8999a6c62d0a88b6dfa352ff04fb.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE19fa8999a6c62d0a88b6dfa352ff04fb/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE19fa8999a6c62d0a88b6dfa352ff04fb/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE19fa8999a6c62d0a88b6dfa352ff04fb"}, "emitted_at": 1655893266968} +{"stream": "recordings", "data": 
{"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CAace5c8813c499253bbbff29ad0da0ccb", "conference_sid": null, "date_created": "2022-06-16T20:00:56Z", "date_updated": "2022-06-16T20:00:58Z", "start_time": "2022-06-16T20:00:56Z", "duration": 3, "sid": "RE64bd8b998bb10e32794685da660cfda5", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE64bd8b998bb10e32794685da660cfda5.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE64bd8b998bb10e32794685da660cfda5/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE64bd8b998bb10e32794685da660cfda5/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE64bd8b998bb10e32794685da660cfda5"}, "emitted_at": 1655893266969} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CAace5c8813c499253bbbff29ad0da0ccb", "conference_sid": null, "date_created": "2022-06-16T20:00:54Z", "date_updated": "2022-06-16T20:00:55Z", "start_time": "2022-06-16T20:00:53Z", "duration": 2, "sid": "REfb431e39b6f99a3b0dd057c46344fd71", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REfb431e39b6f99a3b0dd057c46344fd71.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REfb431e39b6f99a3b0dd057c46344fd71/AddOnResults.json", "transcriptions": 
"/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REfb431e39b6f99a3b0dd057c46344fd71/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REfb431e39b6f99a3b0dd057c46344fd71"}, "emitted_at": 1655893266971} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CAace5c8813c499253bbbff29ad0da0ccb", "conference_sid": null, "date_created": "2022-06-16T20:00:52Z", "date_updated": "2022-06-16T20:00:52Z", "start_time": "2022-06-16T20:00:52Z", "duration": 1, "sid": "RE981857633d4983ce1a20469a7a5d03a5", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE981857633d4983ce1a20469a7a5d03a5.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE981857633d4983ce1a20469a7a5d03a5/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE981857633d4983ce1a20469a7a5d03a5/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE981857633d4983ce1a20469a7a5d03a5"}, "emitted_at": 1655893266972} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CAace5c8813c499253bbbff29ad0da0ccb", "conference_sid": null, "date_created": "2022-06-16T20:00:51Z", "date_updated": "2022-06-16T20:00:51Z", "start_time": "2022-06-16T20:00:50Z", "duration": 1, "sid": "RE5d30419b0c923154d91030e1a2d2ce6d", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": 
"/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE5d30419b0c923154d91030e1a2d2ce6d.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE5d30419b0c923154d91030e1a2d2ce6d/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE5d30419b0c923154d91030e1a2d2ce6d/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE5d30419b0c923154d91030e1a2d2ce6d"}, "emitted_at": 1655893266974} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CAace5c8813c499253bbbff29ad0da0ccb", "conference_sid": null, "date_created": "2022-06-16T20:00:49Z", "date_updated": "2022-06-16T20:00:49Z", "start_time": "2022-06-16T20:00:49Z", "duration": 1, "sid": "REf09321db4a9c428c10308c7af4857aba", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REf09321db4a9c428c10308c7af4857aba.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REf09321db4a9c428c10308c7af4857aba/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REf09321db4a9c428c10308c7af4857aba/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REf09321db4a9c428c10308c7af4857aba"}, "emitted_at": 1655893266976} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CAace5c8813c499253bbbff29ad0da0ccb", "conference_sid": null, "date_created": "2022-06-16T20:00:48Z", "date_updated": "2022-06-16T20:00:48Z", 
"start_time": "2022-06-16T20:00:47Z", "duration": 1, "sid": "RE5d5952d4ed4492e8c87aa2626c00c8a2", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE5d5952d4ed4492e8c87aa2626c00c8a2.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE5d5952d4ed4492e8c87aa2626c00c8a2/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE5d5952d4ed4492e8c87aa2626c00c8a2/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE5d5952d4ed4492e8c87aa2626c00c8a2"}, "emitted_at": 1655893266977} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CAace5c8813c499253bbbff29ad0da0ccb", "conference_sid": null, "date_created": "2022-06-16T20:00:46Z", "date_updated": "2022-06-16T20:00:46Z", "start_time": "2022-06-16T20:00:45Z", "duration": 1, "sid": "REb5826027d968785a8de9fc664268bd88", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REb5826027d968785a8de9fc664268bd88.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REb5826027d968785a8de9fc664268bd88/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REb5826027d968785a8de9fc664268bd88/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REb5826027d968785a8de9fc664268bd88"}, "emitted_at": 1655893266979} +{"stream": "recordings", "data": 
{"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CAace5c8813c499253bbbff29ad0da0ccb", "conference_sid": null, "date_created": "2022-06-16T20:00:44Z", "date_updated": "2022-06-16T20:00:45Z", "start_time": "2022-06-16T20:00:44Z", "duration": 1, "sid": "RE6658183d65ab3b44451783f3684c6a93", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE6658183d65ab3b44451783f3684c6a93.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE6658183d65ab3b44451783f3684c6a93/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE6658183d65ab3b44451783f3684c6a93/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE6658183d65ab3b44451783f3684c6a93"}, "emitted_at": 1655893266980} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CAace5c8813c499253bbbff29ad0da0ccb", "conference_sid": null, "date_created": "2022-06-16T20:00:43Z", "date_updated": "2022-06-16T20:00:43Z", "start_time": "2022-06-16T20:00:42Z", "duration": 1, "sid": "REfc7362d2ebb6bf0778b2cd6b428f3e7d", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REfc7362d2ebb6bf0778b2cd6b428f3e7d.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REfc7362d2ebb6bf0778b2cd6b428f3e7d/AddOnResults.json", "transcriptions": 
"/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REfc7362d2ebb6bf0778b2cd6b428f3e7d/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REfc7362d2ebb6bf0778b2cd6b428f3e7d"}, "emitted_at": 1655893266982} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CAace5c8813c499253bbbff29ad0da0ccb", "conference_sid": null, "date_created": "2022-06-16T20:00:41Z", "date_updated": "2022-06-16T20:00:42Z", "start_time": "2022-06-16T20:00:41Z", "duration": 1, "sid": "RE2dd35ee06753a370132456a3c0b797b5", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE2dd35ee06753a370132456a3c0b797b5.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE2dd35ee06753a370132456a3c0b797b5/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE2dd35ee06753a370132456a3c0b797b5/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE2dd35ee06753a370132456a3c0b797b5"}, "emitted_at": 1655893266984} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CAa24e9fbcb6eba3c8cfefc248a3c0b5b4", "conference_sid": null, "date_created": "2022-06-02T12:54:04Z", "date_updated": "2022-06-02T12:54:05Z", "start_time": "2022-06-02T12:54:04Z", "duration": 1, "sid": "RE3aa50a962ab3bb86215f2f5765332947", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": 
"/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE3aa50a962ab3bb86215f2f5765332947.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE3aa50a962ab3bb86215f2f5765332947/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE3aa50a962ab3bb86215f2f5765332947/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE3aa50a962ab3bb86215f2f5765332947"}, "emitted_at": 1655893266985} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CAa24e9fbcb6eba3c8cfefc248a3c0b5b4", "conference_sid": null, "date_created": "2022-06-02T12:54:03Z", "date_updated": "2022-06-02T12:54:03Z", "start_time": "2022-06-02T12:54:02Z", "duration": 1, "sid": "RE689d590e9c6e2fbb47a925d4a0596226", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE689d590e9c6e2fbb47a925d4a0596226.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE689d590e9c6e2fbb47a925d4a0596226/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE689d590e9c6e2fbb47a925d4a0596226/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE689d590e9c6e2fbb47a925d4a0596226"}, "emitted_at": 1655893266987} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CA65f8d6ee9f8783233750f2b0f99cf1b3", "conference_sid": null, "date_created": "2022-05-26T22:14:12Z", "date_updated": "2022-05-26T22:14:18Z", 
"start_time": "2022-05-26T22:14:12Z", "duration": 6, "sid": "RE75c4f893b307d6d02932adce71b4add9", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE75c4f893b307d6d02932adce71b4add9.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE75c4f893b307d6d02932adce71b4add9/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE75c4f893b307d6d02932adce71b4add9/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE75c4f893b307d6d02932adce71b4add9"}, "emitted_at": 1655893266988} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CA65f8d6ee9f8783233750f2b0f99cf1b3", "conference_sid": null, "date_created": "2022-05-26T22:14:11Z", "date_updated": "2022-05-26T22:14:11Z", "start_time": "2022-05-26T22:14:10Z", "duration": 1, "sid": "RE63107dad7b3fc6b3c31cfdbbebbe9597", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE63107dad7b3fc6b3c31cfdbbebbe9597.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE63107dad7b3fc6b3c31cfdbbebbe9597/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE63107dad7b3fc6b3c31cfdbbebbe9597/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE63107dad7b3fc6b3c31cfdbbebbe9597"}, "emitted_at": 1655893266990} +{"stream": "recordings", "data": 
{"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CA65f8d6ee9f8783233750f2b0f99cf1b3", "conference_sid": null, "date_created": "2022-05-26T22:14:09Z", "date_updated": "2022-05-26T22:14:10Z", "start_time": "2022-05-26T22:14:09Z", "duration": 1, "sid": "REb4bbe72e0a8a22d4fcb50b035fb4d702", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REb4bbe72e0a8a22d4fcb50b035fb4d702.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REb4bbe72e0a8a22d4fcb50b035fb4d702/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REb4bbe72e0a8a22d4fcb50b035fb4d702/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REb4bbe72e0a8a22d4fcb50b035fb4d702"}, "emitted_at": 1655893266992} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CA65f8d6ee9f8783233750f2b0f99cf1b3", "conference_sid": null, "date_created": "2022-05-26T22:14:08Z", "date_updated": "2022-05-26T22:14:08Z", "start_time": "2022-05-26T22:14:07Z", "duration": 1, "sid": "RE6fb436c9fd1ec95e00a8d3a9b0bd26f0", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE6fb436c9fd1ec95e00a8d3a9b0bd26f0.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE6fb436c9fd1ec95e00a8d3a9b0bd26f0/AddOnResults.json", "transcriptions": 
"/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE6fb436c9fd1ec95e00a8d3a9b0bd26f0/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE6fb436c9fd1ec95e00a8d3a9b0bd26f0"}, "emitted_at": 1655893266993} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CA65f8d6ee9f8783233750f2b0f99cf1b3", "conference_sid": null, "date_created": "2022-05-26T22:14:06Z", "date_updated": "2022-05-26T22:14:06Z", "start_time": "2022-05-26T22:14:05Z", "duration": 1, "sid": "RE3dd3bd929eadff7a4ed800e96d9c55ce", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE3dd3bd929eadff7a4ed800e96d9c55ce.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE3dd3bd929eadff7a4ed800e96d9c55ce/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE3dd3bd929eadff7a4ed800e96d9c55ce/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE3dd3bd929eadff7a4ed800e96d9c55ce"}, "emitted_at": 1655893266995} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CA65f8d6ee9f8783233750f2b0f99cf1b3", "conference_sid": null, "date_created": "2022-05-26T22:14:04Z", "date_updated": "2022-05-26T22:14:05Z", "start_time": "2022-05-26T22:14:04Z", "duration": 1, "sid": "RE9bf94c8f8709c0480653133bb1e70529", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": 
"/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE9bf94c8f8709c0480653133bb1e70529.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE9bf94c8f8709c0480653133bb1e70529/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE9bf94c8f8709c0480653133bb1e70529/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE9bf94c8f8709c0480653133bb1e70529"}, "emitted_at": 1655893266997} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CA65f8d6ee9f8783233750f2b0f99cf1b3", "conference_sid": null, "date_created": "2022-05-26T22:13:41Z", "date_updated": "2022-05-26T22:14:03Z", "start_time": "2022-05-26T22:13:41Z", "duration": 22, "sid": "RE8ab08e0d46c9bd4876ad4403e2b6abeb", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE8ab08e0d46c9bd4876ad4403e2b6abeb.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE8ab08e0d46c9bd4876ad4403e2b6abeb/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE8ab08e0d46c9bd4876ad4403e2b6abeb/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE8ab08e0d46c9bd4876ad4403e2b6abeb"}, "emitted_at": 1655893266999} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CA65f8d6ee9f8783233750f2b0f99cf1b3", "conference_sid": null, "date_created": "2022-05-26T22:13:40Z", "date_updated": "2022-05-26T22:13:40Z", 
"start_time": "2022-05-26T22:13:39Z", "duration": 1, "sid": "RE2577c3ce743f157b40ce7861b304905d", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE2577c3ce743f157b40ce7861b304905d.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE2577c3ce743f157b40ce7861b304905d/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE2577c3ce743f157b40ce7861b304905d/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE2577c3ce743f157b40ce7861b304905d"}, "emitted_at": 1655893267000} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CA65f8d6ee9f8783233750f2b0f99cf1b3", "conference_sid": null, "date_created": "2022-05-26T22:13:38Z", "date_updated": "2022-05-26T22:13:38Z", "start_time": "2022-05-26T22:13:38Z", "duration": 1, "sid": "REc57d5519588bea5548f11528011cf7c9", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REc57d5519588bea5548f11528011cf7c9.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REc57d5519588bea5548f11528011cf7c9/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REc57d5519588bea5548f11528011cf7c9/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REc57d5519588bea5548f11528011cf7c9"}, "emitted_at": 1655893267002} +{"stream": "recordings", "data": 
{"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CA65f8d6ee9f8783233750f2b0f99cf1b3", "conference_sid": null, "date_created": "2022-05-26T22:13:37Z", "date_updated": "2022-05-26T22:13:37Z", "start_time": "2022-05-26T22:13:36Z", "duration": 1, "sid": "REd6b9f2caea44b862fbc4bc9a8189295f", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REd6b9f2caea44b862fbc4bc9a8189295f.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REd6b9f2caea44b862fbc4bc9a8189295f/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REd6b9f2caea44b862fbc4bc9a8189295f/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REd6b9f2caea44b862fbc4bc9a8189295f"}, "emitted_at": 1655893267004} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CA65f8d6ee9f8783233750f2b0f99cf1b3", "conference_sid": null, "date_created": "2022-05-26T22:13:35Z", "date_updated": "2022-05-26T22:13:35Z", "start_time": "2022-05-26T22:13:34Z", "duration": 1, "sid": "REe567d8d8ec39ec0f5a6512c76f3eab7b", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REe567d8d8ec39ec0f5a6512c76f3eab7b.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REe567d8d8ec39ec0f5a6512c76f3eab7b/AddOnResults.json", "transcriptions": 
"/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REe567d8d8ec39ec0f5a6512c76f3eab7b/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REe567d8d8ec39ec0f5a6512c76f3eab7b"}, "emitted_at": 1655893267005} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CA65f8d6ee9f8783233750f2b0f99cf1b3", "conference_sid": null, "date_created": "2022-05-26T22:13:33Z", "date_updated": "2022-05-26T22:13:34Z", "start_time": "2022-05-26T22:13:33Z", "duration": 1, "sid": "RE6e514b05df197cdbcc52ee8dcbd861f4", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE6e514b05df197cdbcc52ee8dcbd861f4.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE6e514b05df197cdbcc52ee8dcbd861f4/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE6e514b05df197cdbcc52ee8dcbd861f4/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE6e514b05df197cdbcc52ee8dcbd861f4"}, "emitted_at": 1655893267007} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CA65f8d6ee9f8783233750f2b0f99cf1b3", "conference_sid": null, "date_created": "2022-05-26T22:13:11Z", "date_updated": "2022-05-26T22:13:33Z", "start_time": "2022-05-26T22:13:11Z", "duration": 21, "sid": "REcd9ff068b0f269f93832f4b4a93dbd08", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": 
"/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REcd9ff068b0f269f93832f4b4a93dbd08.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REcd9ff068b0f269f93832f4b4a93dbd08/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REcd9ff068b0f269f93832f4b4a93dbd08/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REcd9ff068b0f269f93832f4b4a93dbd08"}, "emitted_at": 1655893267009} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CA5b6907d5ebca072c9bd0f46952b886b6", "conference_sid": null, "date_created": "2022-05-24T23:00:40Z", "date_updated": "2022-05-24T23:00:40Z", "start_time": "2022-05-24T23:00:40Z", "duration": 1, "sid": "RE81c10205dda16e7014593863c244db13", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE81c10205dda16e7014593863c244db13.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE81c10205dda16e7014593863c244db13/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE81c10205dda16e7014593863c244db13/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE81c10205dda16e7014593863c244db13"}, "emitted_at": 1655893267010} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CA5b6907d5ebca072c9bd0f46952b886b6", "conference_sid": null, "date_created": "2022-05-24T23:00:38Z", "date_updated": "2022-05-24T23:00:39Z", 
"start_time": "2022-05-24T23:00:38Z", "duration": 1, "sid": "REa8ee0893e6fc656afaa55fbae00a6813", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REa8ee0893e6fc656afaa55fbae00a6813.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REa8ee0893e6fc656afaa55fbae00a6813/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REa8ee0893e6fc656afaa55fbae00a6813/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REa8ee0893e6fc656afaa55fbae00a6813"}, "emitted_at": 1655893267011} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CA5b6907d5ebca072c9bd0f46952b886b6", "conference_sid": null, "date_created": "2022-05-24T23:00:37Z", "date_updated": "2022-05-24T23:00:37Z", "start_time": "2022-05-24T23:00:36Z", "duration": 1, "sid": "REa09b0092d28222e8dc4af7660bdcb110", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REa09b0092d28222e8dc4af7660bdcb110.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REa09b0092d28222e8dc4af7660bdcb110/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REa09b0092d28222e8dc4af7660bdcb110/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REa09b0092d28222e8dc4af7660bdcb110"}, "emitted_at": 1655893267012} +{"stream": "recordings", "data": 
{"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CA5b6907d5ebca072c9bd0f46952b886b6", "conference_sid": null, "date_created": "2022-05-24T23:00:35Z", "date_updated": "2022-05-24T23:00:35Z", "start_time": "2022-05-24T23:00:35Z", "duration": 1, "sid": "REb42bbe5afa74e02fb02b2a1ef974237b", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REb42bbe5afa74e02fb02b2a1ef974237b.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REb42bbe5afa74e02fb02b2a1ef974237b/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REb42bbe5afa74e02fb02b2a1ef974237b/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REb42bbe5afa74e02fb02b2a1ef974237b"}, "emitted_at": 1655893267013} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CA5b6907d5ebca072c9bd0f46952b886b6", "conference_sid": null, "date_created": "2022-05-24T23:00:33Z", "date_updated": "2022-05-24T23:00:34Z", "start_time": "2022-05-24T23:00:32Z", "duration": 2, "sid": "REa208d9124b5d0d9c5edd30cd25284a33", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REa208d9124b5d0d9c5edd30cd25284a33.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REa208d9124b5d0d9c5edd30cd25284a33/AddOnResults.json", "transcriptions": 
"/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REa208d9124b5d0d9c5edd30cd25284a33/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REa208d9124b5d0d9c5edd30cd25284a33"}, "emitted_at": 1655893267014} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CA5b6907d5ebca072c9bd0f46952b886b6", "conference_sid": null, "date_created": "2022-05-24T23:00:31Z", "date_updated": "2022-05-24T23:00:32Z", "start_time": "2022-05-24T23:00:30Z", "duration": 2, "sid": "REcaab878fde266f2688cd90a88334a581", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REcaab878fde266f2688cd90a88334a581.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REcaab878fde266f2688cd90a88334a581/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REcaab878fde266f2688cd90a88334a581/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REcaab878fde266f2688cd90a88334a581"}, "emitted_at": 1655893267015} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CA5b6907d5ebca072c9bd0f46952b886b6", "conference_sid": null, "date_created": "2022-05-24T23:00:29Z", "date_updated": "2022-05-24T23:00:30Z", "start_time": "2022-05-24T23:00:28Z", "duration": 2, "sid": "RE710daa26224ce27f4957a3f15819d641", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": 
"/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE710daa26224ce27f4957a3f15819d641.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE710daa26224ce27f4957a3f15819d641/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE710daa26224ce27f4957a3f15819d641/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE710daa26224ce27f4957a3f15819d641"}, "emitted_at": 1655893267016} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CA5b6907d5ebca072c9bd0f46952b886b6", "conference_sid": null, "date_created": "2022-05-24T23:00:25Z", "date_updated": "2022-05-24T23:00:27Z", "start_time": "2022-05-24T23:00:25Z", "duration": 3, "sid": "REdc7979975a4c1e0ed01e9438a494aa51", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REdc7979975a4c1e0ed01e9438a494aa51.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REdc7979975a4c1e0ed01e9438a494aa51/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REdc7979975a4c1e0ed01e9438a494aa51/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REdc7979975a4c1e0ed01e9438a494aa51"}, "emitted_at": 1655893267017} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CA5b6907d5ebca072c9bd0f46952b886b6", "conference_sid": null, "date_created": "2022-05-24T23:00:24Z", "date_updated": "2022-05-24T23:00:24Z", 
"start_time": "2022-05-24T23:00:23Z", "duration": 1, "sid": "RE584f9edddc9c69488bf778f66f01986e", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE584f9edddc9c69488bf778f66f01986e.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE584f9edddc9c69488bf778f66f01986e/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE584f9edddc9c69488bf778f66f01986e/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE584f9edddc9c69488bf778f66f01986e"}, "emitted_at": 1655893267018} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CA5b6907d5ebca072c9bd0f46952b886b6", "conference_sid": null, "date_created": "2022-05-24T23:00:22Z", "date_updated": "2022-05-24T23:00:22Z", "start_time": "2022-05-24T23:00:22Z", "duration": 1, "sid": "REbda9ad7207f352a046ba30f21d9aac64", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REbda9ad7207f352a046ba30f21d9aac64.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REbda9ad7207f352a046ba30f21d9aac64/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REbda9ad7207f352a046ba30f21d9aac64/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REbda9ad7207f352a046ba30f21d9aac64"}, "emitted_at": 1655893267019} +{"stream": "recordings", "data": 
{"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CA5b6907d5ebca072c9bd0f46952b886b6", "conference_sid": null, "date_created": "2022-05-24T23:00:20Z", "date_updated": "2022-05-24T23:00:21Z", "start_time": "2022-05-24T23:00:20Z", "duration": 1, "sid": "RE7a189bacb5dac74a30bbb9fd6836d8bd", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE7a189bacb5dac74a30bbb9fd6836d8bd.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE7a189bacb5dac74a30bbb9fd6836d8bd/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE7a189bacb5dac74a30bbb9fd6836d8bd/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE7a189bacb5dac74a30bbb9fd6836d8bd"}, "emitted_at": 1655893267020} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CA5b6907d5ebca072c9bd0f46952b886b6", "conference_sid": null, "date_created": "2022-05-24T23:00:19Z", "date_updated": "2022-05-24T23:00:19Z", "start_time": "2022-05-24T23:00:18Z", "duration": 1, "sid": "RE864c566fd451a7fdc3f524545573909a", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE864c566fd451a7fdc3f524545573909a.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE864c566fd451a7fdc3f524545573909a/AddOnResults.json", "transcriptions": 
"/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE864c566fd451a7fdc3f524545573909a/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE864c566fd451a7fdc3f524545573909a"}, "emitted_at": 1655893267021} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CA5b6907d5ebca072c9bd0f46952b886b6", "conference_sid": null, "date_created": "2022-05-24T23:00:17Z", "date_updated": "2022-05-24T23:00:18Z", "start_time": "2022-05-24T23:00:17Z", "duration": 1, "sid": "RE87b308746230bb1c3924edb28f59b3f8", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE87b308746230bb1c3924edb28f59b3f8.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE87b308746230bb1c3924edb28f59b3f8/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE87b308746230bb1c3924edb28f59b3f8/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE87b308746230bb1c3924edb28f59b3f8"}, "emitted_at": 1655893267022} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CA5b6907d5ebca072c9bd0f46952b886b6", "conference_sid": null, "date_created": "2022-05-24T23:00:16Z", "date_updated": "2022-05-24T23:00:16Z", "start_time": "2022-05-24T23:00:15Z", "duration": 1, "sid": "RE9a873de5fe7bdc35c5e12e3177749f59", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": 
"/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE9a873de5fe7bdc35c5e12e3177749f59.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE9a873de5fe7bdc35c5e12e3177749f59/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE9a873de5fe7bdc35c5e12e3177749f59/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE9a873de5fe7bdc35c5e12e3177749f59"}, "emitted_at": 1655893267023} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CA5b6907d5ebca072c9bd0f46952b886b6", "conference_sid": null, "date_created": "2022-05-24T23:00:13Z", "date_updated": "2022-05-24T23:00:15Z", "start_time": "2022-05-24T23:00:13Z", "duration": 2, "sid": "RE76fdf12ce0d5cc819ee117fa66b19fce", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE76fdf12ce0d5cc819ee117fa66b19fce.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE76fdf12ce0d5cc819ee117fa66b19fce/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE76fdf12ce0d5cc819ee117fa66b19fce/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE76fdf12ce0d5cc819ee117fa66b19fce"}, "emitted_at": 1655893267024} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CA5b6907d5ebca072c9bd0f46952b886b6", "conference_sid": null, "date_created": "2022-05-24T23:00:12Z", "date_updated": "2022-05-24T23:00:12Z", 
"start_time": "2022-05-24T23:00:11Z", "duration": 1, "sid": "REb6acb75efe39edf2de14d640cab5c83b", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REb6acb75efe39edf2de14d640cab5c83b.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REb6acb75efe39edf2de14d640cab5c83b/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REb6acb75efe39edf2de14d640cab5c83b/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REb6acb75efe39edf2de14d640cab5c83b"}, "emitted_at": 1655893267025} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CA696bd2d2e37ef8501f443807dce444a9", "conference_sid": null, "date_created": "2022-05-11T18:21:12Z", "date_updated": "2022-05-11T18:21:16Z", "start_time": "2022-05-11T18:21:12Z", "duration": 3, "sid": "REea3b05ba7d2bdf6b876e390da04ca563", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REea3b05ba7d2bdf6b876e390da04ca563.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REea3b05ba7d2bdf6b876e390da04ca563/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REea3b05ba7d2bdf6b876e390da04ca563/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REea3b05ba7d2bdf6b876e390da04ca563"}, "emitted_at": 1655893267026} +{"stream": "recordings", "data": 
{"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CA696bd2d2e37ef8501f443807dce444a9", "conference_sid": null, "date_created": "2022-05-11T18:21:10Z", "date_updated": "2022-05-11T18:21:11Z", "start_time": "2022-05-11T18:21:10Z", "duration": 1, "sid": "REb9ff52417bdc0d8453f36f294f9f0396", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REb9ff52417bdc0d8453f36f294f9f0396.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REb9ff52417bdc0d8453f36f294f9f0396/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REb9ff52417bdc0d8453f36f294f9f0396/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REb9ff52417bdc0d8453f36f294f9f0396"}, "emitted_at": 1655893267027} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CA696bd2d2e37ef8501f443807dce444a9", "conference_sid": null, "date_created": "2022-05-11T18:21:09Z", "date_updated": "2022-05-11T18:21:09Z", "start_time": "2022-05-11T18:21:08Z", "duration": 1, "sid": "RE29cbe18a706ba7956e16277f7c2300c1", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE29cbe18a706ba7956e16277f7c2300c1.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE29cbe18a706ba7956e16277f7c2300c1/AddOnResults.json", "transcriptions": 
"/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE29cbe18a706ba7956e16277f7c2300c1/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE29cbe18a706ba7956e16277f7c2300c1"}, "emitted_at": 1655893267028} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CA696bd2d2e37ef8501f443807dce444a9", "conference_sid": null, "date_created": "2022-05-11T18:21:07Z", "date_updated": "2022-05-11T18:21:07Z", "start_time": "2022-05-11T18:21:07Z", "duration": 1, "sid": "RE0f8e84c849eac22ce546b607f343581c", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE0f8e84c849eac22ce546b607f343581c.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE0f8e84c849eac22ce546b607f343581c/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE0f8e84c849eac22ce546b607f343581c/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE0f8e84c849eac22ce546b607f343581c"}, "emitted_at": 1655893267029} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CA696bd2d2e37ef8501f443807dce444a9", "conference_sid": null, "date_created": "2022-05-11T18:20:58Z", "date_updated": "2022-05-11T18:21:06Z", "start_time": "2022-05-11T18:20:58Z", "duration": 8, "sid": "RE9f8707f4a7ea29ac097af1126af5213d", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": 
"/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE9f8707f4a7ea29ac097af1126af5213d.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE9f8707f4a7ea29ac097af1126af5213d/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE9f8707f4a7ea29ac097af1126af5213d/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE9f8707f4a7ea29ac097af1126af5213d"}, "emitted_at": 1655893267030} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CA696bd2d2e37ef8501f443807dce444a9", "conference_sid": null, "date_created": "2022-05-11T18:20:57Z", "date_updated": "2022-05-11T18:20:57Z", "start_time": "2022-05-11T18:20:56Z", "duration": 1, "sid": "RE00be57745f3ed9a4580b6105ef6a5671", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE00be57745f3ed9a4580b6105ef6a5671.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE00be57745f3ed9a4580b6105ef6a5671/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE00be57745f3ed9a4580b6105ef6a5671/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE00be57745f3ed9a4580b6105ef6a5671"}, "emitted_at": 1655893267031} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CA696bd2d2e37ef8501f443807dce444a9", "conference_sid": null, "date_created": "2022-05-11T18:20:55Z", "date_updated": "2022-05-11T18:20:56Z", 
"start_time": "2022-05-11T18:20:54Z", "duration": 1, "sid": "RE5f8c70f79a4ae3ef3a387f3d3b5caf4d", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE5f8c70f79a4ae3ef3a387f3d3b5caf4d.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE5f8c70f79a4ae3ef3a387f3d3b5caf4d/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE5f8c70f79a4ae3ef3a387f3d3b5caf4d/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE5f8c70f79a4ae3ef3a387f3d3b5caf4d"}, "emitted_at": 1655893267032} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CAe86d27d7aba7c857135b46f52f578d0b", "conference_sid": null, "date_created": "2022-04-20T17:33:25Z", "date_updated": "2022-04-20T17:33:26Z", "start_time": "2022-04-20T17:33:25Z", "duration": 1, "sid": "RE582e4deeefc8f5f67c89542aa878d1b5", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE582e4deeefc8f5f67c89542aa878d1b5.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE582e4deeefc8f5f67c89542aa878d1b5/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE582e4deeefc8f5f67c89542aa878d1b5/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE582e4deeefc8f5f67c89542aa878d1b5"}, "emitted_at": 1655893267244} +{"stream": "recordings", "data": 
{"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CAe86d27d7aba7c857135b46f52f578d0b", "conference_sid": null, "date_created": "2022-04-20T17:33:23Z", "date_updated": "2022-04-20T17:33:24Z", "start_time": "2022-04-20T17:33:23Z", "duration": 2, "sid": "RE018b40e386dd9562b79488eadbdab63c", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE018b40e386dd9562b79488eadbdab63c.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE018b40e386dd9562b79488eadbdab63c/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE018b40e386dd9562b79488eadbdab63c/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE018b40e386dd9562b79488eadbdab63c"}, "emitted_at": 1655893267245} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CAade9599c9cf53091c1787898093e2675", "conference_sid": null, "date_created": "2022-04-06T21:01:01Z", "date_updated": "2022-04-06T21:01:01Z", "start_time": "2022-04-06T21:01:00Z", "duration": 1, "sid": "RE7c783f510306f155a6ec9d9d1805bca6", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE7c783f510306f155a6ec9d9d1805bca6.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE7c783f510306f155a6ec9d9d1805bca6/AddOnResults.json", "transcriptions": 
"/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE7c783f510306f155a6ec9d9d1805bca6/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE7c783f510306f155a6ec9d9d1805bca6"}, "emitted_at": 1655893267246} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CAade9599c9cf53091c1787898093e2675", "conference_sid": null, "date_created": "2022-04-06T21:00:59Z", "date_updated": "2022-04-06T21:00:59Z", "start_time": "2022-04-06T21:00:59Z", "duration": 1, "sid": "RE978ab4196373eae0c9a59c18df70875d", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE978ab4196373eae0c9a59c18df70875d.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE978ab4196373eae0c9a59c18df70875d/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE978ab4196373eae0c9a59c18df70875d/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE978ab4196373eae0c9a59c18df70875d"}, "emitted_at": 1655893267247} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CAade9599c9cf53091c1787898093e2675", "conference_sid": null, "date_created": "2022-04-06T21:00:57Z", "date_updated": "2022-04-06T21:00:58Z", "start_time": "2022-04-06T21:00:57Z", "duration": 1, "sid": "REba6dd3aac34a37a9328b8650886b270c", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": 
"/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REba6dd3aac34a37a9328b8650886b270c.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REba6dd3aac34a37a9328b8650886b270c/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REba6dd3aac34a37a9328b8650886b270c/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REba6dd3aac34a37a9328b8650886b270c"}, "emitted_at": 1655893267248} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CAa3887d4de4849a630bc369351f300171", "conference_sid": null, "date_created": "2022-04-06T20:57:37Z", "date_updated": "2022-04-06T20:57:37Z", "start_time": "2022-04-06T20:57:36Z", "duration": 1, "sid": "RE8a965af19f26ab0ae81467cdb64530cc", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE8a965af19f26ab0ae81467cdb64530cc.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE8a965af19f26ab0ae81467cdb64530cc/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE8a965af19f26ab0ae81467cdb64530cc/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE8a965af19f26ab0ae81467cdb64530cc"}, "emitted_at": 1655893267249} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CAa3887d4de4849a630bc369351f300171", "conference_sid": null, "date_created": "2022-04-06T20:57:35Z", "date_updated": "2022-04-06T20:57:35Z", 
"start_time": "2022-04-06T20:57:35Z", "duration": 1, "sid": "RE297c30a6ed31bfbb9260442d244307a4", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE297c30a6ed31bfbb9260442d244307a4.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE297c30a6ed31bfbb9260442d244307a4/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE297c30a6ed31bfbb9260442d244307a4/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE297c30a6ed31bfbb9260442d244307a4"}, "emitted_at": 1655893267250} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CAa3887d4de4849a630bc369351f300171", "conference_sid": null, "date_created": "2022-04-06T20:57:33Z", "date_updated": "2022-04-06T20:57:34Z", "start_time": "2022-04-06T20:57:33Z", "duration": 1, "sid": "RE8af9d1a10ae7d191707b1eb56b1251ad", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE8af9d1a10ae7d191707b1eb56b1251ad.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE8af9d1a10ae7d191707b1eb56b1251ad/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE8af9d1a10ae7d191707b1eb56b1251ad/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE8af9d1a10ae7d191707b1eb56b1251ad"}, "emitted_at": 1655893267251} +{"stream": "recordings", "data": 
{"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CA78611ecf5e7f101b1a59be31b8f520f7", "conference_sid": null, "date_created": "2022-03-13T23:56:33Z", "date_updated": "2022-03-13T23:56:37Z", "start_time": "2022-03-13T23:56:32Z", "duration": 5, "sid": "REd2d304b862d9860c1843ed5e80212081", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REd2d304b862d9860c1843ed5e80212081.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REd2d304b862d9860c1843ed5e80212081/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REd2d304b862d9860c1843ed5e80212081/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REd2d304b862d9860c1843ed5e80212081"}, "emitted_at": 1655893267253} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CA78611ecf5e7f101b1a59be31b8f520f7", "conference_sid": null, "date_created": "2022-03-13T23:56:31Z", "date_updated": "2022-03-13T23:56:31Z", "start_time": "2022-03-13T23:56:31Z", "duration": 1, "sid": "REa944f91cad14528766b3dfb3152fbb89", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REa944f91cad14528766b3dfb3152fbb89.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REa944f91cad14528766b3dfb3152fbb89/AddOnResults.json", "transcriptions": 
"/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REa944f91cad14528766b3dfb3152fbb89/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REa944f91cad14528766b3dfb3152fbb89"}, "emitted_at": 1655893267254} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CA78611ecf5e7f101b1a59be31b8f520f7", "conference_sid": null, "date_created": "2022-03-13T23:56:29Z", "date_updated": "2022-03-13T23:56:30Z", "start_time": "2022-03-13T23:56:29Z", "duration": 1, "sid": "REb6d63081540fd7ec9835f267fa722ff4", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REb6d63081540fd7ec9835f267fa722ff4.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REb6d63081540fd7ec9835f267fa722ff4/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REb6d63081540fd7ec9835f267fa722ff4/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/REb6d63081540fd7ec9835f267fa722ff4"}, "emitted_at": 1655893267255} +{"stream": "recordings", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "call_sid": "CA78611ecf5e7f101b1a59be31b8f520f7", "conference_sid": null, "date_created": "2022-03-13T23:56:27Z", "date_updated": "2022-03-13T23:56:28Z", "start_time": "2022-03-13T23:56:26Z", "duration": 2, "sid": "RE6be6c79bca501a7d5284c5ebcd87ec22", "price": -0.0025, "price_unit": "USD", "status": "completed", "channels": 1, "source": "RecordVerb", "error_code": null, "uri": 
"/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE6be6c79bca501a7d5284c5ebcd87ec22.json", "encryption_details": null, "subresource_uris": {"add_on_results": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE6be6c79bca501a7d5284c5ebcd87ec22/AddOnResults.json", "transcriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE6be6c79bca501a7d5284c5ebcd87ec22/Transcriptions.json"}, "media_url": "https://api.twilio.com/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Recordings/RE6be6c79bca501a7d5284c5ebcd87ec22"}, "emitted_at": 1655893267256} +{"stream": "transcriptions", "data": {"sid": "TR2164f564775dc570bc7b1325f8afbf58", "date_created": "2022-06-17T22:27:57Z", "date_updated": "2022-06-18T01:03:23Z", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "status": "completed", "type": "fast", "recording_sid": "RE71fc6f69d0b58d97fa2e0e94a6b28d39", "duration": 4, "transcription_text": "I am sorry you don't qualify but have a great day.", "api_version": "2010-04-01", "price": -0.05, "price_unit": "USD", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Transcriptions/TR2164f564775dc570bc7b1325f8afbf58.json"}, "emitted_at": 1655893269181} +{"stream": "transcriptions", "data": {"sid": "TR53a3448f0f5e8a64a03cfec4bd067b36", "date_created": "2022-06-17T22:27:47Z", "date_updated": "2022-06-18T00:38:46Z", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "status": "completed", "type": "fast", "recording_sid": "RE23c1f81c23a44f80ccb984129db33a10", "duration": 9, "transcription_text": "So not if occasion you call about the tax compromise program. Do you still owe $5000.00 and backs tax? 
Yes.", "api_version": "2010-04-01", "price": -0.05, "price_unit": "USD", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Transcriptions/TR53a3448f0f5e8a64a03cfec4bd067b36.json"}, "emitted_at": 1655893269187} +{"stream": "transcriptions", "data": {"sid": "TR8cd37054f4bf7443ab7ec945ee002c4c", "date_created": "2022-06-16T20:01:35Z", "date_updated": "2022-06-16T20:01:35Z", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "status": "failed", "type": "fast", "recording_sid": "REfea1fd60331fd295d104927d5692b237", "duration": 1, "transcription_text": null, "api_version": "2010-04-01", "price": null, "price_unit": "USD", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Transcriptions/TR8cd37054f4bf7443ab7ec945ee002c4c.json"}, "emitted_at": 1655893269191} +{"stream": "transcriptions", "data": {"sid": "TRddb62a2312828ab49616e1ca9abe93f5", "date_created": "2022-06-16T20:01:29Z", "date_updated": "2022-06-16T20:01:29Z", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "status": "failed", "type": "fast", "recording_sid": "RE8bf14aab39ee9656698be4f3c116b2a8", "duration": 2, "transcription_text": null, "api_version": "2010-04-01", "price": null, "price_unit": "USD", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Transcriptions/TRddb62a2312828ab49616e1ca9abe93f5.json"}, "emitted_at": 1655893269194} +{"stream": "transcriptions", "data": {"sid": "TRc25273bf6e1a4af5c03ea0feaa665a71", "date_created": "2022-06-16T20:01:25Z", "date_updated": "2022-06-16T20:01:26Z", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "status": "failed", "type": "fast", "recording_sid": "RE1cc63beca47ac74ed79dd3beb32ea684", "duration": 2, "transcription_text": null, "api_version": "2010-04-01", "price": null, "price_unit": "USD", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Transcriptions/TRc25273bf6e1a4af5c03ea0feaa665a71.json"}, "emitted_at": 1655893269197} +{"stream": "transcriptions", "data": {"sid": 
"TR9b0bdc132040c6ee58152d1de726c1b1", "date_created": "2022-06-16T20:01:23Z", "date_updated": "2022-06-16T20:01:23Z", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "status": "failed", "type": "fast", "recording_sid": "RE291950779d909ba4260a9d253e9a280b", "duration": 2, "transcription_text": null, "api_version": "2010-04-01", "price": null, "price_unit": "USD", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Transcriptions/TR9b0bdc132040c6ee58152d1de726c1b1.json"}, "emitted_at": 1655893269201} +{"stream": "transcriptions", "data": {"sid": "TR367d55857534428011a3771214291a00", "date_created": "2022-06-16T20:01:00Z", "date_updated": "2022-06-16T20:01:00Z", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "status": "failed", "type": "fast", "recording_sid": "RE19fa8999a6c62d0a88b6dfa352ff04fb", "duration": 1, "transcription_text": null, "api_version": "2010-04-01", "price": null, "price_unit": "USD", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Transcriptions/TR367d55857534428011a3771214291a00.json"}, "emitted_at": 1655893269204} +{"stream": "transcriptions", "data": {"sid": "TRec1095aa94b9ce655c7b28ff6954e822", "date_created": "2022-06-16T20:00:58Z", "date_updated": "2022-06-17T00:10:11Z", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "status": "completed", "type": "fast", "recording_sid": "RE64bd8b998bb10e32794685da660cfda5", "duration": 3, "transcription_text": "", "api_version": "2010-04-01", "price": -0.05, "price_unit": "USD", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Transcriptions/TRec1095aa94b9ce655c7b28ff6954e822.json"}, "emitted_at": 1655893269207} +{"stream": "transcriptions", "data": {"sid": "TR55a59c6fb118c937f8bfe4301ceb113c", "date_created": "2022-06-16T20:00:55Z", "date_updated": "2022-06-16T20:00:55Z", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "status": "failed", "type": "fast", "recording_sid": "REfb431e39b6f99a3b0dd057c46344fd71", "duration": 2, 
"transcription_text": null, "api_version": "2010-04-01", "price": null, "price_unit": "USD", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Transcriptions/TR55a59c6fb118c937f8bfe4301ceb113c.json"}, "emitted_at": 1655893269210} +{"stream": "transcriptions", "data": {"sid": "TR4cfb417bfd75ed4e9e12ffe7c4b01aec", "date_created": "2022-06-02T12:54:03Z", "date_updated": "2022-06-02T12:54:03Z", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "status": "failed", "type": "fast", "recording_sid": "RE689d590e9c6e2fbb47a925d4a0596226", "duration": 1, "transcription_text": null, "api_version": "2010-04-01", "price": null, "price_unit": "USD", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Transcriptions/TR4cfb417bfd75ed4e9e12ffe7c4b01aec.json"}, "emitted_at": 1655893269212} +{"stream": "transcriptions", "data": {"sid": "TR3bad3dee16fbc9febb016da37eca5742", "date_created": "2022-05-26T22:14:18Z", "date_updated": "2022-05-27T03:00:17Z", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "status": "completed", "type": "fast", "recording_sid": "RE75c4f893b307d6d02932adce71b4add9", "duration": 6, "transcription_text": "Hi, this is Mark with an important message about your automobile service contract seems like the time.", "api_version": "2010-04-01", "price": -0.05, "price_unit": "USD", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Transcriptions/TR3bad3dee16fbc9febb016da37eca5742.json"}, "emitted_at": 1655893269214} +{"stream": "transcriptions", "data": {"sid": "TR293d39532a59fc7d30a176c9b81a1e2f", "date_created": "2022-05-26T22:14:03Z", "date_updated": "2022-05-27T02:45:11Z", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "status": "completed", "type": "fast", "recording_sid": "RE8ab08e0d46c9bd4876ad4403e2b6abeb", "duration": 22, "transcription_text": "Hi, this is Mark with an important message about your automobile service contract seems like the time to renew or extend your service contract has expired or will be 
expiring shortly. If you would like to keep coverage or extend it, press 8 to speak to a customer service agent and go over options. Press the number 9. If you are declining coverage or wish not to be reminded to get.", "api_version": "2010-04-01", "price": -0.05, "price_unit": "USD", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Transcriptions/TR293d39532a59fc7d30a176c9b81a1e2f.json"}, "emitted_at": 1655893269216} +{"stream": "transcriptions", "data": {"sid": "TR734306b34e92f031ab37f5d53e9471f5", "date_created": "2022-05-26T22:13:33Z", "date_updated": "2022-05-27T02:58:26Z", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "status": "completed", "type": "fast", "recording_sid": "REcd9ff068b0f269f93832f4b4a93dbd08", "duration": 21, "transcription_text": "This is Mark with an important message about your automobile service contract seems like the time to renew or extend your service contract has expired or will be expiring shortly. If you would like to keep coverage or extend it, press 8 to speak to a customer service agent and go over options. 
Press the number 9 if you are declining coverage or wish not to be reminded again.", "api_version": "2010-04-01", "price": -0.05, "price_unit": "USD", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Transcriptions/TR734306b34e92f031ab37f5d53e9471f5.json"}, "emitted_at": 1655893269218} +{"stream": "transcriptions", "data": {"sid": "TR0fe86128cc3260de50d364fa458a1541", "date_created": "2022-05-24T23:00:40Z", "date_updated": "2022-05-24T23:00:40Z", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "status": "failed", "type": "fast", "recording_sid": "RE81c10205dda16e7014593863c244db13", "duration": 1, "transcription_text": null, "api_version": "2010-04-01", "price": null, "price_unit": "USD", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Transcriptions/TR0fe86128cc3260de50d364fa458a1541.json"}, "emitted_at": 1655893269220} +{"stream": "transcriptions", "data": {"sid": "TR80b52f875892e19096c06a52256e14bb", "date_created": "2022-05-24T23:00:39Z", "date_updated": "2022-05-24T23:00:39Z", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "status": "failed", "type": "fast", "recording_sid": "REa8ee0893e6fc656afaa55fbae00a6813", "duration": 1, "transcription_text": null, "api_version": "2010-04-01", "price": null, "price_unit": "USD", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Transcriptions/TR80b52f875892e19096c06a52256e14bb.json"}, "emitted_at": 1655893269221} +{"stream": "transcriptions", "data": {"sid": "TR4c7f7f4344f48a3ef97ca4f3b6e1055a", "date_created": "2022-05-24T23:00:37Z", "date_updated": "2022-05-24T23:00:38Z", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "status": "failed", "type": "fast", "recording_sid": "REa09b0092d28222e8dc4af7660bdcb110", "duration": 1, "transcription_text": null, "api_version": "2010-04-01", "price": null, "price_unit": "USD", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Transcriptions/TR4c7f7f4344f48a3ef97ca4f3b6e1055a.json"}, "emitted_at": 
1655893269222} +{"stream": "transcriptions", "data": {"sid": "TRe5b1b6c45f7ba5e513a6f91896b869a9", "date_created": "2022-05-24T23:00:35Z", "date_updated": "2022-05-24T23:00:35Z", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "status": "failed", "type": "fast", "recording_sid": "REb42bbe5afa74e02fb02b2a1ef974237b", "duration": 1, "transcription_text": null, "api_version": "2010-04-01", "price": null, "price_unit": "USD", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Transcriptions/TRe5b1b6c45f7ba5e513a6f91896b869a9.json"}, "emitted_at": 1655893269223} +{"stream": "transcriptions", "data": {"sid": "TRb9ae9a1a77b0b4a46a4b827751bc8d2f", "date_created": "2022-05-24T23:00:34Z", "date_updated": "2022-05-24T23:00:34Z", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "status": "failed", "type": "fast", "recording_sid": "REa208d9124b5d0d9c5edd30cd25284a33", "duration": 2, "transcription_text": null, "api_version": "2010-04-01", "price": null, "price_unit": "USD", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Transcriptions/TRb9ae9a1a77b0b4a46a4b827751bc8d2f.json"}, "emitted_at": 1655893269225} +{"stream": "transcriptions", "data": {"sid": "TRe1367ec6f4a19672bf07f083798c063d", "date_created": "2022-05-24T23:00:32Z", "date_updated": "2022-05-24T23:00:32Z", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "status": "failed", "type": "fast", "recording_sid": "REcaab878fde266f2688cd90a88334a581", "duration": 2, "transcription_text": null, "api_version": "2010-04-01", "price": null, "price_unit": "USD", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Transcriptions/TRe1367ec6f4a19672bf07f083798c063d.json"}, "emitted_at": 1655893269226} +{"stream": "transcriptions", "data": {"sid": "TR04a84af4690e55058b945cc49e6b0157", "date_created": "2022-05-24T23:00:30Z", "date_updated": "2022-05-24T23:00:30Z", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "status": "failed", "type": "fast", "recording_sid": 
"RE710daa26224ce27f4957a3f15819d641", "duration": 2, "transcription_text": null, "api_version": "2010-04-01", "price": null, "price_unit": "USD", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Transcriptions/TR04a84af4690e55058b945cc49e6b0157.json"}, "emitted_at": 1655893269227} +{"stream": "transcriptions", "data": {"sid": "TR67511582df0467c9c80894ce02b0971e", "date_created": "2022-05-24T23:00:27Z", "date_updated": "2022-05-25T04:11:47Z", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "status": "completed", "type": "fast", "recording_sid": "REdc7979975a4c1e0ed01e9438a494aa51", "duration": 3, "transcription_text": "", "api_version": "2010-04-01", "price": -0.05, "price_unit": "USD", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Transcriptions/TR67511582df0467c9c80894ce02b0971e.json"}, "emitted_at": 1655893269228} +{"stream": "transcriptions", "data": {"sid": "TRda9bd52357697993fc14b84b6a3144f2", "date_created": "2022-05-24T23:00:24Z", "date_updated": "2022-05-24T23:00:25Z", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "status": "failed", "type": "fast", "recording_sid": "RE584f9edddc9c69488bf778f66f01986e", "duration": 1, "transcription_text": null, "api_version": "2010-04-01", "price": null, "price_unit": "USD", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Transcriptions/TRda9bd52357697993fc14b84b6a3144f2.json"}, "emitted_at": 1655893269229} +{"stream": "transcriptions", "data": {"sid": "TR5e5f662be55b80e870891813a04ed544", "date_created": "2022-05-24T23:00:22Z", "date_updated": "2022-05-24T23:00:24Z", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "status": "failed", "type": "fast", "recording_sid": "REbda9ad7207f352a046ba30f21d9aac64", "duration": 1, "transcription_text": null, "api_version": "2010-04-01", "price": null, "price_unit": "USD", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Transcriptions/TR5e5f662be55b80e870891813a04ed544.json"}, "emitted_at": 1655893269230} 
+{"stream": "transcriptions", "data": {"sid": "TR18431c9885cbe2330737714ef59b33e7", "date_created": "2022-05-24T23:00:21Z", "date_updated": "2022-05-24T23:00:21Z", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "status": "failed", "type": "fast", "recording_sid": "RE7a189bacb5dac74a30bbb9fd6836d8bd", "duration": 1, "transcription_text": null, "api_version": "2010-04-01", "price": null, "price_unit": "USD", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Transcriptions/TR18431c9885cbe2330737714ef59b33e7.json"}, "emitted_at": 1655893269230} +{"stream": "transcriptions", "data": {"sid": "TRb775d840b61007da490fbf2de7031e68", "date_created": "2022-05-24T23:00:19Z", "date_updated": "2022-05-24T23:00:19Z", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "status": "failed", "type": "fast", "recording_sid": "RE864c566fd451a7fdc3f524545573909a", "duration": 1, "transcription_text": null, "api_version": "2010-04-01", "price": null, "price_unit": "USD", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Transcriptions/TRb775d840b61007da490fbf2de7031e68.json"}, "emitted_at": 1655893269231} +{"stream": "transcriptions", "data": {"sid": "TR0744b8c3634da787f83bcf0deed56924", "date_created": "2022-05-24T23:00:18Z", "date_updated": "2022-05-24T23:00:18Z", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "status": "failed", "type": "fast", "recording_sid": "RE87b308746230bb1c3924edb28f59b3f8", "duration": 1, "transcription_text": null, "api_version": "2010-04-01", "price": null, "price_unit": "USD", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Transcriptions/TR0744b8c3634da787f83bcf0deed56924.json"}, "emitted_at": 1655893269232} +{"stream": "transcriptions", "data": {"sid": "TR035531c4aca6b95c4cabf7bd97462f8b", "date_created": "2022-05-24T23:00:16Z", "date_updated": "2022-05-24T23:00:16Z", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "status": "failed", "type": "fast", "recording_sid": 
"RE9a873de5fe7bdc35c5e12e3177749f59", "duration": 1, "transcription_text": null, "api_version": "2010-04-01", "price": null, "price_unit": "USD", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Transcriptions/TR035531c4aca6b95c4cabf7bd97462f8b.json"}, "emitted_at": 1655893269233} +{"stream": "transcriptions", "data": {"sid": "TRd12925c6968a11a51d3d8eb5749a2add", "date_created": "2022-05-24T23:00:15Z", "date_updated": "2022-05-24T23:00:15Z", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "status": "failed", "type": "fast", "recording_sid": "RE76fdf12ce0d5cc819ee117fa66b19fce", "duration": 2, "transcription_text": null, "api_version": "2010-04-01", "price": null, "price_unit": "USD", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Transcriptions/TRd12925c6968a11a51d3d8eb5749a2add.json"}, "emitted_at": 1655893269234} +{"stream": "transcriptions", "data": {"sid": "TR7b2b47c086937c50eb874eda729501fd", "date_created": "2022-05-24T23:00:12Z", "date_updated": "2022-05-24T23:00:12Z", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "status": "failed", "type": "fast", "recording_sid": "REb6acb75efe39edf2de14d640cab5c83b", "duration": 1, "transcription_text": null, "api_version": "2010-04-01", "price": null, "price_unit": "USD", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Transcriptions/TR7b2b47c086937c50eb874eda729501fd.json"}, "emitted_at": 1655893269235} +{"stream": "transcriptions", "data": {"sid": "TR1387d244df25943a5fdd36f4af61139c", "date_created": "2022-05-11T18:21:16Z", "date_updated": "2022-05-11T20:09:20Z", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "status": "completed", "type": "fast", "recording_sid": "REea3b05ba7d2bdf6b876e390da04ca563", "duration": 3, "transcription_text": "Sorry you don't qualify but have a great a.", "api_version": "2010-04-01", "price": -0.05, "price_unit": "USD", "uri": 
"/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Transcriptions/TR1387d244df25943a5fdd36f4af61139c.json"}, "emitted_at": 1655893269235} +{"stream": "transcriptions", "data": {"sid": "TR3d6a5444990fa63a42b3fcc36fe6eef7", "date_created": "2022-05-11T18:21:11Z", "date_updated": "2022-05-11T18:21:11Z", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "status": "failed", "type": "fast", "recording_sid": "REb9ff52417bdc0d8453f36f294f9f0396", "duration": 1, "transcription_text": null, "api_version": "2010-04-01", "price": null, "price_unit": "USD", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Transcriptions/TR3d6a5444990fa63a42b3fcc36fe6eef7.json"}, "emitted_at": 1655893269236} +{"stream": "transcriptions", "data": {"sid": "TR5209d766d55be4cee0c66c580e7e63cc", "date_created": "2022-05-11T18:21:09Z", "date_updated": "2022-05-11T18:21:09Z", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "status": "failed", "type": "fast", "recording_sid": "RE29cbe18a706ba7956e16277f7c2300c1", "duration": 1, "transcription_text": null, "api_version": "2010-04-01", "price": null, "price_unit": "USD", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Transcriptions/TR5209d766d55be4cee0c66c580e7e63cc.json"}, "emitted_at": 1655893269237} +{"stream": "transcriptions", "data": {"sid": "TRe7581351b23274cab2f998568f4e3f3b", "date_created": "2022-05-11T18:21:08Z", "date_updated": "2022-05-11T18:21:08Z", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "status": "failed", "type": "fast", "recording_sid": "RE0f8e84c849eac22ce546b607f343581c", "duration": 1, "transcription_text": null, "api_version": "2010-04-01", "price": null, "price_unit": "USD", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Transcriptions/TRe7581351b23274cab2f998568f4e3f3b.json"}, "emitted_at": 1655893269238} +{"stream": "transcriptions", "data": {"sid": "TRf81e29cb3eb72d693fa1797718f1a4f0", "date_created": "2022-05-11T18:21:06Z", "date_updated": 
"2022-05-11T21:41:41Z", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "status": "completed", "type": "fast", "recording_sid": "RE9f8707f4a7ea29ac097af1126af5213d", "duration": 8, "transcription_text": "This is a lot of cation all about detect compromised program. You still owe $5000.00 in backs, tax.", "api_version": "2010-04-01", "price": -0.05, "price_unit": "USD", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Transcriptions/TRf81e29cb3eb72d693fa1797718f1a4f0.json"}, "emitted_at": 1655893269239} +{"stream": "transcriptions", "data": {"sid": "TR6dce7e59c2907eade00b28ef31c7579a", "date_created": "2022-05-11T18:20:57Z", "date_updated": "2022-05-11T18:20:57Z", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "status": "failed", "type": "fast", "recording_sid": "RE00be57745f3ed9a4580b6105ef6a5671", "duration": 1, "transcription_text": null, "api_version": "2010-04-01", "price": null, "price_unit": "USD", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Transcriptions/TR6dce7e59c2907eade00b28ef31c7579a.json"}, "emitted_at": 1655893269240} +{"stream": "transcriptions", "data": {"sid": "TR7cda62cf1246fee05a3c7f9ff6b942da", "date_created": "2022-05-11T18:20:56Z", "date_updated": "2022-05-11T18:20:56Z", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "status": "failed", "type": "fast", "recording_sid": "RE5f8c70f79a4ae3ef3a387f3d3b5caf4d", "duration": 1, "transcription_text": null, "api_version": "2010-04-01", "price": null, "price_unit": "USD", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Transcriptions/TR7cda62cf1246fee05a3c7f9ff6b942da.json"}, "emitted_at": 1655893269240} +{"stream": "transcriptions", "data": {"sid": "TRdc7087fcd0d117e3eb7df3ad06cdd5bb", "date_created": "2022-04-20T17:33:26Z", "date_updated": "2022-04-20T17:33:27Z", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "status": "failed", "type": "fast", "recording_sid": "RE582e4deeefc8f5f67c89542aa878d1b5", "duration": 1, 
"transcription_text": null, "api_version": "2010-04-01", "price": null, "price_unit": "USD", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Transcriptions/TRdc7087fcd0d117e3eb7df3ad06cdd5bb.json"}, "emitted_at": 1655893269241} +{"stream": "transcriptions", "data": {"sid": "TRd71ab88cb67c70d7474ae55be0523ba0", "date_created": "2022-04-20T17:33:24Z", "date_updated": "2022-04-20T17:33:25Z", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "status": "failed", "type": "fast", "recording_sid": "RE018b40e386dd9562b79488eadbdab63c", "duration": 2, "transcription_text": null, "api_version": "2010-04-01", "price": null, "price_unit": "USD", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Transcriptions/TRd71ab88cb67c70d7474ae55be0523ba0.json"}, "emitted_at": 1655893269242} +{"stream": "transcriptions", "data": {"sid": "TR5a184f2893e88e5ea741a1f8bca61f38", "date_created": "2022-04-06T21:01:01Z", "date_updated": "2022-04-06T21:01:02Z", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "status": "failed", "type": "fast", "recording_sid": "RE7c783f510306f155a6ec9d9d1805bca6", "duration": 1, "transcription_text": null, "api_version": "2010-04-01", "price": null, "price_unit": "USD", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Transcriptions/TR5a184f2893e88e5ea741a1f8bca61f38.json"}, "emitted_at": 1655893269243} +{"stream": "transcriptions", "data": {"sid": "TRcdbab9cc0a01f47fbc2a6ad5cfe68df2", "date_created": "2022-04-06T21:00:58Z", "date_updated": "2022-04-06T21:01:00Z", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "status": "failed", "type": "fast", "recording_sid": "REba6dd3aac34a37a9328b8650886b270c", "duration": 1, "transcription_text": null, "api_version": "2010-04-01", "price": null, "price_unit": "USD", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Transcriptions/TRcdbab9cc0a01f47fbc2a6ad5cfe68df2.json"}, "emitted_at": 1655893269243} +{"stream": "transcriptions", "data": {"sid": 
"TRb4ec5780701de39b7b967c35b1aa94fc", "date_created": "2022-04-06T20:57:37Z", "date_updated": "2022-04-06T20:57:38Z", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "status": "failed", "type": "fast", "recording_sid": "RE8a965af19f26ab0ae81467cdb64530cc", "duration": 1, "transcription_text": null, "api_version": "2010-04-01", "price": null, "price_unit": "USD", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Transcriptions/TRb4ec5780701de39b7b967c35b1aa94fc.json"}, "emitted_at": 1655893269244} +{"stream": "transcriptions", "data": {"sid": "TR72fb853e9b69dadbbcdbdce5a97709b2", "date_created": "2022-03-13T23:56:37Z", "date_updated": "2022-03-13T23:56:53Z", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "status": "completed", "type": "fast", "recording_sid": "REd2d304b862d9860c1843ed5e80212081", "duration": 5, "transcription_text": "Ring ring but.", "api_version": "2010-04-01", "price": -0.05, "price_unit": "USD", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Transcriptions/TR72fb853e9b69dadbbcdbdce5a97709b2.json"}, "emitted_at": 1655893269245} +{"stream": "transcriptions", "data": {"sid": "TR0a36741bbf1962633f05b1a4a5c10037", "date_created": "2022-03-13T23:56:31Z", "date_updated": "2022-03-13T23:56:32Z", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "status": "failed", "type": "fast", "recording_sid": "REa944f91cad14528766b3dfb3152fbb89", "duration": 1, "transcription_text": null, "api_version": "2010-04-01", "price": null, "price_unit": "USD", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Transcriptions/TR0a36741bbf1962633f05b1a4a5c10037.json"}, "emitted_at": 1655893269245} +{"stream": "transcriptions", "data": {"sid": "TRa31245a4e304c76a9567916abb6e2c09", "date_created": "2022-03-13T23:56:30Z", "date_updated": "2022-03-13T23:56:32Z", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "status": "failed", "type": "fast", "recording_sid": "REb6d63081540fd7ec9835f267fa722ff4", "duration": 1, 
"transcription_text": null, "api_version": "2010-04-01", "price": null, "price_unit": "USD", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Transcriptions/TRa31245a4e304c76a9567916abb6e2c09.json"}, "emitted_at": 1655893269246} +{"stream": "transcriptions", "data": {"sid": "TR1fd4fe6a3fcc8be74ac04275faccd8f9", "date_created": "2022-03-13T23:56:28Z", "date_updated": "2022-03-13T23:56:28Z", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "status": "failed", "type": "fast", "recording_sid": "RE6be6c79bca501a7d5284c5ebcd87ec22", "duration": 2, "transcription_text": null, "api_version": "2010-04-01", "price": null, "price_unit": "USD", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Transcriptions/TR1fd4fe6a3fcc8be74ac04275faccd8f9.json"}, "emitted_at": 1655893269247} +{"stream": "transcriptions", "data": {"sid": "TR3688e064630342ac21e4102ea9c94b1b", "date_created": "2021-11-09T15:17:49Z", "date_updated": "2021-11-09T15:17:49Z", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "status": "failed", "type": "fast", "recording_sid": "REbffbd7d59d47bf8e0d485b93ca0ef6d5", "duration": 1, "transcription_text": null, "api_version": "2010-04-01", "price": null, "price_unit": "USD", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Transcriptions/TR3688e064630342ac21e4102ea9c94b1b.json"}, "emitted_at": 1655893269248} +{"stream": "transcriptions", "data": {"sid": "TR064fe6e8168e45a5cd45152b8ba18493", "date_created": "2021-11-09T15:17:48Z", "date_updated": "2021-11-09T15:17:48Z", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "status": "failed", "type": "fast", "recording_sid": "REdad12265537ffc795ed39e1a3cce523a", "duration": 1, "transcription_text": null, "api_version": "2010-04-01", "price": null, "price_unit": "USD", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Transcriptions/TR064fe6e8168e45a5cd45152b8ba18493.json"}, "emitted_at": 1655893269248} +{"stream": "transcriptions", "data": {"sid": 
"TR01a6513a2683bd730dcfcbe9c9be738e", "date_created": "2021-11-09T15:17:46Z", "date_updated": "2021-11-09T15:17:46Z", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "status": "failed", "type": "fast", "recording_sid": "REdc103d801860b2f1f22b7d6a49548019", "duration": 1, "transcription_text": null, "api_version": "2010-04-01", "price": null, "price_unit": "USD", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Transcriptions/TR01a6513a2683bd730dcfcbe9c9be738e.json"}, "emitted_at": 1655893269249} +{"stream": "transcriptions", "data": {"sid": "TRc4f30f9c7eaa94ef8048fb4f0f4556ef", "date_created": "2021-11-09T15:17:44Z", "date_updated": "2021-11-09T15:17:45Z", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "status": "failed", "type": "fast", "recording_sid": "RE6198d4cd450f160dbda29c794a0c576a", "duration": 1, "transcription_text": null, "api_version": "2010-04-01", "price": null, "price_unit": "USD", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Transcriptions/TRc4f30f9c7eaa94ef8048fb4f0f4556ef.json"}, "emitted_at": 1655893269250} +{"stream": "transcriptions", "data": {"sid": "TR6132917758fbfb4f624419cb66331c95", "date_created": "2021-09-17T18:23:12Z", "date_updated": "2021-09-17T18:23:12Z", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "status": "failed", "type": "fast", "recording_sid": "RE4bfead7ec397e16f280c6c698b65dc22", "duration": 1, "transcription_text": null, "api_version": "2010-04-01", "price": null, "price_unit": "USD", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Transcriptions/TR6132917758fbfb4f624419cb66331c95.json"}, "emitted_at": 1655893269250} +{"stream": "transcriptions", "data": {"sid": "TR15fa1d56da27a5ff107eaec2968e8e1d", "date_created": "2021-09-17T17:34:41Z", "date_updated": "2021-09-17T17:34:41Z", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "status": "failed", "type": "fast", "recording_sid": "REd1d3488451ffb2f1377ff31d7131b673", "duration": 2, "transcription_text": 
null, "api_version": "2010-04-01", "price": null, "price_unit": "USD", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Transcriptions/TR15fa1d56da27a5ff107eaec2968e8e1d.json"}, "emitted_at": 1655893269251} +{"stream": "transcriptions", "data": {"sid": "TRebbad47aa174c3f15f2fbbe1b03aba45", "date_created": "2021-09-17T15:28:03Z", "date_updated": "2021-09-17T15:28:03Z", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "status": "failed", "type": "fast", "recording_sid": "REbee274393d13ac1591be30f07e13264a", "duration": 1, "transcription_text": null, "api_version": "2010-04-01", "price": null, "price_unit": "USD", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Transcriptions/TRebbad47aa174c3f15f2fbbe1b03aba45.json"}, "emitted_at": 1655893269252} +{"stream": "transcriptions", "data": {"sid": "TRfd99bdaeb685d2f07f70e6ac9524e31f", "date_created": "2021-09-17T15:28:01Z", "date_updated": "2021-09-17T15:28:01Z", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "status": "failed", "type": "fast", "recording_sid": "RE910e3308390a89a6df0f06c8d3803dc6", "duration": 1, "transcription_text": null, "api_version": "2010-04-01", "price": null, "price_unit": "USD", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Transcriptions/TRfd99bdaeb685d2f07f70e6ac9524e31f.json"}, "emitted_at": 1655893269252} +{"stream": "transcriptions", "data": {"sid": "TR4d7dd80392e789534fbae821d590cbcf", "date_created": "2021-09-17T15:27:58Z", "date_updated": "2021-09-17T15:27:59Z", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "status": "failed", "type": "fast", "recording_sid": "REa86ec5a373b812d685919ee3c21f2ceb", "duration": 1, "transcription_text": null, "api_version": "2010-04-01", "price": null, "price_unit": "USD", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Transcriptions/TR4d7dd80392e789534fbae821d590cbcf.json"}, "emitted_at": 1655893269253} +{"stream": "transcriptions", "data": {"sid": 
"TRfcad5676900e5cb28455de93beb321fe", "date_created": "2021-09-17T15:27:57Z", "date_updated": "2021-09-17T15:27:57Z", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "status": "failed", "type": "fast", "recording_sid": "RE85f33958924c33d3220a1c09cddf4503", "duration": 1, "transcription_text": null, "api_version": "2010-04-01", "price": null, "price_unit": "USD", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Transcriptions/TRfcad5676900e5cb28455de93beb321fe.json"}, "emitted_at": 1655893269254} +{"stream": "transcriptions", "data": {"sid": "TR4939bd4a8782a0acb72ca52881b9f592", "date_created": "2021-09-17T15:27:53Z", "date_updated": "2021-09-17T15:27:54Z", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "status": "failed", "type": "fast", "recording_sid": "RE57c6ee3c57200f7d15db1184b218dd59", "duration": 1, "transcription_text": null, "api_version": "2010-04-01", "price": null, "price_unit": "USD", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Transcriptions/TR4939bd4a8782a0acb72ca52881b9f592.json"}, "emitted_at": 1655893269255} +{"stream": "transcriptions", "data": {"sid": "TRd13793418a0c277d795a9c45e7c10c71", "date_created": "2021-09-17T15:27:52Z", "date_updated": "2021-09-17T15:27:52Z", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "status": "failed", "type": "fast", "recording_sid": "RE4f69bb8de23bb14ae6a793a69859886f", "duration": 1, "transcription_text": null, "api_version": "2010-04-01", "price": null, "price_unit": "USD", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Transcriptions/TRd13793418a0c277d795a9c45e7c10c71.json"}, "emitted_at": 1655893269255} +{"stream": "transcriptions", "data": {"sid": "TRb798fdc68f9e84c6621ff0522aa38358", "date_created": "2021-09-16T15:01:56Z", "date_updated": "2021-09-16T15:25:17Z", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "status": "completed", "type": "fast", "recording_sid": "RE0409a25472990d48053ad2c0ca5c1104", "duration": 5, 
"transcription_text": "", "api_version": "2010-04-01", "price": -0.05, "price_unit": "USD", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Transcriptions/TRb798fdc68f9e84c6621ff0522aa38358.json"}, "emitted_at": 1655893269256} +{"stream": "transcriptions", "data": {"sid": "TR7a7ae8af4043bd3a8a3c8538360a46c1", "date_created": "2021-09-16T15:01:51Z", "date_updated": "2021-09-16T15:01:51Z", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "status": "failed", "type": "fast", "recording_sid": "RE62cc065b3faedc451bf7d59a7ebc6873", "duration": 1, "transcription_text": null, "api_version": "2010-04-01", "price": null, "price_unit": "USD", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Transcriptions/TR7a7ae8af4043bd3a8a3c8538360a46c1.json"}, "emitted_at": 1655893269257} +{"stream": "transcriptions", "data": {"sid": "TR05f023d08db690cdf505eaa18c6f186f", "date_created": "2021-09-16T15:01:49Z", "date_updated": "2021-09-16T15:01:50Z", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "status": "failed", "type": "fast", "recording_sid": "REe2cdd26ec4f690e6eeaf803ab17a7964", "duration": 1, "transcription_text": null, "api_version": "2010-04-01", "price": null, "price_unit": "USD", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Transcriptions/TR05f023d08db690cdf505eaa18c6f186f.json"}, "emitted_at": 1655893269257} +{"stream": "transcriptions", "data": {"sid": "TR7532179efe3e4bce446b8aaec945d03e", "date_created": "2021-09-16T15:01:48Z", "date_updated": "2021-09-16T15:01:48Z", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "status": "failed", "type": "fast", "recording_sid": "RE376ecf266e9a7952e3a8878a6bb79b04", "duration": 2, "transcription_text": null, "api_version": "2010-04-01", "price": null, "price_unit": "USD", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Transcriptions/TR7532179efe3e4bce446b8aaec945d03e.json"}, "emitted_at": 1655893269258} +{"stream": "transcriptions", "data": {"sid": 
"TR314300c14e621b7394fc62c55a22b671", "date_created": "2021-09-16T15:01:38Z", "date_updated": "2021-09-16T15:01:39Z", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "status": "failed", "type": "fast", "recording_sid": "REc7f9987a1f4883b1f3313f3323df2680", "duration": 1, "transcription_text": null, "api_version": "2010-04-01", "price": null, "price_unit": "USD", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Transcriptions/TR314300c14e621b7394fc62c55a22b671.json"}, "emitted_at": 1655893269259} +{"stream": "transcriptions", "data": {"sid": "TRaffb224adfbdf718d39d989e107280cb", "date_created": "2021-09-16T15:01:33Z", "date_updated": "2021-09-16T15:01:33Z", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "status": "failed", "type": "fast", "recording_sid": "RE269500bc54d8b60711903d3be48f6223", "duration": 1, "transcription_text": null, "api_version": "2010-04-01", "price": null, "price_unit": "USD", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Transcriptions/TRaffb224adfbdf718d39d989e107280cb.json"}, "emitted_at": 1655893269259} +{"stream": "transcriptions", "data": {"sid": "TR6c7f155485bf0a940345c8e4b5b5957c", "date_created": "2021-09-16T15:01:32Z", "date_updated": "2021-09-16T15:01:32Z", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "status": "failed", "type": "fast", "recording_sid": "RE5168300530a0de32a559618a4e800d8c", "duration": 2, "transcription_text": null, "api_version": "2010-04-01", "price": null, "price_unit": "USD", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Transcriptions/TR6c7f155485bf0a940345c8e4b5b5957c.json"}, "emitted_at": 1655893269260} +{"stream": "transcriptions", "data": {"sid": "TR89b0b9796593b714a4a27d1434b1bb89", "date_created": "2021-09-16T15:01:27Z", "date_updated": "2021-09-16T15:01:29Z", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "status": "failed", "type": "fast", "recording_sid": "RE0e61dfff7ae43312957252ea208443be", "duration": 1, "transcription_text": 
null, "api_version": "2010-04-01", "price": null, "price_unit": "USD", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Transcriptions/TR89b0b9796593b714a4a27d1434b1bb89.json"}, "emitted_at": 1655893269261} +{"stream": "transcriptions", "data": {"sid": "TRbcfc9bba83ecc911ae9ee44366e95e6e", "date_created": "2021-09-16T15:01:21Z", "date_updated": "2021-09-16T15:01:21Z", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "status": "failed", "type": "fast", "recording_sid": "RE8d43912fa4f42ca38909319610fa5e70", "duration": 1, "transcription_text": null, "api_version": "2010-04-01", "price": null, "price_unit": "USD", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Transcriptions/TRbcfc9bba83ecc911ae9ee44366e95e6e.json"}, "emitted_at": 1655893269261} +{"stream": "transcriptions", "data": {"sid": "TRd29d25834879813ffa4fff4f4917fb2b", "date_created": "2021-09-16T15:01:19Z", "date_updated": "2021-09-16T15:01:20Z", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "status": "failed", "type": "fast", "recording_sid": "RE47e86a14cae026ec7ada2ca22bfe3361", "duration": 1, "transcription_text": null, "api_version": "2010-04-01", "price": null, "price_unit": "USD", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Transcriptions/TRd29d25834879813ffa4fff4f4917fb2b.json"}, "emitted_at": 1655893269262} +{"stream": "transcriptions", "data": {"sid": "TRbc48a66bd0a124aac457b4e1f41979c9", "date_created": "2021-09-15T18:10:50Z", "date_updated": "2021-09-15T18:10:50Z", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "status": "failed", "type": "fast", "recording_sid": "RE203556d025cdd7a27f017a02b6510639", "duration": 1, "transcription_text": null, "api_version": "2010-04-01", "price": null, "price_unit": "USD", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Transcriptions/TRbc48a66bd0a124aac457b4e1f41979c9.json"}, "emitted_at": 1655893269263} +{"stream": "transcriptions", "data": {"sid": 
"TRf8f94fef39b04e03ef25a9b4cd349727", "date_created": "2021-09-15T18:10:34Z", "date_updated": "2021-09-15T18:10:34Z", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "status": "failed", "type": "fast", "recording_sid": "RE522daa3a03071a120a7b6a66712c9e3d", "duration": 1, "transcription_text": null, "api_version": "2010-04-01", "price": null, "price_unit": "USD", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Transcriptions/TRf8f94fef39b04e03ef25a9b4cd349727.json"}, "emitted_at": 1655893269263} +{"stream": "transcriptions", "data": {"sid": "TRb5a30b17e0a266c13821b283a516f66a", "date_created": "2021-09-14T15:18:06Z", "date_updated": "2021-09-14T15:18:07Z", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "status": "failed", "type": "fast", "recording_sid": "RE8c787a1ad02eaea78625d6574e8e4670", "duration": 1, "transcription_text": null, "api_version": "2010-04-01", "price": null, "price_unit": "USD", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Transcriptions/TRb5a30b17e0a266c13821b283a516f66a.json"}, "emitted_at": 1655893269264} +{"stream": "transcriptions", "data": {"sid": "TR8e5bcce74a6e15b2a5d4f992f4393cd5", "date_created": "2021-09-14T15:18:05Z", "date_updated": "2021-09-14T15:18:05Z", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "status": "failed", "type": "fast", "recording_sid": "RE6a74d566287d2e99c57c42e2ce9e5d10", "duration": 1, "transcription_text": null, "api_version": "2010-04-01", "price": null, "price_unit": "USD", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Transcriptions/TR8e5bcce74a6e15b2a5d4f992f4393cd5.json"}, "emitted_at": 1655893269265} +{"stream": "transcriptions", "data": {"sid": "TR717d2e6cc4419bff681cf317c789cc75", "date_created": "2021-09-14T15:18:03Z", "date_updated": "2021-09-14T15:18:03Z", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "status": "failed", "type": "fast", "recording_sid": "REd22429993d9b4098bdc73cf7b404cf6b", "duration": 1, "transcription_text": 
null, "api_version": "2010-04-01", "price": null, "price_unit": "USD", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Transcriptions/TR717d2e6cc4419bff681cf317c789cc75.json"}, "emitted_at": 1655893269265} +{"stream": "transcriptions", "data": {"sid": "TR2489281127d79daff48608d6de0ece24", "date_created": "2021-09-14T15:18:02Z", "date_updated": "2021-09-14T15:18:02Z", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "status": "failed", "type": "fast", "recording_sid": "RE158fcda245f5cd98a9cfca4402c229cc", "duration": 1, "transcription_text": null, "api_version": "2010-04-01", "price": null, "price_unit": "USD", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Transcriptions/TR2489281127d79daff48608d6de0ece24.json"}, "emitted_at": 1655893269266} +{"stream": "transcriptions", "data": {"sid": "TR21e74421fe20ec64178df33dc112b5ca", "date_created": "2021-09-14T15:18:00Z", "date_updated": "2021-09-14T15:18:00Z", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "status": "failed", "type": "fast", "recording_sid": "RE2e70e93627be86345e47ea8ddc5bed43", "duration": 1, "transcription_text": null, "api_version": "2010-04-01", "price": null, "price_unit": "USD", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Transcriptions/TR21e74421fe20ec64178df33dc112b5ca.json"}, "emitted_at": 1655893269266} +{"stream": "transcriptions", "data": {"sid": "TR6e4b47b91756ae71f259f629acf88fef", "date_created": "2021-09-14T15:17:58Z", "date_updated": "2021-09-14T15:17:58Z", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "status": "failed", "type": "fast", "recording_sid": "RE042ed60bfd61d89162e03d06ee8b3c66", "duration": 1, "transcription_text": null, "api_version": "2010-04-01", "price": null, "price_unit": "USD", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Transcriptions/TR6e4b47b91756ae71f259f629acf88fef.json"}, "emitted_at": 1655893269267} +{"stream": "transcriptions", "data": {"sid": 
"TR4ccd1b81836ec97340977637874a4845", "date_created": "2021-09-14T15:17:57Z", "date_updated": "2021-09-14T15:17:57Z", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "status": "failed", "type": "fast", "recording_sid": "REec04d97a1c09df61f25a008e64b8aafd", "duration": 1, "transcription_text": null, "api_version": "2010-04-01", "price": null, "price_unit": "USD", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Transcriptions/TR4ccd1b81836ec97340977637874a4845.json"}, "emitted_at": 1655893269268} +{"stream": "transcriptions", "data": {"sid": "TR22ee0918045766948f6d9af1434958b3", "date_created": "2021-09-14T15:17:55Z", "date_updated": "2021-09-14T15:17:55Z", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "status": "failed", "type": "fast", "recording_sid": "REedfc971401390fc18ec3fd12119eb186", "duration": 1, "transcription_text": null, "api_version": "2010-04-01", "price": null, "price_unit": "USD", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Transcriptions/TR22ee0918045766948f6d9af1434958b3.json"}, "emitted_at": 1655893269268} +{"stream": "transcriptions", "data": {"sid": "TR6ba00ea99c5a2240fced873aba72a3e7", "date_created": "2021-09-14T15:17:53Z", "date_updated": "2021-09-14T15:17:54Z", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "status": "failed", "type": "fast", "recording_sid": "REd32d4c97d8f7623c40ef413d90ad87ef", "duration": 1, "transcription_text": null, "api_version": "2010-04-01", "price": null, "price_unit": "USD", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Transcriptions/TR6ba00ea99c5a2240fced873aba72a3e7.json"}, "emitted_at": 1655893269269} +{"stream": "transcriptions", "data": {"sid": "TR3d9a62fc0936ad52d59e8e5a1f94745a", "date_created": "2021-09-14T15:17:52Z", "date_updated": "2021-09-14T15:17:52Z", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "status": "failed", "type": "fast", "recording_sid": "REac65a1a5518e5c45c81d83aa533d6ca7", "duration": 1, "transcription_text": 
null, "api_version": "2010-04-01", "price": null, "price_unit": "USD", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Transcriptions/TR3d9a62fc0936ad52d59e8e5a1f94745a.json"}, "emitted_at": 1655893269269} +{"stream": "transcriptions", "data": {"sid": "TRb0f75fb83654581c4b11711b9d628859", "date_created": "2021-09-14T14:56:35Z", "date_updated": "2021-09-14T14:56:35Z", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "status": "failed", "type": "fast", "recording_sid": "RE636fb5cd3d229c9a1f122070a47d4d71", "duration": 1, "transcription_text": null, "api_version": "2010-04-01", "price": null, "price_unit": "USD", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Transcriptions/TRb0f75fb83654581c4b11711b9d628859.json"}, "emitted_at": 1655893269270} +{"stream": "transcriptions", "data": {"sid": "TR113050137c84c177fdb291f2518025fb", "date_created": "2021-09-14T14:55:20Z", "date_updated": "2021-09-14T14:55:20Z", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "status": "failed", "type": "fast", "recording_sid": "REed2758be99a5ca27b36dfdf4e1edd0be", "duration": 1, "transcription_text": null, "api_version": "2010-04-01", "price": null, "price_unit": "USD", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Transcriptions/TR113050137c84c177fdb291f2518025fb.json"}, "emitted_at": 1655893269271} +{"stream": "transcriptions", "data": {"sid": "TR0e37434ec6fe9aa9d906bb4a944ef014", "date_created": "2021-09-14T14:55:18Z", "date_updated": "2021-09-14T15:08:55Z", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "status": "completed", "type": "fast", "recording_sid": "RE175431544d95dc38abd010831642fc1e", "duration": 4, "transcription_text": "", "api_version": "2010-04-01", "price": -0.05, "price_unit": "USD", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Transcriptions/TR0e37434ec6fe9aa9d906bb4a944ef014.json"}, "emitted_at": 1655893269271} +{"stream": "transcriptions", "data": {"sid": 
"TRe8998e0f9cbfbf6123e71680320c1080", "date_created": "2021-09-14T14:55:14Z", "date_updated": "2021-09-14T14:55:14Z", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "status": "failed", "type": "fast", "recording_sid": "RE9a7989c35ba53c9b97564b568bcb679e", "duration": 1, "transcription_text": null, "api_version": "2010-04-01", "price": null, "price_unit": "USD", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Transcriptions/TRe8998e0f9cbfbf6123e71680320c1080.json"}, "emitted_at": 1655893269272} +{"stream": "transcriptions", "data": {"sid": "TR9efca3e8db3396a4dada6f4fc30e3c56", "date_created": "2021-09-14T14:55:13Z", "date_updated": "2021-09-14T14:55:13Z", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "status": "failed", "type": "fast", "recording_sid": "RE38e09877b5fb4b33b84c5d0a0821fb38", "duration": 1, "transcription_text": null, "api_version": "2010-04-01", "price": null, "price_unit": "USD", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Transcriptions/TR9efca3e8db3396a4dada6f4fc30e3c56.json"}, "emitted_at": 1655893269273} +{"stream": "transcriptions", "data": {"sid": "TR85eb7cc7e5146af795c1d7fc46084f1a", "date_created": "2021-09-14T14:55:11Z", "date_updated": "2021-09-14T14:55:11Z", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "status": "failed", "type": "fast", "recording_sid": "RE9e41da230d7d4a16be9ba4a43e2f0e02", "duration": 1, "transcription_text": null, "api_version": "2010-04-01", "price": null, "price_unit": "USD", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Transcriptions/TR85eb7cc7e5146af795c1d7fc46084f1a.json"}, "emitted_at": 1655893269273} +{"stream": "transcriptions", "data": {"sid": "TR8fe28d60db1478a1a51ef667a9551286", "date_created": "2021-09-14T14:55:09Z", "date_updated": "2021-09-14T14:55:10Z", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "status": "failed", "type": "fast", "recording_sid": "RE8bf9702bab133de98f34eaf48e030924", "duration": 1, "transcription_text": 
null, "api_version": "2010-04-01", "price": null, "price_unit": "USD", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Transcriptions/TR8fe28d60db1478a1a51ef667a9551286.json"}, "emitted_at": 1655893269274} +{"stream": "transcriptions", "data": {"sid": "TR3453b8924e3c031508ebdd464198d0e2", "date_created": "2021-09-14T14:55:08Z", "date_updated": "2021-09-14T14:55:08Z", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "status": "failed", "type": "fast", "recording_sid": "REaec4a8d1768924a5ff4f94c3f12b79db", "duration": 1, "transcription_text": null, "api_version": "2010-04-01", "price": null, "price_unit": "USD", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Transcriptions/TR3453b8924e3c031508ebdd464198d0e2.json"}, "emitted_at": 1655893269274} +{"stream": "transcriptions", "data": {"sid": "TR20bacb993cbd204b9d4f575ab834e93a", "date_created": "2021-09-14T14:55:06Z", "date_updated": "2021-09-14T15:02:44Z", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "status": "completed", "type": "fast", "recording_sid": "REa02578180a641080b92d7df7f4aade3b", "duration": 3, "transcription_text": "", "api_version": "2010-04-01", "price": -0.05, "price_unit": "USD", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Transcriptions/TR20bacb993cbd204b9d4f575ab834e93a.json"}, "emitted_at": 1655893269275} +{"stream": "transcriptions", "data": {"sid": "TR8a278159cbe0b0bcddb94a41b30685ae", "date_created": "2021-09-14T14:55:03Z", "date_updated": "2021-09-14T15:09:30Z", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "status": "completed", "type": "fast", "recording_sid": "RE4013b47db61ba88fa3b9832c8dddf9fc", "duration": 3, "transcription_text": "", "api_version": "2010-04-01", "price": -0.05, "price_unit": "USD", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Transcriptions/TR8a278159cbe0b0bcddb94a41b30685ae.json"}, "emitted_at": 1655893269276} +{"stream": "transcriptions", "data": {"sid": 
"TR9467ca23b2edfaa26b7937d2434f852a", "date_created": "2021-09-14T14:55:00Z", "date_updated": "2021-09-14T14:55:00Z", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "status": "failed", "type": "fast", "recording_sid": "RE87f39a2cbfdd6dfe3b52ba81cadf518d", "duration": 1, "transcription_text": null, "api_version": "2010-04-01", "price": null, "price_unit": "USD", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Transcriptions/TR9467ca23b2edfaa26b7937d2434f852a.json"}, "emitted_at": 1655893269276} +{"stream": "transcriptions", "data": {"sid": "TR4d54044267ebfc95b9a07d894e119c04", "date_created": "2021-09-14T14:54:58Z", "date_updated": "2021-09-14T15:08:48Z", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "status": "completed", "type": "fast", "recording_sid": "RE17caecf89aeb65291f6c46c479396fee", "duration": 4, "transcription_text": "", "api_version": "2010-04-01", "price": -0.05, "price_unit": "USD", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Transcriptions/TR4d54044267ebfc95b9a07d894e119c04.json"}, "emitted_at": 1655893269277} +{"stream": "transcriptions", "data": {"sid": "TR49f22bcb51f13208dc587fc3196cc27f", "date_created": "2021-09-14T14:54:54Z", "date_updated": "2021-09-14T14:54:54Z", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "status": "failed", "type": "fast", "recording_sid": "RE0e32182bcff5d5d2a15eff341a8ca344", "duration": 1, "transcription_text": null, "api_version": "2010-04-01", "price": null, "price_unit": "USD", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Transcriptions/TR49f22bcb51f13208dc587fc3196cc27f.json"}, "emitted_at": 1655893269277} +{"stream": "transcriptions", "data": {"sid": "TRc48df5399fbc452bebd4a7827acb221d", "date_created": "2021-09-14T14:54:52Z", "date_updated": "2021-09-14T15:09:09Z", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "status": "completed", "type": "fast", "recording_sid": "REa07d1e10a49ffd2f9e3fd3901042baa1", "duration": 3, 
"transcription_text": "", "api_version": "2010-04-01", "price": -0.05, "price_unit": "USD", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Transcriptions/TRc48df5399fbc452bebd4a7827acb221d.json"}, "emitted_at": 1655893269278} +{"stream": "transcriptions", "data": {"sid": "TR2c5e4dd3b601271750425432e7cfb7af", "date_created": "2021-09-14T14:54:49Z", "date_updated": "2021-09-14T15:08:32Z", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "status": "completed", "type": "fast", "recording_sid": "RE6a5c6fe022ec6ecf9f709b8d87bc978b", "duration": 3, "transcription_text": "", "api_version": "2010-04-01", "price": -0.05, "price_unit": "USD", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Transcriptions/TR2c5e4dd3b601271750425432e7cfb7af.json"}, "emitted_at": 1655893269279} +{"stream": "transcriptions", "data": {"sid": "TR6540303431e21c7f1af28cbc827fa484", "date_created": "2021-09-14T14:54:45Z", "date_updated": "2021-09-14T14:54:46Z", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "status": "failed", "type": "fast", "recording_sid": "RE4d2dbeccec3757a45888e2809f211b22", "duration": 1, "transcription_text": null, "api_version": "2010-04-01", "price": null, "price_unit": "USD", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Transcriptions/TR6540303431e21c7f1af28cbc827fa484.json"}, "emitted_at": 1655893269279} +{"stream": "transcriptions", "data": {"sid": "TR3e68b59c8e03ba07c9c6673fc1c5cda0", "date_created": "2021-08-06T14:52:42Z", "date_updated": "2021-08-06T14:55:31Z", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "status": "completed", "type": "fast", "recording_sid": "RE554fc12be518c7318411679536a393bb", "duration": 5, "transcription_text": "Yes.", "api_version": "2010-04-01", "price": -0.05, "price_unit": "USD", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Transcriptions/TR3e68b59c8e03ba07c9c6673fc1c5cda0.json"}, "emitted_at": 1655893269280} +{"stream": "transcriptions", "data": {"sid": 
"TRf52892b14707484416689f7f4773e18d", "date_created": "2021-08-04T19:22:13Z", "date_updated": "2021-08-04T19:22:13Z", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "status": "failed", "type": "fast", "recording_sid": "RE4cd452cfa8897ff0ffb8c96d25fe00cf", "duration": 1, "transcription_text": null, "api_version": "2010-04-01", "price": null, "price_unit": "USD", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Transcriptions/TRf52892b14707484416689f7f4773e18d.json"}, "emitted_at": 1655893269280} +{"stream": "transcriptions", "data": {"sid": "TRa2a755d1aaf94b3026412ead34b7385f", "date_created": "2021-08-04T19:22:11Z", "date_updated": "2021-08-04T19:22:11Z", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "status": "failed", "type": "fast", "recording_sid": "RE56f58aab50da5ab12ad931815ab53f90", "duration": 1, "transcription_text": null, "api_version": "2010-04-01", "price": null, "price_unit": "USD", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Transcriptions/TRa2a755d1aaf94b3026412ead34b7385f.json"}, "emitted_at": 1655893269281} +{"stream": "transcriptions", "data": {"sid": "TR1fc10a8bfa44065cb5a4ebe6989b48a1", "date_created": "2021-08-02T16:34:18Z", "date_updated": "2021-08-02T16:34:18Z", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "status": "failed", "type": "fast", "recording_sid": "RE4d0ef6c83919e750437eded31c290948", "duration": 1, "transcription_text": null, "api_version": "2010-04-01", "price": null, "price_unit": "USD", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Transcriptions/TR1fc10a8bfa44065cb5a4ebe6989b48a1.json"}, "emitted_at": 1655893269282} +{"stream": "transcriptions", "data": {"sid": "TR74961ade06a92971a8a37d0a86fd93b2", "date_created": "2021-08-02T16:34:17Z", "date_updated": "2021-08-02T16:34:17Z", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "status": "failed", "type": "fast", "recording_sid": "REe8218977962180ba324ed4295871ef58", "duration": 1, "transcription_text": 
null, "api_version": "2010-04-01", "price": null, "price_unit": "USD", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Transcriptions/TR74961ade06a92971a8a37d0a86fd93b2.json"}, "emitted_at": 1655893269282} +{"stream": "transcriptions", "data": {"sid": "TR1d82a640576ae85d115e3744bbee5bf2", "date_created": "2021-08-02T16:34:15Z", "date_updated": "2021-08-02T16:34:16Z", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "status": "failed", "type": "fast", "recording_sid": "REee3241ee410eed4705cf5a6f0fea3351", "duration": 1, "transcription_text": null, "api_version": "2010-04-01", "price": null, "price_unit": "USD", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Transcriptions/TR1d82a640576ae85d115e3744bbee5bf2.json"}, "emitted_at": 1655893269553} +{"stream": "transcriptions", "data": {"sid": "TRd560964d158e9b09a4e2c0f4a81d056e", "date_created": "2021-08-02T16:34:13Z", "date_updated": "2021-08-02T16:34:14Z", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "status": "failed", "type": "fast", "recording_sid": "RE99cc53185b729dd8678ad428ed9e4b1d", "duration": 1, "transcription_text": null, "api_version": "2010-04-01", "price": null, "price_unit": "USD", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Transcriptions/TRd560964d158e9b09a4e2c0f4a81d056e.json"}, "emitted_at": 1655893269556} +{"stream": "transcriptions", "data": {"sid": "TR9a67ff36bd22ca23ffb8cec8758aee81", "date_created": "2021-08-02T16:34:12Z", "date_updated": "2021-08-02T16:34:12Z", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "status": "failed", "type": "fast", "recording_sid": "RE23b0a6283906e32644f0ad625b3b2f76", "duration": 1, "transcription_text": null, "api_version": "2010-04-01", "price": null, "price_unit": "USD", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Transcriptions/TR9a67ff36bd22ca23ffb8cec8758aee81.json"}, "emitted_at": 1655893269559} +{"stream": "transcriptions", "data": {"sid": 
"TR22410638faca1ff28ba74fdcc3ed607a", "date_created": "2021-08-02T16:34:11Z", "date_updated": "2021-08-02T16:34:11Z", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "status": "failed", "type": "fast", "recording_sid": "RE409806a6812289b99a200cb9b2ed1bb1", "duration": 1, "transcription_text": null, "api_version": "2010-04-01", "price": null, "price_unit": "USD", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Transcriptions/TR22410638faca1ff28ba74fdcc3ed607a.json"}, "emitted_at": 1655893269561} +{"stream": "transcriptions", "data": {"sid": "TR3121d0382edce18b7eefc91774d13b1e", "date_created": "2021-07-29T13:53:28Z", "date_updated": "2021-07-29T13:53:28Z", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "status": "failed", "type": "fast", "recording_sid": "RE5789dbe47095da92fa15bb6a915ebf5f", "duration": 0, "transcription_text": null, "api_version": "2010-04-01", "price": null, "price_unit": "USD", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Transcriptions/TR3121d0382edce18b7eefc91774d13b1e.json"}, "emitted_at": 1655893269564} +{"stream": "transcriptions", "data": {"sid": "TRe059ae09e0d44c9ff52cfae00bc310b4", "date_created": "2021-07-29T13:53:26Z", "date_updated": "2021-07-29T13:53:27Z", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "status": "failed", "type": "fast", "recording_sid": "REfff111761f95ca275dbb67a315c3dbd7", "duration": 1, "transcription_text": null, "api_version": "2010-04-01", "price": null, "price_unit": "USD", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Transcriptions/TRe059ae09e0d44c9ff52cfae00bc310b4.json"}, "emitted_at": 1655893269567} +{"stream": "transcriptions", "data": {"sid": "TR56400459787840ddd4c02324cd3e5aee", "date_created": "2021-07-29T13:53:25Z", "date_updated": "2021-07-29T13:53:26Z", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "status": "failed", "type": "fast", "recording_sid": "RE50a8df46c6879f5cce93f0b1f060938f", "duration": 1, "transcription_text": 
null, "api_version": "2010-04-01", "price": null, "price_unit": "USD", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Transcriptions/TR56400459787840ddd4c02324cd3e5aee.json"}, "emitted_at": 1655893269570} +{"stream": "transcriptions", "data": {"sid": "TRcaee7459d0f781733be2c5e727f03ba4", "date_created": "2021-07-29T13:53:24Z", "date_updated": "2021-07-29T13:53:24Z", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "status": "failed", "type": "fast", "recording_sid": "RE32716541fb1bf7c109475ae3bc8ca19d", "duration": 1, "transcription_text": null, "api_version": "2010-04-01", "price": null, "price_unit": "USD", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Transcriptions/TRcaee7459d0f781733be2c5e727f03ba4.json"}, "emitted_at": 1655893269573} +{"stream": "transcriptions", "data": {"sid": "TR336cfdfe1ae60c04583928bf4b32fcc8", "date_created": "2021-07-29T13:53:22Z", "date_updated": "2021-07-29T13:53:23Z", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "status": "failed", "type": "fast", "recording_sid": "RE84c01315742ad6e01fcea4f59f6f60a4", "duration": 1, "transcription_text": null, "api_version": "2010-04-01", "price": null, "price_unit": "USD", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Transcriptions/TR336cfdfe1ae60c04583928bf4b32fcc8.json"}, "emitted_at": 1655893269576} +{"stream": "transcriptions", "data": {"sid": "TRb7f47a8dfbadc07d5cba352922b76a33", "date_created": "2021-07-29T13:53:20Z", "date_updated": "2021-07-29T13:53:20Z", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "status": "failed", "type": "fast", "recording_sid": "RE777482bc179ca9370f8fc3c6ccf2f53b", "duration": 1, "transcription_text": null, "api_version": "2010-04-01", "price": null, "price_unit": "USD", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Transcriptions/TRb7f47a8dfbadc07d5cba352922b76a33.json"}, "emitted_at": 1655893269579} +{"stream": "transcriptions", "data": {"sid": 
"TRc0fb7617fdddff4ce7bbb2bb33bfba3e", "date_created": "2021-07-29T13:53:19Z", "date_updated": "2021-07-29T13:53:19Z", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "status": "failed", "type": "fast", "recording_sid": "REbfa96c9e5bcf67c271fd30915eab24f9", "duration": 1, "transcription_text": null, "api_version": "2010-04-01", "price": null, "price_unit": "USD", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Transcriptions/TRc0fb7617fdddff4ce7bbb2bb33bfba3e.json"}, "emitted_at": 1655893269582} +{"stream": "transcriptions", "data": {"sid": "TR6b677c0b8f4197bb5dfafa4080733c59", "date_created": "2021-07-29T13:53:17Z", "date_updated": "2021-07-29T13:53:18Z", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "status": "failed", "type": "fast", "recording_sid": "RE5d36303c45aefc31cc866f4463aed1ff", "duration": 1, "transcription_text": null, "api_version": "2010-04-01", "price": null, "price_unit": "USD", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Transcriptions/TR6b677c0b8f4197bb5dfafa4080733c59.json"}, "emitted_at": 1655893269584} +{"stream": "transcriptions", "data": {"sid": "TR29feed340a2ddba3489ffa895cbadeb3", "date_created": "2021-07-29T13:53:16Z", "date_updated": "2021-07-29T13:53:16Z", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "status": "failed", "type": "fast", "recording_sid": "RE139c1bba86bf76b9403198e2410cf79f", "duration": 1, "transcription_text": null, "api_version": "2010-04-01", "price": null, "price_unit": "USD", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Transcriptions/TR29feed340a2ddba3489ffa895cbadeb3.json"}, "emitted_at": 1655893269586} +{"stream": "transcriptions", "data": {"sid": "TR60181fc05d8f2c904c91fca3069a7294", "date_created": "2021-07-29T13:53:14Z", "date_updated": "2021-07-29T13:53:14Z", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "status": "failed", "type": "fast", "recording_sid": "RE94c74f2fce718b632fe27ff263159564", "duration": 1, "transcription_text": 
null, "api_version": "2010-04-01", "price": null, "price_unit": "USD", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Transcriptions/TR60181fc05d8f2c904c91fca3069a7294.json"}, "emitted_at": 1655893269588} +{"stream": "transcriptions", "data": {"sid": "TR77d81ea746e99d1f0f65a3bb861496c8", "date_created": "2021-07-29T13:53:12Z", "date_updated": "2021-07-29T13:53:13Z", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "status": "failed", "type": "fast", "recording_sid": "RE4e0201e11304eed129517f14e831e932", "duration": 1, "transcription_text": null, "api_version": "2010-04-01", "price": null, "price_unit": "USD", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Transcriptions/TR77d81ea746e99d1f0f65a3bb861496c8.json"}, "emitted_at": 1655893269589} +{"stream": "transcriptions", "data": {"sid": "TRc150c6839a4b238a4b58b8bcf22e073d", "date_created": "2021-07-29T13:53:11Z", "date_updated": "2021-07-29T13:53:11Z", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "status": "failed", "type": "fast", "recording_sid": "REe92c25eaa479a1d143039db6e98057e9", "duration": 1, "transcription_text": null, "api_version": "2010-04-01", "price": null, "price_unit": "USD", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Transcriptions/TRc150c6839a4b238a4b58b8bcf22e073d.json"}, "emitted_at": 1655893269591} +{"stream": "transcriptions", "data": {"sid": "TR2352ad9938745bd78d8ced13d995d181", "date_created": "2021-07-29T13:53:09Z", "date_updated": "2021-07-29T13:53:09Z", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "status": "failed", "type": "fast", "recording_sid": "REe3fc7d29689f05b28fae118d938b0371", "duration": 1, "transcription_text": null, "api_version": "2010-04-01", "price": null, "price_unit": "USD", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Transcriptions/TR2352ad9938745bd78d8ced13d995d181.json"}, "emitted_at": 1655893269592} +{"stream": "transcriptions", "data": {"sid": 
"TR7f944821808b0a30a576efc6b3719e96", "date_created": "2021-07-29T13:53:08Z", "date_updated": "2021-07-29T13:53:08Z", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "status": "failed", "type": "fast", "recording_sid": "RE07aef2a985f2e5309f9a9c86a98d9a31", "duration": 1, "transcription_text": null, "api_version": "2010-04-01", "price": null, "price_unit": "USD", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Transcriptions/TR7f944821808b0a30a576efc6b3719e96.json"}, "emitted_at": 1655893269593} +{"stream": "transcriptions", "data": {"sid": "TR1eb2d4019b539d7e90fa1af5b90bbc30", "date_created": "2021-07-23T07:49:38Z", "date_updated": "2021-07-23T07:49:48Z", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "status": "completed", "type": "fast", "recording_sid": "REc64784b9bf55693654c8ebdb446c3ec8", "duration": 15, "transcription_text": "Amazons verification service, your code is 624404. Again, your code is 624404. Good by.", "api_version": "2010-04-01", "price": -0.05, "price_unit": "USD", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Transcriptions/TR1eb2d4019b539d7e90fa1af5b90bbc30.json"}, "emitted_at": 1655893269595} +{"stream": "transcriptions", "data": {"sid": "TR03e0c370bbbaa22d7721626f55caa843", "date_created": "2021-07-16T22:22:54Z", "date_updated": "2021-07-16T22:22:54Z", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "status": "failed", "type": "fast", "recording_sid": "REd01027fa42e1462b8f59f1dcd7e1384a", "duration": 1, "transcription_text": null, "api_version": "2008-08-01", "price": null, "price_unit": "USD", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Transcriptions/TR03e0c370bbbaa22d7721626f55caa843.json"}, "emitted_at": 1655893269596} +{"stream": "transcriptions", "data": {"sid": "TR77ee90ded9bb2dc3cb90405ead31380e", "date_created": "2021-07-16T22:22:52Z", "date_updated": "2021-07-16T22:22:52Z", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "status": "failed", "type": "fast", 
"recording_sid": "REa73b00cd27fc04e56663078b91614493", "duration": 1, "transcription_text": null, "api_version": "2008-08-01", "price": null, "price_unit": "USD", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Transcriptions/TR77ee90ded9bb2dc3cb90405ead31380e.json"}, "emitted_at": 1655893269597} +{"stream": "transcriptions", "data": {"sid": "TR313e2a049c6d4e52cce876575c364415", "date_created": "2021-07-16T22:22:51Z", "date_updated": "2021-07-16T22:22:51Z", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "status": "failed", "type": "fast", "recording_sid": "REde51bdf33cfee3eff88fcda95f3a893d", "duration": 1, "transcription_text": null, "api_version": "2008-08-01", "price": null, "price_unit": "USD", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Transcriptions/TR313e2a049c6d4e52cce876575c364415.json"}, "emitted_at": 1655893269598} +{"stream": "transcriptions", "data": {"sid": "TR17fa377e295583880b482d8ee6f86111", "date_created": "2021-07-16T22:22:49Z", "date_updated": "2021-07-16T22:22:50Z", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "status": "failed", "type": "fast", "recording_sid": "REa072804aa71c2ca48baf6ecf41237b4c", "duration": 1, "transcription_text": null, "api_version": "2008-08-01", "price": null, "price_unit": "USD", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Transcriptions/TR17fa377e295583880b482d8ee6f86111.json"}, "emitted_at": 1655893269600} +{"stream": "transcriptions", "data": {"sid": "TR8482bb211e10f8afa83ac2ddc20233a2", "date_created": "2021-07-16T22:22:48Z", "date_updated": "2021-07-16T22:22:48Z", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "status": "failed", "type": "fast", "recording_sid": "RE737136eccce0d0a26271cd99da974628", "duration": 1, "transcription_text": null, "api_version": "2008-08-01", "price": null, "price_unit": "USD", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Transcriptions/TR8482bb211e10f8afa83ac2ddc20233a2.json"}, "emitted_at": 
1655893269601} +{"stream": "transcriptions", "data": {"sid": "TR982c6fcbb73bc07184532e47aad9dbb0", "date_created": "2021-07-16T22:22:46Z", "date_updated": "2021-07-16T22:22:47Z", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "status": "failed", "type": "fast", "recording_sid": "REf447d261aed3ec3f83dae1e32293f4cc", "duration": 1, "transcription_text": null, "api_version": "2008-08-01", "price": null, "price_unit": "USD", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Transcriptions/TR982c6fcbb73bc07184532e47aad9dbb0.json"}, "emitted_at": 1655893269602} +{"stream": "transcriptions", "data": {"sid": "TR475dbe2d0a38c57896b64d80b65a4436", "date_created": "2021-07-16T22:22:45Z", "date_updated": "2021-07-16T22:55:28Z", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "status": "completed", "type": "fast", "recording_sid": "REc27298f55ce77ceba5e977b2ddaa9845", "duration": 18, "transcription_text": "Thank you for choosing hilton hotels. We would like to inform you that thanks to the friends and family rewards program, your membership withdrawn to receive a complimentary stay for further details, press one now to be placed on the do not call list, press 2. 
Now.", "api_version": "2008-08-01", "price": -0.05, "price_unit": "USD", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Transcriptions/TR475dbe2d0a38c57896b64d80b65a4436.json"}, "emitted_at": 1655893269603} +{"stream": "transcriptions", "data": {"sid": "TR2d859ef050b8e4e1ae7c2cb2ec7303a2", "date_created": "2021-07-12T17:19:05Z", "date_updated": "2021-07-12T17:19:06Z", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "status": "failed", "type": "fast", "recording_sid": "RE63ad97b2c52254525b16cf10ed72dff1", "duration": 1, "transcription_text": null, "api_version": "2008-08-01", "price": null, "price_unit": "USD", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Transcriptions/TR2d859ef050b8e4e1ae7c2cb2ec7303a2.json"}, "emitted_at": 1655893269604} +{"stream": "transcriptions", "data": {"sid": "TRa62a4e1b51e01c1bcb15f918bc1735ee", "date_created": "2021-07-06T17:47:20Z", "date_updated": "2021-07-06T17:47:20Z", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "status": "failed", "type": "fast", "recording_sid": "RE1a1b8cce1c2d60b8663178b0e121ddcf", "duration": 1, "transcription_text": null, "api_version": "2008-08-01", "price": null, "price_unit": "USD", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Transcriptions/TRa62a4e1b51e01c1bcb15f918bc1735ee.json"}, "emitted_at": 1655893269604} +{"stream": "transcriptions", "data": {"sid": "TR3beef0f4303834c5882f08f9ff21d7af", "date_created": "2021-07-06T17:47:17Z", "date_updated": "2021-07-06T17:47:18Z", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "status": "failed", "type": "fast", "recording_sid": "RE0cc09f8533c1959fb938fe66d46b6c8c", "duration": 1, "transcription_text": null, "api_version": "2008-08-01", "price": null, "price_unit": "USD", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Transcriptions/TR3beef0f4303834c5882f08f9ff21d7af.json"}, "emitted_at": 1655893269605} +{"stream": "transcriptions", "data": {"sid": 
"TR7819ae947b6e1e70f0f5c14420180818", "date_created": "2021-07-06T17:47:15Z", "date_updated": "2021-07-06T17:47:15Z", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "status": "failed", "type": "fast", "recording_sid": "RE0f647b2dac66ff44c34bc8a034468917", "duration": 1, "transcription_text": null, "api_version": "2008-08-01", "price": null, "price_unit": "USD", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Transcriptions/TR7819ae947b6e1e70f0f5c14420180818.json"}, "emitted_at": 1655893269606} +{"stream": "transcriptions", "data": {"sid": "TRf66bb05ea5ff267f4958da32d049c72e", "date_created": "2021-07-06T17:47:14Z", "date_updated": "2021-07-06T17:47:14Z", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "status": "failed", "type": "fast", "recording_sid": "RE1537dc45ff34546c4f24446e6be98df7", "duration": 1, "transcription_text": null, "api_version": "2008-08-01", "price": null, "price_unit": "USD", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Transcriptions/TRf66bb05ea5ff267f4958da32d049c72e.json"}, "emitted_at": 1655893269607} +{"stream": "transcriptions", "data": {"sid": "TR2b3a7c83a1d8177e674690138ffe1b12", "date_created": "2021-07-06T17:47:12Z", "date_updated": "2021-07-06T17:47:12Z", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "status": "failed", "type": "fast", "recording_sid": "REf3af71562151bf8e02d6b46b1ca0c6a1", "duration": 1, "transcription_text": null, "api_version": "2008-08-01", "price": null, "price_unit": "USD", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Transcriptions/TR2b3a7c83a1d8177e674690138ffe1b12.json"}, "emitted_at": 1655893269608} +{"stream": "transcriptions", "data": {"sid": "TR283825c8d15de557ec3f98b1000035c3", "date_created": "2021-07-06T17:47:10Z", "date_updated": "2021-07-06T17:47:10Z", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "status": "failed", "type": "fast", "recording_sid": "REd9930278f06f6433480ee5adfaec275e", "duration": 1, "transcription_text": 
null, "api_version": "2008-08-01", "price": null, "price_unit": "USD", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Transcriptions/TR283825c8d15de557ec3f98b1000035c3.json"}, "emitted_at": 1655893269608} +{"stream": "transcriptions", "data": {"sid": "TR9cf7b3634a9e7005e53ba11ea07c1f7d", "date_created": "2021-06-30T15:27:18Z", "date_updated": "2021-06-30T15:27:18Z", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "status": "failed", "type": "fast", "recording_sid": "REa6c280a59d2b564728654b84499f452a", "duration": 1, "transcription_text": null, "api_version": "2008-08-01", "price": null, "price_unit": "USD", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Transcriptions/TR9cf7b3634a9e7005e53ba11ea07c1f7d.json"}, "emitted_at": 1655893269609} +{"stream": "transcriptions", "data": {"sid": "TR879089ce0ab0beab1334d8c8b543412b", "date_created": "2021-06-29T14:24:20Z", "date_updated": "2021-06-29T14:24:21Z", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "status": "failed", "type": "fast", "recording_sid": "RE11212bf72d7ac4a478ba17b9fd74630c", "duration": 1, "transcription_text": null, "api_version": "2008-08-01", "price": null, "price_unit": "USD", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Transcriptions/TR879089ce0ab0beab1334d8c8b543412b.json"}, "emitted_at": 1655893269610} +{"stream": "transcriptions", "data": {"sid": "TRfc80852bb0e29b2f1a0a2d40ab3d7c44", "date_created": "2021-06-29T14:24:17Z", "date_updated": "2021-06-29T14:24:17Z", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "status": "failed", "type": "fast", "recording_sid": "RE52edccb946ee322f23000b6d172298da", "duration": 1, "transcription_text": null, "api_version": "2008-08-01", "price": null, "price_unit": "USD", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Transcriptions/TRfc80852bb0e29b2f1a0a2d40ab3d7c44.json"}, "emitted_at": 1655893269611} +{"stream": "transcriptions", "data": {"sid": 
"TRd4937317275da6e4c8d444915007ddb5", "date_created": "2021-06-29T14:24:13Z", "date_updated": "2021-06-29T14:24:14Z", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "status": "failed", "type": "fast", "recording_sid": "RE2615429b5a431803ef2deb60aada359f", "duration": 1, "transcription_text": null, "api_version": "2008-08-01", "price": null, "price_unit": "USD", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Transcriptions/TRd4937317275da6e4c8d444915007ddb5.json"}, "emitted_at": 1655893269612} +{"stream": "transcriptions", "data": {"sid": "TR3fa47c0771332afa24d60cb3ac2f26a0", "date_created": "2021-06-29T14:24:10Z", "date_updated": "2021-06-29T14:24:10Z", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "status": "failed", "type": "fast", "recording_sid": "REf047ccb6b6bd6511695c059434a0ee5a", "duration": 1, "transcription_text": null, "api_version": "2008-08-01", "price": null, "price_unit": "USD", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Transcriptions/TR3fa47c0771332afa24d60cb3ac2f26a0.json"}, "emitted_at": 1655893269612} +{"stream": "transcriptions", "data": {"sid": "TRaa2a47ae4e227ff06206ee61d6cf3d7d", "date_created": "2021-06-29T14:24:09Z", "date_updated": "2021-06-29T14:24:10Z", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "status": "failed", "type": "fast", "recording_sid": "REc7870c242b8db6e6a1306738b596cf61", "duration": 1, "transcription_text": null, "api_version": "2008-08-01", "price": null, "price_unit": "USD", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Transcriptions/TRaa2a47ae4e227ff06206ee61d6cf3d7d.json"}, "emitted_at": 1655893269613} +{"stream": "transcriptions", "data": {"sid": "TR6d6f1f633989514404f7350d2e265b2d", "date_created": "2021-06-29T14:24:04Z", "date_updated": "2021-06-29T14:24:04Z", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "status": "failed", "type": "fast", "recording_sid": "REf448b276ea98ebf3d7b9caddca28c9c8", "duration": 1, "transcription_text": 
null, "api_version": "2008-08-01", "price": null, "price_unit": "USD", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Transcriptions/TR6d6f1f633989514404f7350d2e265b2d.json"}, "emitted_at": 1655893269614} +{"stream": "transcriptions", "data": {"sid": "TRd3b63f96aa77aa4e81fa962c27f13c41", "date_created": "2021-06-29T14:23:59Z", "date_updated": "2021-06-29T14:23:59Z", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "status": "failed", "type": "fast", "recording_sid": "RE8fc5006bb0e5ef1cf4778f86b0981fb2", "duration": 1, "transcription_text": null, "api_version": "2008-08-01", "price": null, "price_unit": "USD", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Transcriptions/TRd3b63f96aa77aa4e81fa962c27f13c41.json"}, "emitted_at": 1655893269614} +{"stream": "transcriptions", "data": {"sid": "TR269a0cf6c842a16c58910d09cc7ada73", "date_created": "2021-06-24T17:35:29Z", "date_updated": "2021-06-24T17:35:30Z", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "status": "failed", "type": "fast", "recording_sid": "REc164ae33a29012cc6ec6825eed535309", "duration": 1, "transcription_text": null, "api_version": "2008-08-01", "price": null, "price_unit": "USD", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Transcriptions/TR269a0cf6c842a16c58910d09cc7ada73.json"}, "emitted_at": 1655893269615} +{"stream": "transcriptions", "data": {"sid": "TR33e8eefbaa1c510f712616a8dfa94d03", "date_created": "2021-06-24T17:35:27Z", "date_updated": "2021-06-24T17:35:27Z", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "status": "failed", "type": "fast", "recording_sid": "REb50108e71aa1db05457d725839cfc53b", "duration": 1, "transcription_text": null, "api_version": "2008-08-01", "price": null, "price_unit": "USD", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Transcriptions/TR33e8eefbaa1c510f712616a8dfa94d03.json"}, "emitted_at": 1655893269616} +{"stream": "transcriptions", "data": {"sid": 
"TR9e1c09eb90898441a1f4d0356bac9516", "date_created": "2021-06-24T17:35:26Z", "date_updated": "2021-06-24T17:35:27Z", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "status": "failed", "type": "fast", "recording_sid": "RE35c31ba04fb700777a050c4ac9e3c141", "duration": 1, "transcription_text": null, "api_version": "2008-08-01", "price": null, "price_unit": "USD", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Transcriptions/TR9e1c09eb90898441a1f4d0356bac9516.json"}, "emitted_at": 1655893269616} +{"stream": "transcriptions", "data": {"sid": "TR68d28cd2551c2c01922b79cb41ee5d51", "date_created": "2021-06-24T17:35:24Z", "date_updated": "2021-06-24T17:35:25Z", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "status": "failed", "type": "fast", "recording_sid": "RE37d45d62a6b3f418dcb4c925d4e967cc", "duration": 1, "transcription_text": null, "api_version": "2008-08-01", "price": null, "price_unit": "USD", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Transcriptions/TR68d28cd2551c2c01922b79cb41ee5d51.json"}, "emitted_at": 1655893269617} +{"stream": "transcriptions", "data": {"sid": "TR4eebd9a480634dab945972fc7f76b1cd", "date_created": "2021-06-17T15:34:34Z", "date_updated": "2021-06-17T15:53:53Z", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "status": "completed", "type": "fast", "recording_sid": "RE1c6686eb72437bb252384e1b84c7ed7b", "duration": 3, "transcription_text": "Best here.", "api_version": "2008-08-01", "price": -0.05, "price_unit": "USD", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Transcriptions/TR4eebd9a480634dab945972fc7f76b1cd.json"}, "emitted_at": 1655893269618} +{"stream": "transcriptions", "data": {"sid": "TR5de772ec5e698c5d36d04ab2d9a641c2", "date_created": "2021-06-17T15:34:30Z", "date_updated": "2021-06-17T15:34:31Z", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "status": "failed", "type": "fast", "recording_sid": "RE26ad99320c5bacd3cb97b7ec8029c9a9", "duration": 2, 
"transcription_text": null, "api_version": "2008-08-01", "price": null, "price_unit": "USD", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Transcriptions/TR5de772ec5e698c5d36d04ab2d9a641c2.json"}, "emitted_at": 1655893269618} +{"stream": "queues", "data": {"date_updated": "2020-11-25T10:01:02Z", "current_size": 0, "friendly_name": "friendly_name_5", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Queues/QUbda7dcdeafaf6509b45c4a43e4c4519d.json", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "average_wait_time": 0, "sid": "QUbda7dcdeafaf6509b45c4a43e4c4519d", "date_created": "2020-11-25T10:01:02Z", "max_size": 100, "subresource_uris": {"members": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Queues/QUbda7dcdeafaf6509b45c4a43e4c4519d/Members.json"}}, "emitted_at": 1655893271229} +{"stream": "queues", "data": {"date_updated": "2020-11-25T10:01:01Z", "current_size": 0, "friendly_name": "friendly_name_4", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Queues/QU9d308605319c35298f9833888d13c1fb.json", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "average_wait_time": 0, "sid": "QU9d308605319c35298f9833888d13c1fb", "date_created": "2020-11-25T10:01:01Z", "max_size": 100, "subresource_uris": {"members": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Queues/QU9d308605319c35298f9833888d13c1fb/Members.json"}}, "emitted_at": 1655893271243} +{"stream": "queues", "data": {"date_updated": "2020-11-25T10:00:59Z", "current_size": 0, "friendly_name": "friendly_name_3", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Queues/QU7a9ca432cb8ed145439bf74c27a3b587.json", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "average_wait_time": 0, "sid": "QU7a9ca432cb8ed145439bf74c27a3b587", "date_created": "2020-11-25T10:00:59Z", "max_size": 100, "subresource_uris": {"members": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Queues/QU7a9ca432cb8ed145439bf74c27a3b587/Members.json"}}, 
"emitted_at": 1655893271246} +{"stream": "queues", "data": {"date_updated": "2020-11-25T10:00:57Z", "current_size": 0, "friendly_name": "friendly_name_2", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Queues/QU345bae62186d3e58ba1338d4b8a60456.json", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "average_wait_time": 0, "sid": "QU345bae62186d3e58ba1338d4b8a60456", "date_created": "2020-11-25T10:00:57Z", "max_size": 100, "subresource_uris": {"members": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Queues/QU345bae62186d3e58ba1338d4b8a60456/Members.json"}}, "emitted_at": 1655893271247} +{"stream": "queues", "data": {"date_updated": "2020-11-25T10:00:55Z", "current_size": 0, "friendly_name": "friendly_name_1", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Queues/QU5f9b1d245de682b4c3830689bfc8a484.json", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "average_wait_time": 0, "sid": "QU5f9b1d245de682b4c3830689bfc8a484", "date_created": "2020-11-25T10:00:55Z", "max_size": 100, "subresource_uris": {"members": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Queues/QU5f9b1d245de682b4c3830689bfc8a484/Members.json"}}, "emitted_at": 1655893271248} +{"stream": "messages", "data": {"body": "Your NetSuite verification code is 959946.", "num_segments": 1, "direction": "inbound", "from": "+15592037173", "date_updated": "2022-03-17T03:53:03Z", "price": -0.0075, "error_message": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Messages/SMb098c5997dd7c5cebd3051d9eb7566c4.json", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "num_media": 0, "to": "+12056561170", "date_created": "2022-03-17T03:53:02Z", "status": "received", "sid": "SMb098c5997dd7c5cebd3051d9eb7566c4", "date_sent": "2022-03-17T03:53:03Z", "messaging_service_sid": null, "error_code": null, "price_unit": "USD", "api_version": "2010-04-01", "subresource_uris": {"media": 
"/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Messages/SMb098c5997dd7c5cebd3051d9eb7566c4/Media.json", "feedback": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Messages/SMb098c5997dd7c5cebd3051d9eb7566c4/Feedback.json"}}, "emitted_at": 1655893272971} +{"stream": "usage_triggers", "data": {"sid": "UT33bd2bf238d94863a609133da897d676", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "date_created": "2020-11-25T10:02:19Z", "date_updated": "2020-11-25T10:02:19Z", "date_fired": null, "friendly_name": null, "usage_category": "sms", "trigger_by": "usage", "recurring": "", "trigger_value": 1000.0, "current_value": 130.0, "callback_url": "http://www.example.com/", "callback_method": "POST", "usage_record_uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Usage/Records.json?Category=sms", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Usage/Triggers/UT33bd2bf238d94863a609133da897d676.json", "api_version": "2010-04-01"}, "emitted_at": 1655893322691} +{"stream": "usage_triggers", "data": {"sid": "UT3c3c157dcaf347829d5a0f75e97b572e", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "date_created": "2020-11-25T10:02:34Z", "date_updated": "2020-11-25T10:02:34Z", "date_fired": null, "friendly_name": null, "usage_category": "sms", "trigger_by": "usage", "recurring": "", "trigger_value": 999.0, "current_value": 130.0, "callback_url": "http://www.example.com/", "callback_method": "POST", "usage_record_uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Usage/Records.json?Category=sms", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Usage/Triggers/UT3c3c157dcaf347829d5a0f75e97b572e.json", "api_version": "2010-04-01"}, "emitted_at": 1655893322698} +{"stream": "usage_triggers", "data": {"sid": "UT7170996eff504647ac9f215222ee296f", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "date_created": "2020-11-25T10:02:41Z", "date_updated": "2020-11-25T10:02:41Z", "date_fired": null, "friendly_name": 
null, "usage_category": "sms", "trigger_by": "usage", "recurring": "", "trigger_value": 943.0, "current_value": 130.0, "callback_url": "http://www.example.com/", "callback_method": "POST", "usage_record_uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Usage/Records.json?Category=sms", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Usage/Triggers/UT7170996eff504647ac9f215222ee296f.json", "api_version": "2010-04-01"}, "emitted_at": 1655893322701} diff --git a/airbyte-integrations/connectors/source-twilio/integration_tests/no_empty_streams_catalog.json b/airbyte-integrations/connectors/source-twilio/integration_tests/no_empty_streams_catalog.json index 1de59155ead5..2db7a3a5d6ea 100644 --- a/airbyte-integrations/connectors/source-twilio/integration_tests/no_empty_streams_catalog.json +++ b/airbyte-integrations/connectors/source-twilio/integration_tests/no_empty_streams_catalog.json @@ -99,9 +99,9 @@ "json_schema": {}, "supported_sync_modes": ["incremental", "full_refresh"], "source_defined_cursor": true, - "default_cursor_field": ["date_updated"] + "default_cursor_field": ["date_created"] }, - "cursor_field": ["date_updated"], + "cursor_field": ["date_created"], "sync_mode": "incremental", "destination_sync_mode": "append" }, @@ -188,6 +188,18 @@ }, "sync_mode": "full_refresh", "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "alerts", + "json_schema": {}, + "supported_sync_modes": ["incremental", "full_refresh"], + "source_defined_cursor": true, + "default_cursor_field": ["date_updated"] + }, + "sync_mode": "incremental", + "cursor_field": ["date_updated"], + "destination_sync_mode": "append" } ] } diff --git a/airbyte-integrations/connectors/source-twilio/integration_tests/no_empty_streams_no_usage_records_catalog.json b/airbyte-integrations/connectors/source-twilio/integration_tests/no_empty_streams_no_usage_records_catalog.json index 17f3e5c71557..978df0315ab2 100644 --- 
a/airbyte-integrations/connectors/source-twilio/integration_tests/no_empty_streams_no_usage_records_catalog.json +++ b/airbyte-integrations/connectors/source-twilio/integration_tests/no_empty_streams_no_usage_records_catalog.json @@ -99,9 +99,9 @@ "json_schema": {}, "supported_sync_modes": ["incremental", "full_refresh"], "source_defined_cursor": true, - "default_cursor_field": ["date_updated"] + "default_cursor_field": ["date_created"] }, - "cursor_field": ["date_updated"], + "cursor_field": ["date_created"], "sync_mode": "incremental", "destination_sync_mode": "append" }, @@ -183,9 +183,9 @@ "json_schema": {}, "supported_sync_modes": ["incremental", "full_refresh"], "source_defined_cursor": true, - "default_cursor_field": ["date_updated"] + "default_cursor_field": ["date_created"] }, - "cursor_field": ["date_updated"], + "cursor_field": ["date_created"], "sync_mode": "incremental", "destination_sync_mode": "append" } diff --git a/airbyte-integrations/connectors/source-twilio/integration_tests/sample_state.json b/airbyte-integrations/connectors/source-twilio/integration_tests/sample_state.json index 862803a08b35..5b57d952ba89 100644 --- a/airbyte-integrations/connectors/source-twilio/integration_tests/sample_state.json +++ b/airbyte-integrations/connectors/source-twilio/integration_tests/sample_state.json @@ -1,6 +1,6 @@ { "calls": { - "end_time": "2020-01-01T00:00:00Z" + "end_time": "2022-06-11T00:00:00Z" }, "conferences": { "date_updated": "2020-01-01T00:00:00Z" diff --git a/airbyte-integrations/connectors/source-twilio/setup.py b/airbyte-integrations/connectors/source-twilio/setup.py index dae12a1b708f..9ddceb0419ef 100644 --- a/airbyte-integrations/connectors/source-twilio/setup.py +++ b/airbyte-integrations/connectors/source-twilio/setup.py @@ -11,9 +11,7 @@ "requests~=2.25", ] -TEST_REQUIREMENTS = [ - "pytest~=6.1", -] +TEST_REQUIREMENTS = ["pytest~=6.1", "pytest-mock"] setup( name="source_twilio", diff --git 
a/airbyte-integrations/connectors/source-twilio/source_twilio/schemas/conferences.json b/airbyte-integrations/connectors/source-twilio/source_twilio/schemas/conferences.json index 4d74793e4e09..b4887a0b3f86 100644 --- a/airbyte-integrations/connectors/source-twilio/source_twilio/schemas/conferences.json +++ b/airbyte-integrations/connectors/source-twilio/source_twilio/schemas/conferences.json @@ -8,7 +8,7 @@ "type": ["null", "string"] }, "date_updated": { - "format": "date", + "format": "date-time", "type": ["null", "string"] }, "api_version": { diff --git a/airbyte-integrations/connectors/source-twilio/source_twilio/schemas/recordings.json b/airbyte-integrations/connectors/source-twilio/source_twilio/schemas/recordings.json index 2949880e2d42..706d94c0df5d 100644 --- a/airbyte-integrations/connectors/source-twilio/source_twilio/schemas/recordings.json +++ b/airbyte-integrations/connectors/source-twilio/source_twilio/schemas/recordings.json @@ -48,9 +48,15 @@ "error_code": { "type": ["null", "integer"] }, + "media_url": { + "type": ["null", "string"] + }, "uri": { "type": ["null", "string"] }, + "media_url": { + "type": "string" + }, "encryption_details": { "properties": { "type": { diff --git a/airbyte-integrations/connectors/source-twilio/source_twilio/schemas/usage_records.json b/airbyte-integrations/connectors/source-twilio/source_twilio/schemas/usage_records.json index 8551b8ad3d5a..138c1edffcf1 100644 --- a/airbyte-integrations/connectors/source-twilio/source_twilio/schemas/usage_records.json +++ b/airbyte-integrations/connectors/source-twilio/source_twilio/schemas/usage_records.json @@ -62,7 +62,7 @@ "type": ["null", "number"] }, "start_date": { - "format": "date", + "format": "date-time", "type": ["null", "string"] }, "count_unit": { diff --git a/airbyte-integrations/connectors/source-twilio/source_twilio/source.py b/airbyte-integrations/connectors/source-twilio/source_twilio/source.py index 7ec8a481e7f1..9c6d23a6d960 100644 --- 
a/airbyte-integrations/connectors/source-twilio/source_twilio/source.py +++ b/airbyte-integrations/connectors/source-twilio/source_twilio/source.py @@ -61,7 +61,11 @@ def streams(self, config: Mapping[str, Any]) -> List[Stream]: ), ) full_refresh_stream_kwargs = {"authenticator": auth} - incremental_stream_kwargs = {"authenticator": auth, "start_date": config["start_date"]} + incremental_stream_kwargs = { + "authenticator": auth, + "start_date": config["start_date"], + "lookback_window": config["lookback_window"], + } streams = [ Accounts(**full_refresh_stream_kwargs), diff --git a/airbyte-integrations/connectors/source-twilio/source_twilio/spec.json b/airbyte-integrations/connectors/source-twilio/source_twilio/spec.json index f5809c27fc36..182977df2d16 100644 --- a/airbyte-integrations/connectors/source-twilio/source_twilio/spec.json +++ b/airbyte-integrations/connectors/source-twilio/source_twilio/spec.json @@ -4,30 +4,51 @@ "$schema": "http://json-schema.org/draft-07/schema#", "title": "Twilio Spec", "type": "object", - "required": ["account_sid", "auth_token", "start_date"], + "required": [ + "account_sid", + "auth_token", + "start_date" + ], "additionalProperties": false, "properties": { "account_sid": { "title": "Account ID", "description": "Twilio account SID", "airbyte_secret": true, - "type": "string" + "type": "string", + "order": 1 }, "auth_token": { "title": "Auth Token", "description": "Twilio Auth Token.", "airbyte_secret": true, - "type": "string" + "type": "string", + "order": 2 }, "start_date": { "title": "Replication Start Date", "description": "UTC date and time in the format 2020-10-01T00:00:00Z. 
Any data before this date will not be replicated.", "pattern": "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$", - "examples": ["2020-10-01T00:00:00Z"], - "type": "string" + "examples": [ + "2020-10-01T00:00:00Z" + ], + "type": "string", + "order": 3 + }, + "lookback_window": { + "title": "Lookback window", + "description": "How far into the past to look for records. (in minutes)", + "examples": [ + 60 + ], + "default": 0, + "type": "integer", + "order": 4 } } }, "supportsIncremental": true, - "supported_destination_sync_modes": ["append"] + "supported_destination_sync_modes": [ + "append" + ] } diff --git a/airbyte-integrations/connectors/source-twilio/source_twilio/streams.py b/airbyte-integrations/connectors/source-twilio/source_twilio/streams.py index 813055eb4345..79e85d7029da 100644 --- a/airbyte-integrations/connectors/source-twilio/source_twilio/streams.py +++ b/airbyte-integrations/connectors/source-twilio/source_twilio/streams.py @@ -3,12 +3,13 @@ # from abc import ABC, abstractmethod -from typing import Any, Iterable, Mapping, MutableMapping, Optional +from typing import Any, Iterable, List, Mapping, MutableMapping, Optional from urllib.parse import parse_qsl, urlparse import pendulum import requests from airbyte_cdk.models import SyncMode +from airbyte_cdk.sources.streams import IncrementalMixin from airbyte_cdk.sources.streams.http import HttpStream from airbyte_cdk.sources.utils.transform import TransformConfig, TypeTransformer @@ -20,9 +21,12 @@ class TwilioStream(HttpStream, ABC): url_base = TWILIO_API_URL_BASE primary_key = "sid" - page_size = 100 + page_size = 1000 transformer: TypeTransformer = TypeTransformer(TransformConfig.DefaultSchemaNormalization | TransformConfig.CustomSchemaNormalization) + def __init__(self, **kwargs): + super().__init__(**kwargs) + @property def data_field(self): return self.name @@ -79,7 +83,7 @@ def request_params( @transformer.registerCustomTransform def custom_transform_function(original_value: Any, 
field_schema: Mapping[str, Any]) -> Any: - if original_value and "format" in field_schema and field_schema["format"] == "date-time": + if original_value and field_schema.get("format") == "date-time": try: return pendulum.from_format(original_value, "ddd, D MMM YYYY HH:mm:ss ZZ").in_timezone("UTC").to_iso8601_string() except ValueError: @@ -92,13 +96,14 @@ def custom_transform_function(original_value: Any, field_schema: Mapping[str, An return original_value -class IncrementalTwilioStream(TwilioStream, ABC): - cursor_field = "date_updated" - time_filter_template = "%Y-%m-%dT%H:%M:%SZ" +class IncrementalTwilioStream(TwilioStream, IncrementalMixin): + time_filter_template = "YYYY-MM-DD HH:mm:ss[Z]" - def __init__(self, start_date: str = None, **kwargs): + def __init__(self, start_date: str = None, lookback_window: int = 0, **kwargs): super().__init__(**kwargs) - self._start_date = start_date + self._start_date = start_date if start_date is not None else "1970-01-01T00:00:00Z" + self._lookback_window = lookback_window + self._cursor_value = None @property @abstractmethod @@ -107,29 +112,49 @@ def incremental_filter_field(self) -> str: return: date filter query parameter name """ - def get_updated_state(self, current_stream_state: MutableMapping[str, Any], latest_record: Mapping[str, Any]) -> Mapping[str, Any]: - """ - Return the latest state by comparing the cursor value in the latest record with the stream's most recent state object - and returning an updated state object. 
- """ - latest_benchmark = pendulum.parse(latest_record[self.cursor_field], strict=False).strftime(self.time_filter_template) - if current_stream_state.get(self.cursor_field): - return {self.cursor_field: max(latest_benchmark, current_stream_state[self.cursor_field])} - return {self.cursor_field: latest_benchmark} - - def request_params(self, stream_state: Mapping[str, Any], **kwargs) -> MutableMapping[str, Any]: - params = super().request_params(stream_state=stream_state, **kwargs) - start_date = stream_state.get(self.cursor_field) or self._start_date - if start_date: - params.update({self.incremental_filter_field: pendulum.parse(start_date, strict=False).strftime(self.time_filter_template)}) + @property + def state(self) -> Mapping[str, Any]: + if self._cursor_value: + return { + self.cursor_field: self._cursor_value, + } + + return {} + + @state.setter + def state(self, value: Mapping[str, Any]): + if self._lookback_window and value.get(self.cursor_field): + new_start_date = ( + pendulum.parse(value[self.cursor_field]) - pendulum.duration(minutes=self._lookback_window) + ).to_iso8601_string() + if new_start_date > self._start_date: + value[self.cursor_field] = new_start_date + self._cursor_value = value.get(self.cursor_field) + + def request_params( + self, stream_state: Mapping[str, Any], next_page_token: Mapping[str, Any] = None, **kwargs + ) -> MutableMapping[str, Any]: + params = super().request_params(stream_state=stream_state, next_page_token=next_page_token, **kwargs) + start_date = self.state.get(self.cursor_field, self._start_date) + params[self.incremental_filter_field] = pendulum.parse(start_date).format(self.time_filter_template) return params - def read_records(self, stream_state: Mapping[str, Any] = None, **kwargs): - stream_state = stream_state or {} - records = super().read_records(stream_state=stream_state, **kwargs) - for record in records: - record[self.cursor_field] = pendulum.parse(record[self.cursor_field], 
strict=False).strftime(self.time_filter_template) - yield record + def read_records( + self, + sync_mode: SyncMode, + cursor_field: List[str] = None, + stream_slice: Mapping[str, Any] = None, + stream_state: Mapping[str, Any] = None, + ) -> Iterable[Mapping[str, Any]]: + unsorted_records = [] + for record in super().read_records(sync_mode, cursor_field, stream_slice, stream_state): + record[self.cursor_field] = pendulum.parse(record[self.cursor_field], strict=False).to_iso8601_string() + unsorted_records.append(record) + sorted_records = sorted(unsorted_records, key=lambda x: x[self.cursor_field]) + for record in sorted_records: + if record[self.cursor_field] >= self.state.get(self.cursor_field, self._start_date): + self._cursor_value = record[self.cursor_field] + yield record class TwilioNestedStream(TwilioStream): @@ -267,25 +292,16 @@ class Calls(TwilioNestedStream, IncrementalTwilioStream): parent_stream = Accounts incremental_filter_field = "EndTime>" cursor_field = "end_time" + time_filter_template = "YYYY-MM-DD" class Conferences(TwilioNestedStream, IncrementalTwilioStream): """https://www.twilio.com/docs/voice/api/conference-resource#read-multiple-conference-resources""" parent_stream = Accounts - incremental_filter_field = "DateUpdated>" - time_filter_template = "%Y-%m-%d" - - def parse_response(self, response: requests.Response, stream_state: Mapping[str, Any], **kwargs) -> Iterable[Mapping]: - """ - :return an iterable containing each record in the response - """ - records = response.json().get(self.data_field, []) - if stream_state.get(self.cursor_field): - for record in records: - if pendulum.parse(record[self.cursor_field], strict=False) <= pendulum.parse(stream_state[self.cursor_field], strict=False): - yield record - yield from records + incremental_filter_field = "DateCreated>" + cursor_field = "date_created" + time_filter_template = "YYYY-MM-DD" class ConferenceParticipants(TwilioNestedStream): @@ -345,6 +361,24 @@ class 
MessageMedia(TwilioNestedStream, IncrementalTwilioStream): incremental_filter_field = "DateCreated>" cursor_field = "date_created" + def stream_slices(self, **kwargs) -> Iterable[Optional[Mapping[str, any]]]: + stream_instance = self.parent_stream( + authenticator=self.authenticator, start_date=self._start_date, lookback_window=self._lookback_window + ) + stream_slices = stream_instance.stream_slices(sync_mode=SyncMode.full_refresh, cursor_field=stream_instance.cursor_field) + for stream_slice in stream_slices: + for item in stream_instance.read_records( + sync_mode=SyncMode.full_refresh, stream_slice=stream_slice, cursor_field=stream_instance.cursor_field + ): + if item.get("subresource_uris", {}).get(self.subresource_uri_key): + validated = True + for key, value in self.media_exist_validation.items(): + validated = item.get(key) and item.get(key) != value + if not validated: + break + if validated: + yield {"subresource_uri": item["subresource_uris"][self.subresource_uri_key]} + class UsageNestedStream(TwilioNestedStream): url_base = TWILIO_API_URL_BASE_VERSIONED @@ -374,8 +408,8 @@ class UsageRecords(UsageNestedStream, IncrementalTwilioStream): parent_stream = Accounts incremental_filter_field = "StartDate" - time_filter_template = "%Y-%m-%d" cursor_field = "start_date" + time_filter_template = "YYYY-MM-DD" path_name = "Records" primary_key = [["account_sid"], ["category"]] changeable_fields = ["as_of"] @@ -394,6 +428,7 @@ class Alerts(IncrementalTwilioStream): url_base = TWILIO_MONITOR_URL_BASE incremental_filter_field = "StartDate" + cursor_field = "date_generated" def path(self, **kwargs): return self.name.title() diff --git a/airbyte-integrations/connectors/source-twilio/unit_tests/unit_test.py b/airbyte-integrations/connectors/source-twilio/unit_tests/unit_test.py index dddaea0060fa..388210b93540 100644 --- a/airbyte-integrations/connectors/source-twilio/unit_tests/unit_test.py +++ b/airbyte-integrations/connectors/source-twilio/unit_tests/unit_test.py @@ 
-2,6 +2,39 @@ # Copyright (c) 2022 Airbyte, Inc., all rights reserved. # +from unittest.mock import Mock -def test_example_method(): - assert True +import pytest +import requests +from source_twilio.source import SourceTwilio + + +@pytest.fixture +def config(): + return {"account_sid": "airbyte.io", "auth_token": "secret", "start_date": "2022-01-01T00:00:00Z"} + + +@pytest.mark.parametrize( + "exception, expected_error_msg", + ( + ( + ConnectionError("Connection aborted"), + "Unable to connect to Twilio API with the provided credentials - ConnectionError('Connection aborted')", + ), + ( + TimeoutError("Socket timed out"), + "Unable to connect to Twilio API with the provided credentials - TimeoutError('Socket timed out')", + ), + ( + requests.exceptions.HTTPError("401 Client Error: Unauthorized for url: https://api.twilio.com/"), + "Unable to connect to Twilio API with the provided credentials - " + "HTTPError('401 Client Error: Unauthorized for url: https://api.twilio.com/')", + ), + ), +) +def test_check_connection_handles_exceptions(mocker, config, exception, expected_error_msg): + mocker.patch.object(requests.Session, "send", Mock(side_effect=exception)) + source = SourceTwilio() + status_ok, error = source.check_connection(logger=None, config=config) + assert not status_ok + assert error == expected_error_msg diff --git a/airbyte-integrations/connectors/source-typeform/Dockerfile b/airbyte-integrations/connectors/source-typeform/Dockerfile index b1062f9930a5..9c3a1a6a5c64 100644 --- a/airbyte-integrations/connectors/source-typeform/Dockerfile +++ b/airbyte-integrations/connectors/source-typeform/Dockerfile @@ -12,5 +12,5 @@ RUN pip install . 
ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] -LABEL io.airbyte.version=0.1.6 +LABEL io.airbyte.version=0.1.7 LABEL io.airbyte.name=airbyte/source-typeform diff --git a/airbyte-integrations/connectors/source-typeform/source_typeform/schemas/responses.json b/airbyte-integrations/connectors/source-typeform/source_typeform/schemas/responses.json index 0b87b257f4f7..88c145066497 100644 --- a/airbyte-integrations/connectors/source-typeform/source_typeform/schemas/responses.json +++ b/airbyte-integrations/connectors/source-typeform/source_typeform/schemas/responses.json @@ -110,6 +110,45 @@ } } } + }, + "number": { + "type": ["null", "number"] + }, + "date": { + "type": ["null", "string"], + "format": "date-time" + }, + "email": { + "type": ["null", "string"] + }, + "phone_number": { + "type": ["null", "string"] + }, + "boolean": { + "type": ["null", "boolean"] + }, + "file_url": { + "type": ["null", "string"] + }, + "url": { + "type": ["null", "string"] + }, + "payment": { + "type": ["null", "object"], + "properties": { + "amount": { + "type": ["null", "string"] + }, + "last4": { + "type": ["null", "string"] + }, + "name": { + "type": ["null", "string"] + }, + "success": { + "type": ["null", "boolean"] + } + } } } } diff --git a/airbyte-integrations/connectors/source-webflow/.dockerignore b/airbyte-integrations/connectors/source-webflow/.dockerignore new file mode 100644 index 000000000000..f0867b6ea8b1 --- /dev/null +++ b/airbyte-integrations/connectors/source-webflow/.dockerignore @@ -0,0 +1,6 @@ +* +!Dockerfile +!main.py +!source_webflow +!setup.py +!secrets diff --git a/airbyte-integrations/connectors/source-webflow/.gitignore b/airbyte-integrations/connectors/source-webflow/.gitignore new file mode 100644 index 000000000000..1d17dae13b53 --- /dev/null +++ b/airbyte-integrations/connectors/source-webflow/.gitignore @@ -0,0 +1 @@ +.venv diff --git 
a/airbyte-integrations/connectors/source-webflow/Dockerfile b/airbyte-integrations/connectors/source-webflow/Dockerfile new file mode 100644 index 000000000000..d41f6e3e21ff --- /dev/null +++ b/airbyte-integrations/connectors/source-webflow/Dockerfile @@ -0,0 +1,38 @@ +FROM python:3.9.11-alpine3.15 as base + +# build and load all requirements +FROM base as builder +WORKDIR /airbyte/integration_code + +# upgrade pip to the latest version +RUN apk --no-cache upgrade \ + && pip install --upgrade pip \ + && apk --no-cache add tzdata build-base + + +COPY setup.py ./ +# install necessary packages to a temporary folder +RUN pip install --prefix=/install . + +# build a clean environment +FROM base +WORKDIR /airbyte/integration_code + +# copy all loaded and built libraries to a pure basic image +COPY --from=builder /install /usr/local +# add default timezone settings +COPY --from=builder /usr/share/zoneinfo/Etc/UTC /etc/localtime +RUN echo "Etc/UTC" > /etc/timezone + +# bash is installed for more convenient debugging. +RUN apk --no-cache add bash + +# copy payload code only +COPY main.py ./ +COPY source_webflow ./source_webflow + +ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" +ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] + +LABEL io.airbyte.version=0.1.1 +LABEL io.airbyte.name=airbyte/source-webflow diff --git a/airbyte-integrations/connectors/source-webflow/README.md b/airbyte-integrations/connectors/source-webflow/README.md new file mode 100644 index 000000000000..9fdf25dced57 --- /dev/null +++ b/airbyte-integrations/connectors/source-webflow/README.md @@ -0,0 +1,141 @@ +# Webflow Source + +This is the repository for the Webflow source connector, written in Python. +For information about how to use this connector within Airbyte, see [Webflow source documentation](https://docs.airbyte.io/integrations/sources/webflow). 
+ +## Local development + +### Prerequisites +**To iterate on this connector, make sure to complete this prerequisites section.** + +#### Minimum Python version required `= 3.9.11` + +#### Build & Activate Virtual Environment and install dependencies +From this connector directory, create a virtual environment: +``` +python -m venv .venv +``` + +This will generate a virtualenv for this module in `.venv/`. Make sure this venv is active in your +development environment of choice. To activate it from the terminal, run: +``` +source .venv/bin/activate +pip install -r requirements.txt +pip install '.[tests]' +``` +If you are in an IDE, follow your IDE's instructions to activate the virtualenv. + +Note that while we are installing dependencies from `requirements.txt`, you should only edit `setup.py` for your dependencies. `requirements.txt` is +used for editable installs (`pip install -e`) to pull in Python dependencies from the monorepo and will call `setup.py`. +If this is mumbo jumbo to you, don't worry about it, just put your deps in `setup.py` but install using `pip install -r requirements.txt` and everything +should work as you expect. + +#### Building via Gradle +You can also build the connector in Gradle. This is typically used in CI and not needed for your development workflow. + +To build using Gradle, from the Airbyte repository root, run: +``` +./gradlew :airbyte-integrations:connectors:source-webflow:build +``` + +#### Create credentials +**If you are a community contributor**, follow the instructions in the [documentation](https://docs.airbyte.io/integrations/sources/webflow) +to generate the necessary credentials. Then create a file `secrets/config.json` conforming to the `source_webflow/spec.yaml` file. +Note that any directory named `secrets` is git-ignored across the entire Airbyte repo, so there is no danger of accidentally checking in sensitive information. +See `integration_tests/sample_config.json` for a sample config file. 
+ +For more information about creating Webflow credentials, see [the documentation](https://docs.airbyte.io/integrations/sources/webflow). + +**If you are an Airbyte core member**, copy the credentials in Lastpass under the secret name `source webflow test creds` +and place them into `secrets/config.json`. + +### Locally running the connector +``` +python main.py spec +python main.py check --config secrets/config.json +python main.py discover --config secrets/config.json +python main.py read --config secrets/config.json --catalog integration_tests/configured_catalog.json +``` + +### Locally running the connector docker image + +#### Build +First, make sure you build the latest Docker image. Execute the following from +the source-webflow project directory (where Dockerfile can be found): +``` +docker build . -t airbyte/source-webflow:dev +``` + +You can also build the connector image via Gradle: +``` +./gradlew :airbyte-integrations:connectors:source-webflow:airbyteDocker +``` +When building via Gradle, the docker image name and tag, respectively, are the values of the `io.airbyte.name` and `io.airbyte.version` `LABEL`s in +the Dockerfile. + +#### Run +Then run any of the connector commands as follows: +``` +docker run --rm airbyte/source-webflow:dev spec +docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-webflow:dev check --config /secrets/config.json +docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-webflow:dev discover --config /secrets/config.json +docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/source-webflow:dev read --config /secrets/config.json --catalog /integration_tests/configured_catalog.json +``` +## Testing +Make sure to familiarize yourself with [pytest test discovery](https://docs.pytest.org/en/latest/goodpractices.html#test-discovery) to know how your test files and methods should be named. 
+First install test dependencies into your virtual environment: +``` +pip install .[tests] +``` + +Or if you are running in OSX with zsh, you may need to execute the following instead +``` +pip install .'[tests]' +``` +### Unit Tests +To run unit tests locally, from the connector directory run: +``` +python -m pytest unit_tests +``` + +### Integration Tests +There are two types of integration tests: Acceptance Tests (Airbyte's test suite for all source connectors) and custom integration tests (which are specific to this connector). +#### Custom Integration tests +Place custom tests inside `integration_tests/` folder, then, from the connector root, run +``` +python -m pytest integration_tests +``` +#### Acceptance Tests +Customize `acceptance-test-config.yml` file to configure tests. See [Source Acceptance Tests](https://docs.airbyte.io/connector-development/testing-connectors/source-acceptance-tests-reference) for more information. +If your connector requires to create or destroy resources for use during acceptance tests create fixtures for it and place them inside integration_tests/acceptance.py. +To run your integration tests with acceptance tests, from the connector root, run +``` +python -m pytest integration_tests -p integration_tests.acceptance +``` +To run your integration tests with docker + +### Using gradle to run tests +All commands should be run from airbyte project root. +To run unit tests: +``` +./gradlew :airbyte-integrations:connectors:source-webflow:unitTest +``` +To run acceptance and custom integration tests: +``` +./gradlew :airbyte-integrations:connectors:source-webflow:integrationTest +``` + +## Dependency Management +All of your dependencies should go in `setup.py`, NOT `requirements.txt`. The requirements file is only used to connect internal Airbyte dependencies in the monorepo for local development. 
+We split dependencies between two groups, dependencies that are: +* required for your connector to work need to go to `MAIN_REQUIREMENTS` list. +* required for the testing need to go to `TEST_REQUIREMENTS` list + +### Publishing a new version of the connector +You've checked out the repo, implemented a million dollar feature, and you're ready to share your changes with the world. Now what? +1. Make sure your changes are passing unit and integration tests. +1. Bump the connector version in `Dockerfile` -- just increment the value of the `LABEL io.airbyte.version` appropriately (we use [SemVer](https://semver.org/)). +1. Create a Pull Request. +1. Pat yourself on the back for being an awesome contributor. +1. Someone from Airbyte will take a look at your PR and iterate with you to merge it into master. + diff --git a/airbyte-integrations/connectors/source-webflow/acceptance-test-config.yml b/airbyte-integrations/connectors/source-webflow/acceptance-test-config.yml new file mode 100644 index 000000000000..8e6e6f5ae782 --- /dev/null +++ b/airbyte-integrations/connectors/source-webflow/acceptance-test-config.yml @@ -0,0 +1,19 @@ +# See [Source Acceptance Tests](https://docs.airbyte.io/connector-development/testing-connectors/source-acceptance-tests-reference) +# for more information about how to configure these tests +connector_image: airbyte/source-webflow:dev +tests: + spec: + - spec_path: "source_webflow/spec.yaml" + connection: + - config_path: "secrets/config.json" + status: "succeed" + - config_path: "integration_tests/invalid_config.json" + status: "failed" + discovery: + - config_path: "secrets/config.json" + basic_read: + - config_path: "secrets/config.json" + configured_catalog_path: "integration_tests/configured_catalog.json" + empty_streams: [] + + diff --git a/airbyte-integrations/connectors/source-webflow/acceptance-test-docker.sh b/airbyte-integrations/connectors/source-webflow/acceptance-test-docker.sh new file mode 100644 index 
000000000000..c51577d10690 --- /dev/null +++ b/airbyte-integrations/connectors/source-webflow/acceptance-test-docker.sh @@ -0,0 +1,16 @@ +#!/usr/bin/env sh + +# Build latest connector image +docker build . -t $(cat acceptance-test-config.yml | grep "connector_image" | head -n 1 | cut -d: -f2-) + +# Pull latest acctest image +docker pull airbyte/source-acceptance-test:latest + +# Run +docker run --rm -it \ + -v /var/run/docker.sock:/var/run/docker.sock \ + -v /tmp:/tmp \ + -v $(pwd):/test_input \ + airbyte/source-acceptance-test \ + --acceptance-test-config /test_input + diff --git a/airbyte-integrations/connectors/source-webflow/build.gradle b/airbyte-integrations/connectors/source-webflow/build.gradle new file mode 100644 index 000000000000..a35d8aee048e --- /dev/null +++ b/airbyte-integrations/connectors/source-webflow/build.gradle @@ -0,0 +1,9 @@ +plugins { + id 'airbyte-python' + id 'airbyte-docker' + id 'airbyte-source-acceptance-test' +} + +airbytePython { + moduleDirectory 'source_webflow' +} diff --git a/airbyte-integrations/connectors/source-webflow/integration_tests/__init__.py b/airbyte-integrations/connectors/source-webflow/integration_tests/__init__.py new file mode 100644 index 000000000000..46b7376756ec --- /dev/null +++ b/airbyte-integrations/connectors/source-webflow/integration_tests/__init__.py @@ -0,0 +1,3 @@ +# +# Copyright (c) 2021 Airbyte, Inc., all rights reserved. 
+# diff --git a/airbyte-integrations/connectors/source-webflow/integration_tests/abnormal_state.json b/airbyte-integrations/connectors/source-webflow/integration_tests/abnormal_state.json new file mode 100644 index 000000000000..52b0f2c2118f --- /dev/null +++ b/airbyte-integrations/connectors/source-webflow/integration_tests/abnormal_state.json @@ -0,0 +1,5 @@ +{ + "todo-stream-name": { + "todo-field-name": "todo-abnormal-value" + } +} diff --git a/airbyte-integrations/connectors/source-webflow/integration_tests/acceptance.py b/airbyte-integrations/connectors/source-webflow/integration_tests/acceptance.py new file mode 100644 index 000000000000..950b53b59d41 --- /dev/null +++ b/airbyte-integrations/connectors/source-webflow/integration_tests/acceptance.py @@ -0,0 +1,14 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +import pytest + +pytest_plugins = ("source_acceptance_test.plugin",) + + +@pytest.fixture(scope="session", autouse=True) +def connector_setup(): + """This fixture is a placeholder for external resources that acceptance test might require.""" + yield diff --git a/airbyte-integrations/connectors/source-webflow/integration_tests/catalog.json b/airbyte-integrations/connectors/source-webflow/integration_tests/catalog.json new file mode 100644 index 000000000000..0967ef424bce --- /dev/null +++ b/airbyte-integrations/connectors/source-webflow/integration_tests/catalog.json @@ -0,0 +1 @@ +{} diff --git a/airbyte-integrations/connectors/source-webflow/integration_tests/configured_catalog.json b/airbyte-integrations/connectors/source-webflow/integration_tests/configured_catalog.json new file mode 100644 index 000000000000..c2887e81f620 --- /dev/null +++ b/airbyte-integrations/connectors/source-webflow/integration_tests/configured_catalog.json @@ -0,0 +1,12 @@ +{ + "streams": [ + { + "stream": { + "name": "Blog Authors", + "json_schema": {} + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + } + ] +} diff --git 
a/airbyte-integrations/connectors/source-webflow/integration_tests/invalid_config.json b/airbyte-integrations/connectors/source-webflow/integration_tests/invalid_config.json new file mode 100644 index 000000000000..cdb9bc2f275b --- /dev/null +++ b/airbyte-integrations/connectors/source-webflow/integration_tests/invalid_config.json @@ -0,0 +1,4 @@ +{ + "site_id": "wrong data", + "api_key": "wrong data" +} diff --git a/airbyte-integrations/connectors/source-webflow/integration_tests/sample_config.json b/airbyte-integrations/connectors/source-webflow/integration_tests/sample_config.json new file mode 100644 index 000000000000..2e7ab495a80c --- /dev/null +++ b/airbyte-integrations/connectors/source-webflow/integration_tests/sample_config.json @@ -0,0 +1,4 @@ +{ + "site_id": "your-webflow-site-id", + "api_key": "your-webflow-token" +} diff --git a/airbyte-integrations/connectors/source-webflow/integration_tests/sample_state.json b/airbyte-integrations/connectors/source-webflow/integration_tests/sample_state.json new file mode 100644 index 000000000000..3587e579822d --- /dev/null +++ b/airbyte-integrations/connectors/source-webflow/integration_tests/sample_state.json @@ -0,0 +1,5 @@ +{ + "todo-stream-name": { + "todo-field-name": "value" + } +} diff --git a/airbyte-integrations/connectors/source-webflow/main.py b/airbyte-integrations/connectors/source-webflow/main.py new file mode 100644 index 000000000000..de7391914689 --- /dev/null +++ b/airbyte-integrations/connectors/source-webflow/main.py @@ -0,0 +1,13 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+# + + +import sys + +from airbyte_cdk.entrypoint import launch +from source_webflow import SourceWebflow + +if __name__ == "__main__": + source = SourceWebflow() + launch(source, sys.argv[1:]) diff --git a/airbyte-integrations/connectors/source-webflow/requirements.txt b/airbyte-integrations/connectors/source-webflow/requirements.txt new file mode 100644 index 000000000000..0411042aa091 --- /dev/null +++ b/airbyte-integrations/connectors/source-webflow/requirements.txt @@ -0,0 +1,2 @@ +-e ../../bases/source-acceptance-test +-e . diff --git a/airbyte-integrations/connectors/source-webflow/sample_files/configured_catalog.json b/airbyte-integrations/connectors/source-webflow/sample_files/configured_catalog.json new file mode 100644 index 000000000000..c2887e81f620 --- /dev/null +++ b/airbyte-integrations/connectors/source-webflow/sample_files/configured_catalog.json @@ -0,0 +1,12 @@ +{ + "streams": [ + { + "stream": { + "name": "Blog Authors", + "json_schema": {} + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + } + ] +} diff --git a/airbyte-integrations/connectors/source-webflow/setup.py b/airbyte-integrations/connectors/source-webflow/setup.py new file mode 100644 index 000000000000..168f25863cf4 --- /dev/null +++ b/airbyte-integrations/connectors/source-webflow/setup.py @@ -0,0 +1,29 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+# + + +from setuptools import find_packages, setup + +MAIN_REQUIREMENTS = [ + "airbyte-cdk~=0.1", +] + +TEST_REQUIREMENTS = [ + "pytest~=6.1", + "pytest-mock~=3.6.1", + "source-acceptance-test", +] + +setup( + name="source_webflow", + description="Source implementation for Webflow.", + author="Airbyte", + author_email="contact@airbyte.io", + packages=find_packages(), + install_requires=MAIN_REQUIREMENTS, + package_data={"": ["*.json", "*.yaml", "schemas/*.json", "schemas/shared/*.json"]}, + extras_require={ + "tests": TEST_REQUIREMENTS, + }, +) diff --git a/airbyte-integrations/connectors/source-webflow/source_webflow/__init__.py b/airbyte-integrations/connectors/source-webflow/source_webflow/__init__.py new file mode 100644 index 000000000000..2f6bd6e79775 --- /dev/null +++ b/airbyte-integrations/connectors/source-webflow/source_webflow/__init__.py @@ -0,0 +1,8 @@ +# +# Copyright (c) 2021 Airbyte, Inc., all rights reserved. +# + + +from .source import SourceWebflow + +__all__ = ["SourceWebflow"] diff --git a/airbyte-integrations/connectors/source-webflow/source_webflow/auth.py b/airbyte-integrations/connectors/source-webflow/source_webflow/auth.py new file mode 100644 index 000000000000..59951ca45c95 --- /dev/null +++ b/airbyte-integrations/connectors/source-webflow/source_webflow/auth.py @@ -0,0 +1,28 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+# + +from typing import Any, Mapping + +from airbyte_cdk.sources.streams.http.requests_native_auth import TokenAuthenticator + + +class WebflowAuthMixin: + """ + Mixin class for providing additional HTTP header for specifying the "accept-version" + """ + + def __init__(self, *, accept_version_header: str = "accept-version", accept_version: str, **kwargs): + super().__init__(**kwargs) + self.accept_version = accept_version + self.accept_version_header = accept_version_header + + def get_auth_header(self) -> Mapping[str, Any]: + return {**super().get_auth_header(), self.accept_version_header: self.accept_version} + + +class WebflowTokenAuthenticator(WebflowAuthMixin, TokenAuthenticator): + """ + Auth class for Personal Access Token + https://help.getharvest.com/api-v2/authentication-api/authentication/authentication/#personal-access-tokens + """ diff --git a/airbyte-integrations/connectors/source-webflow/source_webflow/source.py b/airbyte-integrations/connectors/source-webflow/source_webflow/source.py new file mode 100644 index 000000000000..2dad6eb5671f --- /dev/null +++ b/airbyte-integrations/connectors/source-webflow/source_webflow/source.py @@ -0,0 +1,332 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +import logging +from abc import ABC +from typing import Any, Iterable, List, Mapping, MutableMapping, Optional, Tuple + +import requests +from airbyte_cdk.sources import AbstractSource +from airbyte_cdk.sources.streams import Stream +from airbyte_cdk.sources.streams.http import HttpStream + +from .auth import WebflowTokenAuthenticator +from .webflow_to_airbyte_mapping import WebflowToAirbyteMapping + +""" +This module is used for pulling the contents of "collections" out of Webflow, which is a CMS for hosting websites. +A Webflow collection may be a group of items such as "Blog Posts", "Blog Authors", etc. +There may be many collections, each of which can have its own distinct schema. 
This module will dynamically figure out +which collections are available, and will dynamically create the schema for each collection based on information +extracted from Webflow. It will then download all of the items from all of the selected collections. + +Because the amount of data is expected to be "small" (not TB of data), we have not implemented any kind of +incremental downloading of data from Webflow. Each time this code is exectued, it will pull back all of the items +that are contained in each of the desired collections. +""" + + +# Webflow expects a 'accept-version' header with a value of '1.0.0' (as of May 2022) +WEBFLOW_ACCEPT_VERSION = "1.0.0" + + +# Basic full refresh stream +class WebflowStream(HttpStream, ABC): + """ + This class represents a stream output by the connector. + This is an abstract base class meant to contain all the common functionality at the API level e.g: the API base URL, + pagination strategy, parsing responses etc.. + + Each stream should extend this class (or another abstract subclass of it) to specify behavior unique to that stream. + """ + + url_base = "https://api.webflow.com/" + + # The following call is need to fix what appears to be a bug in http.py line 119 + # Bug reported at: https://github.com/airbytehq/airbyte/issues/13283 + @property + def authenticator(self) -> WebflowTokenAuthenticator: + return self._session.auth + + def request_params( + self, stream_state: Mapping[str, Any], stream_slice: Mapping[str, any] = None, next_page_token: Mapping[str, Any] = None + ) -> MutableMapping[str, Any]: + """ + Common params e.g. pagination size etc. + """ + return {} + + +class CollectionSchema(WebflowStream): + """ + Gets the schema of the current collection - see: https://developers.webflow.com/#get-collection-with-full-schema, and + then converts that schema to a json-schema.org-compatible schema that uses supported Airbyte types. 
+ + More info about Webflow schema: https://developers.webflow.com/#get-collection-with-full-schema + Airbyte data types: https://docs.airbyte.com/understanding-airbyte/supported-data-types/ + """ + + # primary_key is not used as we don't do incremental syncs - https://docs.airbyte.com/understanding-airbyte/connections/ + primary_key = None + + def __init__(self, collection_id: str = None, **kwargs): + self.collection_id = collection_id + super().__init__(**kwargs) + + def path(self, **kwargs) -> str: + """ + See: https://developers.webflow.com/#list-collections + Returns a list which contains high-level information about each collection. + """ + + path = f"collections/{self.collection_id}" + return path + + def parse_response(self, response: requests.Response, **kwargs) -> Iterable[Mapping]: + """ + Converts the webflow schema into an Airbyte-compatible schema + + Webflow schema API returns an array of fields contained in the "fields" field. + Get field name and field type from this array, and then map it to an airbyte-supported type + """ + + response_json = response.json() + for field in response_json["fields"]: + try: + field_name = field["slug"] + field_type = field["type"] + field_schema = {field_name: WebflowToAirbyteMapping.webflow_to_airbyte_mapping[field_type]} + yield field_schema # get records from the "fields" array + except Exception as e: + msg = f"""Encountered an exception parsing schema for Webflow type: {field_type}. +Is "{field_type}" defined in the mapping between Webflow and json schma ? """ + self.logger.exception(msg) + + # Don't eat the exception, raise it again as this needs to be fixed + raise e + + def next_page_token(self, response: requests.Response) -> Optional[Mapping[str, Any]]: + """This API does not return any information to support pagination""" + return {} + + +class CollectionsList(WebflowStream): + """ + The data that we are generally interested in pulling from Webflow is stored in "Collections". 
+ Example Collections that may be of interest are: "Blog Posts", "Blog Authors", etc. + + This class provides the functionality for getting a list containing metadata about available collections + More info https://developers.webflow.com/#list-collections + """ + + # primary_key is not used as we don't do incremental syncs - https://docs.airbyte.com/understanding-airbyte/connections/ + primary_key = None + + def __init__(self, site_id: str = None, **kwargs): + self.site_id = site_id + super().__init__(**kwargs) + + def path(self, **kwargs) -> str: + """ + See: https://developers.webflow.com/#list-collections + Returns a list which contains high-level information about each collection. + """ + + path = f"sites/{self.site_id}/collections" + return path + + def parse_response(self, response: requests.Response, **kwargs) -> Iterable[Mapping]: + """ + This API returns a list containing json objects. So we can just yield each element from the list + """ + response_json = response.json() + yield from response_json + + def next_page_token(self, response: requests.Response) -> Optional[Mapping[str, Any]]: + """This API does not return any information to support pagination""" + return {} + + +class CollectionContents(WebflowStream): + """ + This stream is used for pulling "items" out of a given Webflow collection. Because there is not a fixed number of collections with + pre-defined names, each stream is an object that uses the passed-in collection name for the stream name. + + Note that because the Webflow API works with collection ids rather than collection names, the collection id is + used for hitting the Webflow API. + + An example of a collection is "Blog Posts", which contains a list of items, where each item is a JSON-representation of a blog article. 
+ """ + + # primary_key is not used as we don't do incremental syncs - https://docs.airbyte.com/understanding-airbyte/connections/ + primary_key = None + + # only want to create the name to id lookup table once + + def __init__(self, site_id: str = None, collection_id: str = None, collection_name: str = None, **kwargs): + """override __init__ to add collection-related variables""" + self.site_id = site_id + super().__init__(**kwargs) + self.collection_name = collection_name + self.collection_id = collection_id + + @property + def name(self) -> str: + return self.collection_name + + def path(self, **kwargs) -> str: + """ + The path to get the "items" in the requested collection uses the "_id" of the collection in the URL. + See: https://developers.webflow.com/#items + + return collections//items + """ + path = f"collections/{self.collection_id}/items" + return path + + def next_page_token(self, response: requests.Response) -> Mapping[str, Any]: + decoded_response = response.json() + if decoded_response.get("count", 0) != 0 and decoded_response.get("items", []) != []: + # Webflow uses an offset for pagination https://developers.webflow.com/#item-model + offset = decoded_response["offset"] + decoded_response["count"] + return {"offset": offset} + else: + return {} + + def request_params( + self, + stream_state: Mapping[str, Any], + stream_slice: Mapping[str, Any] = None, + next_page_token: Mapping[str, Any] = None, + ) -> MutableMapping[str, Any]: + + # Webflow default pagination is 100, for debugging pagination we set this to a low value. + # This should be set back to 100 for production + params = {"limit": 100} + + # Handle pagination by inserting the next page's token in the request parameters + if next_page_token: + params.update(next_page_token) + + return params + + def parse_response(self, response: requests.Response, **kwargs) -> Iterable[Mapping]: + """ + Webflow items API returns an array of items contained in the "items" field. 
+ """ + + response_json = response.json() + # The items API returns records inside a container list called "items" + for item in response_json["items"]: + yield item + + def get_json_schema(self) -> Mapping[str, Any]: + """ + Webflow has an API,but it is not consistent with json-schema.org schemas. We use the CollectionSchema stream + to get these schemas and to also map them to json-schema format. + """ + + collection_id = self.collection_id + schema_stream = CollectionSchema(authenticator=self.authenticator, collection_id=collection_id) + schema_records = schema_stream.read_records(sync_mode="full_refresh") + + # each record corresponds to a property in the json schema. So we loop over each of these properties + # and add it to the json schema. + json_schema = {} + for schema_property in schema_records: + json_schema.update(schema_property) + + return { + "$schema": "http://json-schema.org/draft-07/schema#", + "additionalProperties": True, + "type": "object", + "properties": json_schema, + } + + +class SourceWebflow(AbstractSource): + + """This is the main class that defines the methods that will be called by Airbyte infrastructure""" + + @staticmethod + def _get_collection_name_to_id_dict(authenticator: str = None, site_id: str = None) -> Mapping[str, str]: + """ + Most of the Webflow APIs require the collection id, but the streams that we are generating use the collection name. + This function will return a dictionary containing collection_name: collection_id entries. 
+ """ + + collection_name_to_id_dict = {} + + collections_stream = CollectionsList(authenticator=authenticator, site_id=site_id) + collections_records = collections_stream.read_records(sync_mode="full_refresh") + + # Loop over the list of records and create a dictionary with name as key, and _id as value + for collection_obj in collections_records: + collection_name_to_id_dict[collection_obj["name"]] = collection_obj["_id"] + + return collection_name_to_id_dict + + @staticmethod + def get_authenticator(config): + """ + Verifies that the information for setting the header has been set, and returns a class + which overloads that standard authentication to include additional headers that are required by Webflow. + """ + api_key = config.get("api_key", None) + accept_version = WEBFLOW_ACCEPT_VERSION + if not api_key: + raise Exception("Config validation error: 'api_key' is a required property") + + auth = WebflowTokenAuthenticator(token=api_key, accept_version=accept_version) + return auth + + def check_connection(self, logger: logging.Logger, config: Mapping[str, Any]) -> Tuple[bool, any]: + """ + A check to validate that the user-provided config can be used to connect to the underlying API + + :param config: the user-input config object conforming to the connector's spec.yaml + :param logger: logger object + :return Tuple[bool, any]: (True, None) if the input config can be used to connect to the API successfully, (False, error) otherwise. + """ + + try: + # Check that authenticator can be retrieved + auth = self.get_authenticator(config) + site_id = config.get("site_id") + collections_stream = CollectionsList(authenticator=auth, site_id=site_id) + collections_records = collections_stream.read_records(sync_mode="full_refresh") + record = next(collections_records) + logger.info(f"Successfully connected to CollectionsList stream. 
Pulled one record: {record}") + return True, None + except Exception as e: + return False, e + + def generate_streams(self, authenticator: WebflowTokenAuthenticator, site_id: str) -> List[Stream]: + """Generates a list of stream by their names.""" + + collection_name_to_id_dict = self._get_collection_name_to_id_dict(authenticator=authenticator, site_id=site_id) + + for collection_name, collection_id in collection_name_to_id_dict.items(): + yield CollectionContents( + authenticator=authenticator, + site_id=site_id, + collection_id=collection_id, + collection_name=collection_name, + ) + + def streams(self, config: Mapping[str, Any]) -> List[Stream]: + """ + :param config: A Mapping of the user input configuration as defined in the connector spec. + :return List[Stream]: A list/generator of the streams that Airbyte can pull data from. + """ + + auth = self.get_authenticator(config) + site_id = config.get("site_id") + + # Return a list (iterator) of the streams that will be available for use. + # We _dynamically_ generate streams that correspond to Webflow collections (eg. Blog Authors, Blog Posts, etc.) + streams = self.generate_streams(authenticator=auth, site_id=site_id) + + return streams diff --git a/airbyte-integrations/connectors/source-webflow/source_webflow/spec.yaml b/airbyte-integrations/connectors/source-webflow/source_webflow/spec.yaml new file mode 100644 index 000000000000..7a1754509df5 --- /dev/null +++ b/airbyte-integrations/connectors/source-webflow/source_webflow/spec.yaml @@ -0,0 +1,23 @@ +documentationUrl: https://docs.airbyte.io/integrations/sources/webflow +connectionSpecification: + $schema: http://json-schema.org/draft-07/schema# + title: Webflow Spec + type: object + required: + - api_key + - site_id + additionalProperties: false + properties: + site_id: + title: Site id + type: string + description: "The id of the Webflow site you are requesting data from. 
See https://developers.webflow.com/#sites" + example: "a relatively long hex sequence" + order: 0 + api_key: + title: API token + type: string + description: "The API token for authenticating to Webflow. See https://university.webflow.com/lesson/intro-to-the-webflow-api" + example: "a very long hex sequence" + order: 1 + airbyte_secret: true diff --git a/airbyte-integrations/connectors/source-webflow/source_webflow/webflow_to_airbyte_mapping.py b/airbyte-integrations/connectors/source-webflow/source_webflow/webflow_to_airbyte_mapping.py new file mode 100644 index 000000000000..d16b65fbd4fa --- /dev/null +++ b/airbyte-integrations/connectors/source-webflow/source_webflow/webflow_to_airbyte_mapping.py @@ -0,0 +1,33 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +class WebflowToAirbyteMapping: + + """ + The following disctionary is used for dynamically pulling the schema from Webflow, and mapping it to an Airbyte-compatible json-schema + Webflow: https://developers.webflow.com/#get-collection-with-full-schema + Airbyte/json-schema: https://docs.airbyte.com/understanding-airbyte/supported-data-types/ + """ + + webflow_to_airbyte_mapping = { + "Bool": {"type": ["null", "boolean"]}, + "Date": { + "type": ["null", "string"], + "format": "date-time", + }, + "Email": { + "type": ["null", "string"], + }, + "ImageRef": {"type": ["null", "object"], "additionalProperties": True}, + "ItemRef": {"type": ["null", "string"]}, + "ItemRefSet": {"type": ["null", "array"]}, + "Link": {"type": ["null", "string"]}, + "Number": {"type": ["null", "number"]}, + "Option": {"type": ["null", "string"]}, + "PlainText": {"type": ["null", "string"]}, + "RichText": {"type": ["null", "string"]}, + "User": {"type": ["null", "string"]}, + "Video": {"type": ["null", "string"]}, + } diff --git a/airbyte-integrations/connectors/source-webflow/unit_tests/__init__.py b/airbyte-integrations/connectors/source-webflow/unit_tests/__init__.py new file mode 100644 index 
000000000000..46b7376756ec --- /dev/null +++ b/airbyte-integrations/connectors/source-webflow/unit_tests/__init__.py @@ -0,0 +1,3 @@ +# +# Copyright (c) 2021 Airbyte, Inc., all rights reserved. +# diff --git a/airbyte-integrations/connectors/source-webflow/unit_tests/test_source.py b/airbyte-integrations/connectors/source-webflow/unit_tests/test_source.py new file mode 100644 index 000000000000..3964f6e5c205 --- /dev/null +++ b/airbyte-integrations/connectors/source-webflow/unit_tests/test_source.py @@ -0,0 +1,28 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + +from unittest import TestCase +from unittest.mock import MagicMock, patch + +from source_webflow.source import SourceWebflow + + +def test_check_connection(mocker): + source = SourceWebflow() + fake_info_record = {"collection": "is_mocked"} + with patch("source_webflow.source.CollectionsList.read_records", MagicMock(return_value=iter([fake_info_record]))): + logger_mock, config_mock = MagicMock(), MagicMock() + assert source.check_connection(logger_mock, config_mock) == (True, None) + logger_mock.info.assert_called_once() + my_regex = r"Successfully connected.*" + str(fake_info_record) + TestCase().assertRegex(logger_mock.method_calls[0].args[0], my_regex) + + +def test_streams(mocker): + # use the "with" to prevent the patch from impacting other tests + with patch("source_webflow.source.SourceWebflow.generate_streams", MagicMock(return_value=["This would be a stream"])): + source = SourceWebflow() + config_mock = MagicMock() + streams = source.streams(config_mock) + assert len(streams) == 1 diff --git a/airbyte-integrations/connectors/source-webflow/unit_tests/test_streams.py b/airbyte-integrations/connectors/source-webflow/unit_tests/test_streams.py new file mode 100644 index 000000000000..8929f3a8d539 --- /dev/null +++ b/airbyte-integrations/connectors/source-webflow/unit_tests/test_streams.py @@ -0,0 +1,78 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+# + +from http import HTTPStatus +from unittest.mock import MagicMock + +import pytest +from source_webflow.source import CollectionContents, SourceWebflow, WebflowStream + + +@pytest.fixture +def patch_base_class(mocker): + # Mock abstract methods to enable instantiating abstract class + mocker.patch.object(WebflowStream, "path", "v0/example_endpoint") + mocker.patch.object(WebflowStream, "primary_key", "test_primary_key") + mocker.patch.object(WebflowStream, "__abstractmethods__", set()) + + +def test_request_params_of_collection_items(patch_base_class): + stream = CollectionContents() + inputs = {"stream_slice": None, "stream_state": None, "next_page_token": {"offset": 1}} + expected_params = {"limit": 100, "offset": 1} + assert stream.request_params(**inputs) == expected_params + + +def test_next_page_token_of_collection_items(patch_base_class): + stream = CollectionContents() + response_data = {"items": [{"item1_key": "item1_val"}], "count": 10, "offset": 100} + inputs = {"response": MagicMock(json=lambda: response_data)} + expected_token = {"offset": 110} + assert stream.next_page_token(**inputs) == expected_token + + +def test_parse_response_of_collection_items(patch_base_class): + stream = CollectionContents() + mock_record = {"item1_key": "item1_val"} + response_data = {"items": [mock_record]} + inputs = {"response": MagicMock(json=lambda: response_data)} + parsed_item = next(stream.parse_response(**inputs)) + assert parsed_item == mock_record + + +def test_generate_streams(patch_base_class): + SourceWebflow._get_collection_name_to_id_dict = MagicMock(return_value={"name-1": "id-1", "name-2": "id-2"}) + source = SourceWebflow() + config_mock = MagicMock() + streams = source.generate_streams(config_mock, "fake site id") + assert len(list(streams)) == 2 + + +def test_http_method(patch_base_class): + stream = WebflowStream() + expected_method = "GET" + assert stream.http_method == expected_method + + +@pytest.mark.parametrize( + ("http_status", 
"should_retry"), + [ + (HTTPStatus.OK, False), + (HTTPStatus.BAD_REQUEST, False), + (HTTPStatus.TOO_MANY_REQUESTS, True), + (HTTPStatus.INTERNAL_SERVER_ERROR, True), + ], +) +def test_should_retry(patch_base_class, http_status, should_retry): + response_mock = MagicMock() + response_mock.status_code = http_status + stream = WebflowStream() + assert stream.should_retry(response_mock) == should_retry + + +def test_backoff_time(patch_base_class): + response_mock = MagicMock() + stream = WebflowStream() + expected_backoff_time = None + assert stream.backoff_time(response_mock) == expected_backoff_time diff --git a/airbyte-integrations/connectors/source-zendesk-chat/Dockerfile b/airbyte-integrations/connectors/source-zendesk-chat/Dockerfile index 09e257cb0283..fc5a117e99bc 100644 --- a/airbyte-integrations/connectors/source-zendesk-chat/Dockerfile +++ b/airbyte-integrations/connectors/source-zendesk-chat/Dockerfile @@ -16,5 +16,5 @@ RUN pip install . ENTRYPOINT ["python", "/airbyte/integration_code/main_dev.py"] -LABEL io.airbyte.version=0.1.7 +LABEL io.airbyte.version=0.1.8 LABEL io.airbyte.name=airbyte/source-zendesk-chat diff --git a/airbyte-integrations/connectors/source-zendesk-chat/source_zendesk_chat/schemas/chats.json b/airbyte-integrations/connectors/source-zendesk-chat/source_zendesk_chat/schemas/chats.json index e1d4f83877aa..ec9f815de63c 100644 --- a/airbyte-integrations/connectors/source-zendesk-chat/source_zendesk_chat/schemas/chats.json +++ b/airbyte-integrations/connectors/source-zendesk-chat/source_zendesk_chat/schemas/chats.json @@ -207,8 +207,43 @@ "additionalProperties": false }, "session": { - "type": ["null", "object"], - "additionalProperties": true + "properties": { + "end_date": { + "format": "date-time", + "type": ["null", "string"] + }, + "country_code": { + "type": ["null", "string"] + }, + "city": { + "type": ["null", "string"] + }, + "browser": { + "type": ["null", "string"] + }, + "ip": { + "type": ["null", "string"] + }, + "country_name": 
{ + "type": ["null", "string"] + }, + "id": { + "type": ["null", "string"] + }, + "region": { + "type": ["null", "string"] + }, + "platform": { + "type": ["null", "string"] + }, + "user_agent": { + "type": ["null", "string"] + }, + "start_date": { + "format": "date-time", + "type": ["null", "string"] + } + } }, "started_by": { "type": ["null", "string"] diff --git a/airbyte-integrations/connectors/source-zendesk-chat/source_zendesk_chat/streams.py b/airbyte-integrations/connectors/source-zendesk-chat/source_zendesk_chat/streams.py index 3e5c93833de5..1ba4b454f819 100644 --- a/airbyte-integrations/connectors/source-zendesk-chat/source_zendesk_chat/streams.py +++ b/airbyte-integrations/connectors/source-zendesk-chat/source_zendesk_chat/streams.py @@ -104,6 +104,8 @@ def _field_to_datetime(value: Union[int, str]) -> pendulum.datetime: class TimeIncrementalStream(BaseIncrementalStream, ABC): + state_checkpoint_interval = 1000 + def __init__(self, start_date, **kwargs): super().__init__(**kwargs) self._start_date = pendulum.parse(start_date) @@ -186,6 +188,7 @@ class AgentTimelines(TimeIncrementalStream): cursor_field = "start_time" data_field = "agent_timeline" name = "agent_timeline" + limit = 1000 def request_params(self, **kwargs) -> MutableMapping[str, Any]: params = super().request_params(**kwargs) @@ -226,6 +229,7 @@ class Chats(TimeIncrementalStream): cursor_field = "update_timestamp" data_field = "chats" + limit = 1000 class Shortcuts(Stream): diff --git a/airbyte-integrations/connectors/source-zendesk-support/Dockerfile b/airbyte-integrations/connectors/source-zendesk-support/Dockerfile index f07d4a07ed6c..7525f19db6b2 100644 --- a/airbyte-integrations/connectors/source-zendesk-support/Dockerfile +++ b/airbyte-integrations/connectors/source-zendesk-support/Dockerfile @@ -25,5 +25,5 @@ COPY source_zendesk_support ./source_zendesk_support ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] 
-LABEL io.airbyte.version=0.2.9 +LABEL io.airbyte.version=0.2.11 LABEL io.airbyte.name=airbyte/source-zendesk-support diff --git a/airbyte-integrations/connectors/source-zendesk-support/source_zendesk_support/streams.py b/airbyte-integrations/connectors/source-zendesk-support/source_zendesk_support/streams.py index 1dcb8bab758a..7f3ece6f653a 100644 --- a/airbyte-integrations/connectors/source-zendesk-support/source_zendesk_support/streams.py +++ b/airbyte-integrations/connectors/source-zendesk-support/source_zendesk_support/streams.py @@ -3,6 +3,7 @@ # import calendar +import re import time from abc import ABC from collections import deque @@ -31,6 +32,15 @@ END_OF_STREAM_KEY: str = "end_of_stream" +def to_int(s): + "https://github.com/airbytehq/airbyte/issues/13673" + if isinstance(s, str): + res = re.findall(r"[-+]?\d+", s) + if res: + return res[0] + return s + + class SourceZendeskException(Exception): """default exception of custom SourceZendesk logic""" @@ -78,7 +88,7 @@ def backoff_time(self, response: requests.Response) -> Union[int, float]: The response has a Retry-After header that tells you for how many seconds to wait before retrying. 
""" - retry_after = int(response.headers.get("Retry-After", 0)) + retry_after = int(to_int(response.headers.get("Retry-After", 0))) if retry_after > 0: return retry_after @@ -336,6 +346,7 @@ class SourceZendeskSupportCursorPaginationStream(SourceZendeskSupportFullRefresh Endpoints provide a cursor pagination and sorting mechanism """ + cursor_field = "updated_at" next_page_field = "next_page" prev_start_time = None @@ -379,7 +390,6 @@ class SourceZendeskIncrementalExportStream(SourceZendeskSupportCursorPaginationS more info: https://developer.zendesk.com/documentation/ticketing/using-the-zendesk-api/side_loading/#supported-endpoints """ - cursor_field = "updated_at" response_list_name: str = None sideload_param: str = None @@ -483,30 +493,24 @@ class Groups(SourceZendeskSupportStream): class GroupMemberships(SourceZendeskSupportCursorPaginationStream): """GroupMemberships stream: https://developer.zendesk.com/api-reference/ticketing/groups/group_memberships/""" - cursor_field = "updated_at" - -class SatisfactionRatings(SourceZendeskSupportStream): - """SatisfactionRatings stream: https://developer.zendesk.com/api-reference/ticketing/ticket-management/satisfaction_ratings/ - - The ZenDesk API for this stream provides the filter "start_time" that can be used for incremental logic +class SatisfactionRatings(SourceZendeskSupportCursorPaginationStream): + """ + SatisfactionRatings stream: https://developer.zendesk.com/api-reference/ticketing/ticket-management/satisfaction_ratings/ """ + def next_page_token(self, response: requests.Response) -> Optional[Mapping[str, Any]]: + next_page = self._parse_next_page_number(response) + return next_page if next_page else None + def request_params( self, stream_state: Mapping[str, Any] = None, next_page_token: Mapping[str, Any] = None, **kwargs ) -> MutableMapping[str, Any]: - """Adds the filtering field 'start_time'""" - params = super().request_params(stream_state=stream_state, next_page_token=next_page_token, **kwargs) + params 
= {"page": 1, "per_page": self.page_size, "sort_by": "asc"} start_time = self.str2unixtime((stream_state or {}).get(self.cursor_field)) - - if not start_time: - start_time = self.str2unixtime(self._start_date) - params.update( - { - "start_time": start_time, - "sort_by": "asc", - } - ) + params["start_time"] = start_time if start_time else self.str2unixtime(self._start_date) + if next_page_token: + params["page"] = next_page_token return params @@ -517,15 +521,31 @@ class TicketFields(SourceZendeskSupportStream): class TicketForms(SourceZendeskSupportCursorPaginationStream): """TicketForms stream: https://developer.zendesk.com/api-reference/ticketing/tickets/ticket_forms/""" - cursor_field = "updated_at" - -class TicketMetrics(SourceZendeskSupportStream): +class TicketMetrics(SourceZendeskSupportCursorPaginationStream): """TicketMetric stream: https://developer.zendesk.com/api-reference/ticketing/tickets/ticket_metrics/""" + def next_page_token(self, response: requests.Response) -> Optional[Mapping[str, Any]]: + next_page = self._parse_next_page_number(response) + return next_page if next_page else None + + def request_params( + self, stream_state: Mapping[str, Any] = None, next_page_token: Mapping[str, Any] = None, **kwargs + ) -> MutableMapping[str, Any]: + params = { + "start_time": self.check_stream_state(stream_state), + "page": 1, + "per_page": self.page_size, + } + if next_page_token: + params["page"] = next_page_token + return params + class TicketMetricEvents(SourceZendeskSupportCursorPaginationStream): - """TicketMetricEvents stream: https://developer.zendesk.com/api-reference/ticketing/tickets/ticket_metric_events/""" + """ + TicketMetricEvents stream: https://developer.zendesk.com/api-reference/ticketing/tickets/ticket_metric_events/ + """ cursor_field = "time" diff --git a/airbyte-integrations/connectors/source-zendesk-support/unit_tests/test_backoff_on_rate_limit.py 
b/airbyte-integrations/connectors/source-zendesk-support/unit_tests/test_backoff_on_rate_limit.py index 19061300e77e..9f7a6c27e31f 100644 --- a/airbyte-integrations/connectors/source-zendesk-support/unit_tests/test_backoff_on_rate_limit.py +++ b/airbyte-integrations/connectors/source-zendesk-support/unit_tests/test_backoff_on_rate_limit.py @@ -25,11 +25,11 @@ def prepare_config(config: Dict): return SourceZendeskSupport().convert_config2stream_args(config) -def test_backoff(requests_mock, config): +@pytest.mark.parametrize("retry_after, expected", [("5", 5), ("5, 4", 5)]) +def test_backoff(requests_mock, config, retry_after, expected): """ """ - test_response_header = {"Retry-After": "5", "X-Rate-Limit": "0"} + test_response_header = {"Retry-After": retry_after, "X-Rate-Limit": "0"} test_response_json = {"count": {"value": 1, "refreshed_at": "2022-03-29T10:10:51+00:00"}} - expected = int(test_response_header.get("Retry-After")) # create client config = prepare_config(config) diff --git a/airbyte-integrations/connectors/source-zendesk-support/unit_tests/unit_test.py b/airbyte-integrations/connectors/source-zendesk-support/unit_tests/unit_test.py index b815ca81aef5..35537455fe54 100644 --- a/airbyte-integrations/connectors/source-zendesk-support/unit_tests/unit_test.py +++ b/airbyte-integrations/connectors/source-zendesk-support/unit_tests/unit_test.py @@ -15,7 +15,7 @@ import requests from airbyte_cdk import AirbyteLogger from source_zendesk_support.source import BasicApiTokenAuthenticator, SourceZendeskSupport -from source_zendesk_support.streams import ( # streams +from source_zendesk_support.streams import ( DATETIME_FORMAT, END_OF_STREAM_KEY, LAST_END_TIME_KEY, @@ -406,17 +406,13 @@ def test_get_updated_state(self, stream_cls, current_state, last_record, expecte (Macros, None), (Organizations, None), (Groups, None), - (SatisfactionRatings, None), (TicketFields, None), - (TicketMetrics, None), ], ids=[ "Macros", "Organizations", "Groups", - "SatisfactionRatings", 
"TicketFields", - "TicketMetrics", ], ) def test_next_page_token(self, stream_cls, expected): @@ -430,17 +426,13 @@ def test_next_page_token(self, stream_cls, expected): (Macros, {"start_time": 1622505600}), (Organizations, {"start_time": 1622505600}), (Groups, {"start_time": 1622505600}), - (SatisfactionRatings, {"start_time": 1622505600, "sort_by": "asc"}), (TicketFields, {"start_time": 1622505600}), - (TicketMetrics, {"start_time": 1622505600}), ], ids=[ "Macros", "Organizations", "Groups", - "SatisfactionRatings", "TicketFields", - "TicketMetrics", ], ) def test_request_params(self, stream_cls, expected): @@ -555,12 +547,16 @@ def test_get_updated_state(self, stream_cls, current_state, last_record, expecte (TicketForms), (TicketMetricEvents), (TicketAudits), + (TicketMetrics), + (SatisfactionRatings), ], ids=[ "GroupMemberships", "TicketForms", "TicketMetricEvents", "TicketAudits", + "TicketMetrics", + "SatisfactionRatings", ], ) def test_next_page_token(self, requests_mock, stream_cls): @@ -598,12 +594,16 @@ def test_check_stream_state(self, stream_cls, expected): (TicketForms, {"start_time": 1622505600}), (TicketMetricEvents, {"start_time": 1622505600}), (TicketAudits, {"sort_by": "created_at", "sort_order": "desc", "limit": 1000}), + (SatisfactionRatings, {"page": 1, "per_page": 100, "sort_by": "asc", "start_time": 1622505600}), + (TicketMetrics, {"page": 1, "per_page": 100, "start_time": 1622505600}), ], ids=[ "GroupMemberships", "TicketForms", "TicketMetricEvents", "TicketAudits", + "SatisfactionRatings", + "TicketMetrics", ], ) def test_request_params(self, stream_cls, expected): diff --git a/airbyte-metrics/metrics-lib/build.gradle b/airbyte-metrics/metrics-lib/build.gradle index 32aeba37a53d..cd69896a6e22 100644 --- a/airbyte-metrics/metrics-lib/build.gradle +++ b/airbyte-metrics/metrics-lib/build.gradle @@ -9,6 +9,8 @@ dependencies { implementation project(':airbyte-db:db-lib') implementation libs.otel.semconv + implementation libs.otel.sdk + 
implementation libs.otel.sdk.testing implementation platform(libs.otel.bom) implementation("io.opentelemetry:opentelemetry-api") implementation("io.opentelemetry:opentelemetry-sdk") @@ -18,7 +20,7 @@ dependencies { testImplementation project(':airbyte-config:config-persistence') testImplementation project(':airbyte-test-utils') - testImplementation libs.testcontainers.postgresql + testImplementation libs.platform.testcontainers.postgresql } Task publishArtifactsTask = getPublishArtifactsTask("$rootProject.ext.version", project) diff --git a/airbyte-metrics/metrics-lib/src/main/java/io/airbyte/metrics/lib/MetricQueries.java b/airbyte-metrics/metrics-lib/src/main/java/io/airbyte/metrics/lib/MetricQueries.java index d83e34356932..053a1cc0908c 100644 --- a/airbyte-metrics/metrics-lib/src/main/java/io/airbyte/metrics/lib/MetricQueries.java +++ b/airbyte-metrics/metrics-lib/src/main/java/io/airbyte/metrics/lib/MetricQueries.java @@ -6,9 +6,12 @@ import static io.airbyte.db.instance.configs.jooq.generated.Tables.ACTOR; import static io.airbyte.db.instance.configs.jooq.generated.Tables.ACTOR_DEFINITION; +import static io.airbyte.db.instance.configs.jooq.generated.Tables.CONNECTION; import static io.airbyte.db.instance.jobs.jooq.generated.Tables.JOBS; +import static org.jooq.impl.SQLDataType.VARCHAR; import io.airbyte.db.instance.configs.jooq.generated.enums.ReleaseStage; +import io.airbyte.db.instance.configs.jooq.generated.enums.StatusType; import io.airbyte.db.instance.jobs.jooq.generated.enums.JobStatus; import java.util.ArrayList; import java.util.List; @@ -59,7 +62,15 @@ public static int numberOfPendingJobs(final DSLContext ctx) { } public static int numberOfRunningJobs(final DSLContext ctx) { - return ctx.selectCount().from(JOBS).where(JOBS.STATUS.eq(JobStatus.running)).fetchOne(0, int.class); + return ctx.selectCount().from(JOBS).join(CONNECTION).on(CONNECTION.ID.cast(VARCHAR(255)).eq(JOBS.SCOPE)) + 
.where(JOBS.STATUS.eq(JobStatus.running).and(CONNECTION.STATUS.eq(StatusType.active))) + .fetchOne(0, int.class); + } + + public static int numberOfOrphanRunningJobs(final DSLContext ctx) { + return ctx.selectCount().from(JOBS).join(CONNECTION).on(CONNECTION.ID.cast(VARCHAR(255)).eq(JOBS.SCOPE)) + .where(JOBS.STATUS.eq(JobStatus.running).and(CONNECTION.STATUS.ne(StatusType.active))) + .fetchOne(0, int.class); } public static Long oldestPendingJobAgeSecs(final DSLContext ctx) { diff --git a/airbyte-metrics/metrics-lib/src/main/java/io/airbyte/metrics/lib/OpenTelemetryMetricClient.java b/airbyte-metrics/metrics-lib/src/main/java/io/airbyte/metrics/lib/OpenTelemetryMetricClient.java index f5e2bba98c11..45d8ed675e7c 100644 --- a/airbyte-metrics/metrics-lib/src/main/java/io/airbyte/metrics/lib/OpenTelemetryMetricClient.java +++ b/airbyte-metrics/metrics-lib/src/main/java/io/airbyte/metrics/lib/OpenTelemetryMetricClient.java @@ -8,6 +8,7 @@ import static io.opentelemetry.api.common.AttributeKey.stringKey; import static io.opentelemetry.semconv.resource.attributes.ResourceAttributes.SERVICE_NAME; +import com.google.common.annotations.VisibleForTesting; import io.opentelemetry.api.OpenTelemetry; import io.opentelemetry.api.common.Attributes; import io.opentelemetry.api.common.AttributesBuilder; @@ -20,6 +21,7 @@ import io.opentelemetry.exporter.otlp.trace.OtlpGrpcSpanExporter; import io.opentelemetry.sdk.OpenTelemetrySdk; import io.opentelemetry.sdk.metrics.SdkMeterProvider; +import io.opentelemetry.sdk.metrics.export.MetricExporter; import io.opentelemetry.sdk.metrics.export.PeriodicMetricReader; import io.opentelemetry.sdk.resources.Resource; import io.opentelemetry.sdk.trace.SdkTracerProvider; @@ -28,6 +30,7 @@ public class OpenTelemetryMetricClient implements MetricClient { private Meter meter; + private SdkMeterProvider meterProvider; @Override public void count(MetricsRegistry metric, long val, String... 
tags) { @@ -75,9 +78,19 @@ public void initialize(MetricEmittingApp metricEmittingApp, String otelEndpoint) .build()) .setResource(resource) .build(); - OtlpGrpcMetricExporter metricExporter = OtlpGrpcMetricExporter.builder() + MetricExporter metricExporter = OtlpGrpcMetricExporter.builder() .setEndpoint(otelEndpoint).build(); - SdkMeterProvider meterProvider = SdkMeterProvider.builder() + initialize(metricEmittingApp, metricExporter, sdkTracerProvider, resource); + } + + @VisibleForTesting + SdkMeterProvider getSdkMeterProvider() { + return meterProvider; + } + + @VisibleForTesting + void initialize(MetricEmittingApp metricEmittingApp, MetricExporter metricExporter, SdkTracerProvider sdkTracerProvider, Resource resource) { + meterProvider = SdkMeterProvider.builder() .registerMetricReader(PeriodicMetricReader.builder(metricExporter).build()) .setResource(resource) .build(); diff --git a/airbyte-metrics/metrics-lib/src/main/java/io/airbyte/metrics/lib/OssMetricsRegistry.java b/airbyte-metrics/metrics-lib/src/main/java/io/airbyte/metrics/lib/OssMetricsRegistry.java index 0f528b4cef1b..dea563de9ad8 100644 --- a/airbyte-metrics/metrics-lib/src/main/java/io/airbyte/metrics/lib/OssMetricsRegistry.java +++ b/airbyte-metrics/metrics-lib/src/main/java/io/airbyte/metrics/lib/OssMetricsRegistry.java @@ -79,6 +79,10 @@ public enum OssMetricsRegistry implements MetricsRegistry { MetricEmittingApps.METRICS_REPORTER, "num_running_jobs", "number of running jobs"), + NUM_ORPHAN_RUNNING_JOBS( + MetricEmittingApps.METRICS_REPORTER, + "num_orphan_running_jobs", + "number of jobs reported as running that as associated to connection inactive or deprecated"), NUM_ACTIVE_CONN_PER_WORKSPACE( MetricEmittingApps.METRICS_REPORTER, "num_active_conn_per_workspace", diff --git a/airbyte-metrics/metrics-lib/src/test/java/io/airbyte/metrics/lib/MetricsQueriesTest.java b/airbyte-metrics/metrics-lib/src/test/java/io/airbyte/metrics/lib/MetricsQueriesTest.java index 9c443ed1ee31..f5793e41bdc6 100644 
--- a/airbyte-metrics/metrics-lib/src/test/java/io/airbyte/metrics/lib/MetricsQueriesTest.java +++ b/airbyte-metrics/metrics-lib/src/test/java/io/airbyte/metrics/lib/MetricsQueriesTest.java @@ -177,18 +177,38 @@ void tearDown() throws SQLException { @Test void runningJobsShouldReturnCorrectCount() throws SQLException { + final var srcId = UUID.randomUUID(); + final var dstId = UUID.randomUUID(); + configDb.transaction( + ctx -> ctx.insertInto(ACTOR, ACTOR.ID, ACTOR.WORKSPACE_ID, ACTOR.ACTOR_DEFINITION_ID, ACTOR.NAME, ACTOR.CONFIGURATION, ACTOR.ACTOR_TYPE) + .values(srcId, UUID.randomUUID(), SRC_DEF_ID, "src", JSONB.valueOf("{}"), ActorType.source) + .values(dstId, UUID.randomUUID(), DST_DEF_ID, "dst", JSONB.valueOf("{}"), ActorType.destination) + .execute()); + final UUID activeConnectionId = UUID.randomUUID(); + final UUID inactiveConnectionId = UUID.randomUUID(); + configDb.transaction( + ctx -> ctx + .insertInto(CONNECTION, CONNECTION.ID, CONNECTION.STATUS, CONNECTION.NAMESPACE_DEFINITION, CONNECTION.SOURCE_ID, + CONNECTION.DESTINATION_ID, CONNECTION.NAME, CONNECTION.CATALOG, CONNECTION.MANUAL) + .values(activeConnectionId, StatusType.active, NamespaceDefinitionType.source, srcId, dstId, "conn", JSONB.valueOf("{}"), true) + .values(inactiveConnectionId, StatusType.inactive, NamespaceDefinitionType.source, srcId, dstId, "conn", JSONB.valueOf("{}"), true) + .execute()); + // non-pending jobs configDb.transaction( - ctx -> ctx.insertInto(JOBS, JOBS.ID, JOBS.SCOPE, JOBS.STATUS).values(1L, "", JobStatus.pending).execute()); + ctx -> ctx.insertInto(JOBS, JOBS.ID, JOBS.SCOPE, JOBS.STATUS).values(1L, activeConnectionId.toString(), JobStatus.pending).execute()); configDb.transaction( - ctx -> ctx.insertInto(JOBS, JOBS.ID, JOBS.SCOPE, JOBS.STATUS).values(2L, "", JobStatus.failed).execute()); + ctx -> ctx.insertInto(JOBS, JOBS.ID, JOBS.SCOPE, JOBS.STATUS).values(2L, activeConnectionId.toString(), JobStatus.failed).execute()); configDb.transaction( - ctx -> 
ctx.insertInto(JOBS, JOBS.ID, JOBS.SCOPE, JOBS.STATUS).values(3L, "", JobStatus.running).execute()); + ctx -> ctx.insertInto(JOBS, JOBS.ID, JOBS.SCOPE, JOBS.STATUS).values(3L, activeConnectionId.toString(), JobStatus.running).execute()); configDb.transaction( - ctx -> ctx.insertInto(JOBS, JOBS.ID, JOBS.SCOPE, JOBS.STATUS).values(4L, "", JobStatus.running).execute()); + ctx -> ctx.insertInto(JOBS, JOBS.ID, JOBS.SCOPE, JOBS.STATUS).values(4L, activeConnectionId.toString(), JobStatus.running).execute()); + configDb.transaction( + ctx -> ctx.insertInto(JOBS, JOBS.ID, JOBS.SCOPE, JOBS.STATUS).values(5L, inactiveConnectionId.toString(), JobStatus.running).execute()); final var res = configDb.query(MetricQueries::numberOfRunningJobs); - assertEquals(2, res); + assertEquals(2, configDb.query(MetricQueries::numberOfRunningJobs)); + assertEquals(1, configDb.query(MetricQueries::numberOfOrphanRunningJobs)); } @Test diff --git a/airbyte-metrics/metrics-lib/src/test/java/io/airbyte/metrics/lib/OpenTelemetryMetricClientTest.java b/airbyte-metrics/metrics-lib/src/test/java/io/airbyte/metrics/lib/OpenTelemetryMetricClientTest.java index 0c0062c930cb..a6f7ad704f07 100644 --- a/airbyte-metrics/metrics-lib/src/test/java/io/airbyte/metrics/lib/OpenTelemetryMetricClientTest.java +++ b/airbyte-metrics/metrics-lib/src/test/java/io/airbyte/metrics/lib/OpenTelemetryMetricClientTest.java @@ -4,8 +4,18 @@ package io.airbyte.metrics.lib; +import static io.opentelemetry.semconv.resource.attributes.ResourceAttributes.SERVICE_NAME; +import static org.assertj.core.api.AssertionsForClassTypes.assertThat; + +import com.google.common.collect.Iterables; +import io.opentelemetry.api.common.AttributeKey; +import io.opentelemetry.sdk.metrics.SdkMeterProvider; +import io.opentelemetry.sdk.metrics.data.MetricData; +import io.opentelemetry.sdk.resources.Resource; +import io.opentelemetry.sdk.testing.exporter.InMemoryMetricExporter; +import io.opentelemetry.sdk.trace.SdkTracerProvider; +import 
java.util.List; import org.junit.jupiter.api.AfterEach; -import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.DisplayName; import org.junit.jupiter.api.Test; @@ -13,12 +23,24 @@ class OpenTelemetryMetricClientTest { OpenTelemetryMetricClient openTelemetryMetricClient; - private final static String EXPORTER_ENDPOINT = "http://localhost:4322"; + private final static String TAG = "tag1"; + + private final static MetricEmittingApp METRIC_EMITTING_APP = MetricEmittingApps.WORKER; + private InMemoryMetricExporter metricExporter; + private SdkMeterProvider metricProvider; @BeforeEach void setUp() { openTelemetryMetricClient = new OpenTelemetryMetricClient(); - openTelemetryMetricClient.initialize(MetricEmittingApps.WORKER, EXPORTER_ENDPOINT); + + Resource resource = Resource.getDefault().toBuilder().put(SERVICE_NAME, METRIC_EMITTING_APP.getApplicationName()).build(); + metricExporter = InMemoryMetricExporter.create(); + SdkTracerProvider sdkTracerProvider = SdkTracerProvider.builder() + .setResource(resource) + .build(); + openTelemetryMetricClient.initialize(METRIC_EMITTING_APP, metricExporter, sdkTracerProvider, resource); + + metricProvider = openTelemetryMetricClient.getSdkMeterProvider(); } @AfterEach @@ -27,27 +49,63 @@ void tearDown() { } @Test - @DisplayName("there should be no exception if we attempt to emit metrics while publish is false") - public void testPublishTrueNoEmitError() { - Assertions.assertDoesNotThrow(() -> { - openTelemetryMetricClient.gauge(OssMetricsRegistry.KUBE_POD_PROCESS_CREATE_TIME_MILLISECS, 1); - }); + @DisplayName("Should send out count metric with correct metric name, description and value") + public void testCountSuccess() { + openTelemetryMetricClient.count(OssMetricsRegistry.KUBE_POD_PROCESS_CREATE_TIME_MILLISECS, 1); + + metricProvider.forceFlush(); + List metricDataList = metricExporter.getFinishedMetricItems(); + MetricData data = Iterables.getOnlyElement(metricDataList); + 
+ assertThat(data.getName()).isEqualTo(OssMetricsRegistry.KUBE_POD_PROCESS_CREATE_TIME_MILLISECS.getMetricName()); + assertThat(data.getDescription()).isEqualTo(OssMetricsRegistry.KUBE_POD_PROCESS_CREATE_TIME_MILLISECS.getMetricDescription()); + assertThat(data.getLongSumData().getPoints().stream().anyMatch(longPointData -> longPointData.getValue() == 1L)); } @Test - @DisplayName("there should be no exception if we attempt to emit metrics while publish is true") - public void testPublishFalseNoEmitError() { - Assertions.assertDoesNotThrow(() -> { - openTelemetryMetricClient.gauge(OssMetricsRegistry.KUBE_POD_PROCESS_CREATE_TIME_MILLISECS, 1); - }); + @DisplayName("Tags should be passed into metrics") + public void testCountWithTagSuccess() { + openTelemetryMetricClient.count(OssMetricsRegistry.KUBE_POD_PROCESS_CREATE_TIME_MILLISECS, 1, TAG); + + metricProvider.forceFlush(); + List metricDataList = metricExporter.getFinishedMetricItems(); + MetricData data = Iterables.getOnlyElement(metricDataList); + + assertThat(data.getName()).isEqualTo(OssMetricsRegistry.KUBE_POD_PROCESS_CREATE_TIME_MILLISECS.getMetricName()); + assertThat(data.getDescription()).isEqualTo(OssMetricsRegistry.KUBE_POD_PROCESS_CREATE_TIME_MILLISECS.getMetricDescription()); + assertThat(data.getLongSumData().getPoints().stream() + .anyMatch( + longPointData -> longPointData.getValue() == 1L && longPointData.getAttributes().get(AttributeKey.stringKey(TAG)).equals(TAG))); } @Test - @DisplayName("there should be no exception if we attempt to emit metrics without initializing") - public void testNoInitializeNoEmitError() { - Assertions.assertDoesNotThrow(() -> { - openTelemetryMetricClient.gauge(OssMetricsRegistry.KUBE_POD_PROCESS_CREATE_TIME_MILLISECS, 1); - }); + @DisplayName("Should send out gauge metric with correct metric name, description and value") + public void testGaugeSuccess() throws Exception { + openTelemetryMetricClient.gauge(OssMetricsRegistry.KUBE_POD_PROCESS_CREATE_TIME_MILLISECS, 1); + 
+ metricProvider.forceFlush(); + List metricDataList = metricExporter.getFinishedMetricItems(); + MetricData data = Iterables.getOnlyElement(metricDataList); + + assertThat(data.getName()).isEqualTo(OssMetricsRegistry.KUBE_POD_PROCESS_CREATE_TIME_MILLISECS.getMetricName()); + assertThat(data.getDescription()).isEqualTo(OssMetricsRegistry.KUBE_POD_PROCESS_CREATE_TIME_MILLISECS.getMetricDescription()); + assertThat(data.getDoubleGaugeData().getPoints().stream().anyMatch(doublePointData -> doublePointData.getValue() == 1.0)); + } + + @Test + @DisplayName("Should send out histogram metric with correct metric name, description and value") + public void testHistogramSuccess() { + openTelemetryMetricClient.distribution(OssMetricsRegistry.KUBE_POD_PROCESS_CREATE_TIME_MILLISECS, 10); + openTelemetryMetricClient.distribution(OssMetricsRegistry.KUBE_POD_PROCESS_CREATE_TIME_MILLISECS, 30); + + metricProvider.forceFlush(); + List metricDataList = metricExporter.getFinishedMetricItems(); + MetricData data = Iterables.getOnlyElement(metricDataList); + + assertThat(data.getName()).isEqualTo(OssMetricsRegistry.KUBE_POD_PROCESS_CREATE_TIME_MILLISECS.getMetricName()); + assertThat(data.getDescription()).isEqualTo(OssMetricsRegistry.KUBE_POD_PROCESS_CREATE_TIME_MILLISECS.getMetricDescription()); + assertThat(data.getHistogramData().getPoints().stream().anyMatch(histogramPointData -> histogramPointData.getMax() == 30.0)); + assertThat(data.getHistogramData().getPoints().stream().anyMatch(histogramPointData -> histogramPointData.getMin() == 10.0)); } } diff --git a/airbyte-metrics/reporter/Dockerfile b/airbyte-metrics/reporter/Dockerfile index 53b5edf7ec3e..b6a182b7937d 100644 --- a/airbyte-metrics/reporter/Dockerfile +++ b/airbyte-metrics/reporter/Dockerfile @@ -2,7 +2,7 @@ ARG JDK_VERSION=17.0.1 ARG JDK_IMAGE=openjdk:${JDK_VERSION}-slim FROM ${JDK_IMAGE} AS metrics-reporter -ARG VERSION=0.39.17-alpha +ARG VERSION=0.39.28-alpha ENV APPLICATION airbyte-metrics-reporter ENV VERSION 
${VERSION} diff --git a/airbyte-metrics/reporter/src/main/java/io/airbyte/metrics/reporter/ToEmit.java b/airbyte-metrics/reporter/src/main/java/io/airbyte/metrics/reporter/ToEmit.java index 90e416401dab..b63366ef2141 100644 --- a/airbyte-metrics/reporter/src/main/java/io/airbyte/metrics/reporter/ToEmit.java +++ b/airbyte-metrics/reporter/src/main/java/io/airbyte/metrics/reporter/ToEmit.java @@ -30,6 +30,10 @@ public enum ToEmit { final var runningJobs = ReporterApp.configDatabase.query(MetricQueries::numberOfRunningJobs); MetricClientFactory.getMetricClient().gauge(OssMetricsRegistry.NUM_RUNNING_JOBS, runningJobs); })), + NUM_ORPHAN_RUNNING_JOB(countMetricEmission(() -> { + final var orphanRunningJobs = ReporterApp.configDatabase.query(MetricQueries::numberOfOrphanRunningJobs); + MetricClientFactory.getMetricClient().gauge(OssMetricsRegistry.NUM_ORPHAN_RUNNING_JOBS, orphanRunningJobs); + })), OLDEST_RUNNING_JOB_AGE_SECS(countMetricEmission(() -> { final var age = ReporterApp.configDatabase.query(MetricQueries::oldestRunningJobAgeSecs); MetricClientFactory.getMetricClient().gauge(OssMetricsRegistry.OLDEST_RUNNING_JOB_AGE_SECS, age); diff --git a/airbyte-protocol/protocol-models/src/main/java/io/airbyte/protocol/models/CatalogHelpers.java b/airbyte-protocol/protocol-models/src/main/java/io/airbyte/protocol/models/CatalogHelpers.java index 29d540454fd4..f1d6aea57d44 100644 --- a/airbyte-protocol/protocol-models/src/main/java/io/airbyte/protocol/models/CatalogHelpers.java +++ b/airbyte-protocol/protocol-models/src/main/java/io/airbyte/protocol/models/CatalogHelpers.java @@ -8,15 +8,25 @@ import com.google.common.annotations.VisibleForTesting; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; +import com.google.common.collect.Sets; +import io.airbyte.commons.json.JsonSchemas; +import io.airbyte.commons.json.JsonSchemas.FieldNameOrList; import io.airbyte.commons.json.Jsons; +import io.airbyte.commons.util.MoreIterators; +import 
io.airbyte.commons.util.MoreLists; +import io.airbyte.protocol.models.transform_models.FieldTransform; +import io.airbyte.protocol.models.transform_models.StreamTransform; +import io.airbyte.protocol.models.transform_models.UpdateFieldSchemaTransform; +import io.airbyte.protocol.models.transform_models.UpdateStreamTransform; import java.util.ArrayList; import java.util.Arrays; import java.util.HashSet; -import java.util.Iterator; import java.util.List; import java.util.Map; +import java.util.Optional; import java.util.Set; import java.util.stream.Collectors; +import org.apache.commons.lang3.tuple.Pair; /** * Helper class for Catalog and Stream related operations. Generally only used in tests. @@ -58,6 +68,64 @@ public static ConfiguredAirbyteStream createConfiguredAirbyteStream(final String .withSyncMode(SyncMode.FULL_REFRESH).withDestinationSyncMode(DestinationSyncMode.OVERWRITE); } + /** + * Converts a {@link ConfiguredAirbyteCatalog} into an {@link AirbyteCatalog}. This is possible + * because the latter is a subset of the former. 
+ * + * @param configuredCatalog - catalog to convert + * @return - airbyte catalog + */ + public static AirbyteCatalog configuredCatalogToCatalog(final ConfiguredAirbyteCatalog configuredCatalog) { + return new AirbyteCatalog().withStreams( + configuredCatalog.getStreams() + .stream() + .map(ConfiguredAirbyteStream::getStream) + .toList()); + } + + /** + * Extracts {@link StreamDescriptor} for a given {@link AirbyteStream} + * + * @param airbyteStream stream + * @return stream descriptor + */ + public static StreamDescriptor extractDescriptor(final ConfiguredAirbyteStream airbyteStream) { + return extractDescriptor(airbyteStream.getStream()); + } + + /** + * Extracts {@link StreamDescriptor} for a given {@link ConfiguredAirbyteStream} + * + * @param airbyteStream stream + * @return stream descriptor + */ + public static StreamDescriptor extractDescriptor(final AirbyteStream airbyteStream) { + return new StreamDescriptor().withName(airbyteStream.getName()).withNamespace(airbyteStream.getNamespace()); + } + + /** + * Extracts {@link StreamDescriptor}s for each stream in a given {@link ConfiguredAirbyteCatalog} + * + * @param configuredCatalog catalog + * @return list of stream descriptors + */ + public static List extractStreamDescriptors(final ConfiguredAirbyteCatalog configuredCatalog) { + return extractStreamDescriptors(configuredCatalogToCatalog(configuredCatalog)); + } + + /** + * Extracts {@link StreamDescriptor}s for each stream in a given {@link AirbyteCatalog} + * + * @param catalog catalog + * @return list of stream descriptors + */ + public static List extractStreamDescriptors(final AirbyteCatalog catalog) { + return catalog.getStreams() + .stream() + .map(CatalogHelpers::extractDescriptor) + .toList(); + } + /** * Convert a Catalog into a ConfiguredCatalog. This applies minimum default to the Catalog to make * it a valid ConfiguredCatalog. 
@@ -70,7 +138,7 @@ public static ConfiguredAirbyteCatalog toDefaultConfiguredCatalog(final AirbyteC .withStreams(catalog.getStreams() .stream() .map(CatalogHelpers::toDefaultConfiguredStream) - .collect(Collectors.toList())); + .toList()); } public static ConfiguredAirbyteStream toDefaultConfiguredStream(final AirbyteStream stream) { @@ -124,31 +192,151 @@ public static Set getTopLevelFieldNames(final ConfiguredAirbyteStream st } /** - * @param node any json node + * @param jsonSchema - a JSONSchema node * @return a set of all keys for all objects within the node */ @VisibleForTesting - protected static Set getAllFieldNames(final JsonNode node) { - final Set allFieldNames = new HashSet<>(); - - if (node.has("properties")) { - final JsonNode properties = node.get("properties"); - final Iterator fieldNames = properties.fieldNames(); - while (fieldNames.hasNext()) { - final String fieldName = fieldNames.next(); - allFieldNames.add(fieldName); - final JsonNode fieldValue = properties.get(fieldName); - if (fieldValue.isObject()) { - allFieldNames.addAll(getAllFieldNames(fieldValue)); - } + protected static Set getAllFieldNames(final JsonNode jsonSchema) { + return getFullyQualifiedFieldNamesWithTypes(jsonSchema) + .stream() + .map(Pair::getLeft) + // only need field name, not fully qualified name + .map(MoreLists::last) + .flatMap(Optional::stream) + .collect(Collectors.toSet()); + } + + /** + * Extracts all fields and their schemas from a JSONSchema. This method returns values in + * depth-first search preorder. It short circuits at oneOfs--in other words, child fields of a oneOf + * are not returned. + * + * @param jsonSchema - a JSONSchema node + * @return a list of all keys for all objects within the node. ordered in depth-first search + * preorder. + */ + @VisibleForTesting + protected static List, JsonNode>> getFullyQualifiedFieldNamesWithTypes(final JsonNode jsonSchema) { + // if this were ever a performance issue, it could be replaced with a trie. 
this seems unlikely + // however. + final Set> fieldNamesThatAreOneOfs = new HashSet<>(); + + return JsonSchemas.traverseJsonSchemaWithCollector(jsonSchema, (node, basicPath) -> { + final List fieldName = basicPath.stream().filter(fieldOrList -> !fieldOrList.isList()).map(FieldNameOrList::getFieldName).toList(); + return Pair.of(fieldName, node); + }) + .stream() + // first node is the original object. + .skip(1) + .filter(fieldWithSchema -> filterChildrenOfFoneOneOf(fieldWithSchema.getLeft(), fieldWithSchema.getRight(), fieldNamesThatAreOneOfs)) + .toList(); + } + + /** + * Predicate that checks if a field is a CHILD of a oneOf field. If child of a oneOf, returns false. + * Otherwise, true. This method as side effects. It assumes that it will be run in order on field + * names returned in depth-first search preoorder. As it encounters oneOfs it adds them to a + * collection. It then checks if subsequent field names are prefix matches to the field that are + * oneOfs. + * + * @param fieldName - field to investigate + * @param schema - schema of field + * @param oneOfFieldNameAccumulator - collection of fields that are oneOfs + * @return If child of a oneOf, returns false. Otherwise, true. + */ + private static boolean filterChildrenOfFoneOneOf(final List fieldName, + final JsonNode schema, + final Set> oneOfFieldNameAccumulator) { + if (isOneOfField(schema)) { + oneOfFieldNameAccumulator.add(fieldName); + // return early because we know it is a oneOf and therefore cannot be a child of a oneOf. + return true; + } + + // leverage that nodes are returned in depth-first search preorder. this means the parent field for + // the oneOf will be present in the list BEFORE any of its children. 
+ for (final List oneOfFieldName : oneOfFieldNameAccumulator) { + final String oneOfFieldNameString = String.join(".", oneOfFieldName); + final String fieldNameString = String.join(".", fieldName); + + if (fieldNameString.startsWith(oneOfFieldNameString)) { + return false; } } + return true; + } - return allFieldNames; + private static boolean isOneOfField(final JsonNode schema) { + return !MoreIterators.toSet(schema.fieldNames()).contains("type"); } - private static boolean isObjectWithSubFields(Field field) { + private static boolean isObjectWithSubFields(final Field field) { return field.getType() == JsonSchemaType.OBJECT && field.getSubFields() != null && !field.getSubFields().isEmpty(); } + public static StreamDescriptor extractStreamDescriptor(final AirbyteStream airbyteStream) { + return new StreamDescriptor().withName(airbyteStream.getName()).withNamespace(airbyteStream.getNamespace()); + } + + private static Map streamDescriptorToMap(final AirbyteCatalog catalog) { + return catalog.getStreams() + .stream() + .collect(Collectors.toMap(CatalogHelpers::extractStreamDescriptor, s -> s)); + } + + /** + * Returns difference between two provided catalogs. 
+ * + * @param oldCatalog - old catalog + * @param newCatalog - new catalog + * @return difference between old and new catalogs + */ + public static Set getCatalogDiff(final AirbyteCatalog oldCatalog, final AirbyteCatalog newCatalog) { + final Set streamTransforms = new HashSet<>(); + + final Map descriptorToStreamOld = streamDescriptorToMap(oldCatalog); + final Map descriptorToStreamNew = streamDescriptorToMap(newCatalog); + + Sets.difference(descriptorToStreamOld.keySet(), descriptorToStreamNew.keySet()) + .forEach(descriptor -> streamTransforms.add(StreamTransform.createRemoveStreamTransform(descriptor))); + Sets.difference(descriptorToStreamNew.keySet(), descriptorToStreamOld.keySet()) + .forEach(descriptor -> streamTransforms.add(StreamTransform.createAddStreamTransform(descriptor))); + Sets.intersection(descriptorToStreamOld.keySet(), descriptorToStreamNew.keySet()) + .forEach(descriptor -> { + final AirbyteStream streamOld = descriptorToStreamOld.get(descriptor); + final AirbyteStream streamNew = descriptorToStreamNew.get(descriptor); + if (!streamOld.equals(streamNew)) { + streamTransforms.add(StreamTransform.createUpdateStreamTransform(descriptor, getStreamDiff(descriptor, streamOld, streamNew))); + } + }); + + return streamTransforms; + } + + private static UpdateStreamTransform getStreamDiff(final StreamDescriptor descriptor, + final AirbyteStream streamOld, + final AirbyteStream streamNew) { + final Set fieldTransforms = new HashSet<>(); + final Map, JsonNode> fieldNameToTypeOld = getFullyQualifiedFieldNamesWithTypes(streamOld.getJsonSchema()) + .stream() + .collect(Collectors.toMap(Pair::getLeft, Pair::getRight)); + final Map, JsonNode> fieldNameToTypeNew = getFullyQualifiedFieldNamesWithTypes(streamNew.getJsonSchema()) + .stream() + .collect(Collectors.toMap(Pair::getLeft, Pair::getRight)); + + Sets.difference(fieldNameToTypeOld.keySet(), fieldNameToTypeNew.keySet()) + .forEach(fieldName -> 
fieldTransforms.add(FieldTransform.createRemoveFieldTransform(fieldName, fieldNameToTypeOld.get(fieldName)))); + Sets.difference(fieldNameToTypeNew.keySet(), fieldNameToTypeOld.keySet()) + .forEach(fieldName -> fieldTransforms.add(FieldTransform.createAddFieldTransform(fieldName, fieldNameToTypeNew.get(fieldName)))); + Sets.intersection(fieldNameToTypeOld.keySet(), fieldNameToTypeNew.keySet()).forEach(fieldName -> { + final JsonNode oldType = fieldNameToTypeOld.get(fieldName); + final JsonNode newType = fieldNameToTypeNew.get(fieldName); + + if (!oldType.equals(newType)) { + fieldTransforms.add(FieldTransform.createUpdateFieldTransform(fieldName, new UpdateFieldSchemaTransform(oldType, newType))); + } + }); + return new UpdateStreamTransform(fieldTransforms); + } + } diff --git a/airbyte-protocol/protocol-models/src/main/java/io/airbyte/protocol/models/transform_models/AddFieldTransform.java b/airbyte-protocol/protocol-models/src/main/java/io/airbyte/protocol/models/transform_models/AddFieldTransform.java new file mode 100644 index 000000000000..e70617c737fd --- /dev/null +++ b/airbyte-protocol/protocol-models/src/main/java/io/airbyte/protocol/models/transform_models/AddFieldTransform.java @@ -0,0 +1,26 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.protocol.models.transform_models; + +import com.fasterxml.jackson.databind.JsonNode; +import lombok.AllArgsConstructor; +import lombok.EqualsAndHashCode; +import lombok.ToString; + +/** + * Represents the addition of a field to an {@link io.airbyte.protocol.models.AirbyteStream}. 
+ */ +@AllArgsConstructor +@EqualsAndHashCode +@ToString +public class AddFieldTransform { + + private final JsonNode schema; + + public JsonNode getSchema() { + return schema; + } + +} diff --git a/airbyte-protocol/protocol-models/src/main/java/io/airbyte/protocol/models/transform_models/AddStreamTransform.java b/airbyte-protocol/protocol-models/src/main/java/io/airbyte/protocol/models/transform_models/AddStreamTransform.java new file mode 100644 index 000000000000..804ad13ced39 --- /dev/null +++ b/airbyte-protocol/protocol-models/src/main/java/io/airbyte/protocol/models/transform_models/AddStreamTransform.java @@ -0,0 +1,27 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.protocol.models.transform_models; + +import io.airbyte.protocol.models.StreamDescriptor; +import lombok.AllArgsConstructor; +import lombok.EqualsAndHashCode; +import lombok.ToString; + +/** + * Represents the addition of an {@link io.airbyte.protocol.models.AirbyteStream} to a + * {@link io.airbyte.protocol.models.AirbyteCatalog}. + */ +@AllArgsConstructor +@EqualsAndHashCode +@ToString +public class AddStreamTransform { + + private final StreamDescriptor streamDescriptor; + + public StreamDescriptor getStreamDescriptor() { + return streamDescriptor; + } + +} diff --git a/airbyte-protocol/protocol-models/src/main/java/io/airbyte/protocol/models/transform_models/FieldTransform.java b/airbyte-protocol/protocol-models/src/main/java/io/airbyte/protocol/models/transform_models/FieldTransform.java new file mode 100644 index 000000000000..c9481b93ae26 --- /dev/null +++ b/airbyte-protocol/protocol-models/src/main/java/io/airbyte/protocol/models/transform_models/FieldTransform.java @@ -0,0 +1,67 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+ */ + +package io.airbyte.protocol.models.transform_models; + +import com.fasterxml.jackson.databind.JsonNode; +import java.util.List; +import lombok.AllArgsConstructor; +import lombok.EqualsAndHashCode; +import lombok.ToString; + +/** + * Represents the diff between two fields. + */ +@AllArgsConstructor +@EqualsAndHashCode +@ToString +public final class FieldTransform { + + private final FieldTransformType transformType; + private final List fieldName; + private final AddFieldTransform addFieldTransform; + private final RemoveFieldTransform removeFieldTransform; + private final UpdateFieldSchemaTransform updateFieldTransform; + + public static FieldTransform createAddFieldTransform(final List fieldName, final JsonNode schema) { + return createAddFieldTransform(fieldName, new AddFieldTransform(schema)); + } + + public static FieldTransform createAddFieldTransform(final List fieldName, final AddFieldTransform addFieldTransform) { + return new FieldTransform(FieldTransformType.ADD_FIELD, fieldName, addFieldTransform, null, null); + } + + public static FieldTransform createRemoveFieldTransform(final List fieldName, final JsonNode schema) { + return createRemoveFieldTransform(fieldName, new RemoveFieldTransform(fieldName, schema)); + } + + public static FieldTransform createRemoveFieldTransform(final List fieldName, final RemoveFieldTransform removeFieldTransform) { + return new FieldTransform(FieldTransformType.REMOVE_FIELD, fieldName, null, removeFieldTransform, null); + } + + public static FieldTransform createUpdateFieldTransform(final List fieldName, final UpdateFieldSchemaTransform updateFieldTransform) { + return new FieldTransform(FieldTransformType.UPDATE_FIELD_SCHEMA, fieldName, null, null, updateFieldTransform); + } + + public FieldTransformType getTransformType() { + return transformType; + } + + public List getFieldName() { + return fieldName; + } + + public AddFieldTransform getAddFieldTransform() { + return addFieldTransform; + } + + public 
RemoveFieldTransform getRemoveFieldTransform() { + return removeFieldTransform; + } + + public UpdateFieldSchemaTransform getUpdateFieldTransform() { + return updateFieldTransform; + } + +} diff --git a/airbyte-protocol/protocol-models/src/main/java/io/airbyte/protocol/models/transform_models/FieldTransformType.java b/airbyte-protocol/protocol-models/src/main/java/io/airbyte/protocol/models/transform_models/FieldTransformType.java new file mode 100644 index 000000000000..ade561ebb89c --- /dev/null +++ b/airbyte-protocol/protocol-models/src/main/java/io/airbyte/protocol/models/transform_models/FieldTransformType.java @@ -0,0 +1,14 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.protocol.models.transform_models; + +/** + * Types of transformations possible for a field. + */ +public enum FieldTransformType { + ADD_FIELD, + REMOVE_FIELD, + UPDATE_FIELD_SCHEMA +} diff --git a/airbyte-protocol/protocol-models/src/main/java/io/airbyte/protocol/models/transform_models/RemoveFieldTransform.java b/airbyte-protocol/protocol-models/src/main/java/io/airbyte/protocol/models/transform_models/RemoveFieldTransform.java new file mode 100644 index 000000000000..a48314c3fa81 --- /dev/null +++ b/airbyte-protocol/protocol-models/src/main/java/io/airbyte/protocol/models/transform_models/RemoveFieldTransform.java @@ -0,0 +1,33 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.protocol.models.transform_models; + +import com.fasterxml.jackson.databind.JsonNode; +import java.util.ArrayList; +import java.util.List; +import lombok.AllArgsConstructor; +import lombok.EqualsAndHashCode; +import lombok.ToString; + +/** + * Represents the removal of a field to an {@link io.airbyte.protocol.models.AirbyteStream}. 
+ */ +@AllArgsConstructor +@EqualsAndHashCode +@ToString +public class RemoveFieldTransform { + + private final List fieldName; + private final JsonNode schema; + + public List getFieldName() { + return new ArrayList<>(fieldName); + } + + public JsonNode getSchema() { + return schema; + } + +} diff --git a/airbyte-protocol/protocol-models/src/main/java/io/airbyte/protocol/models/transform_models/RemoveStreamTransform.java b/airbyte-protocol/protocol-models/src/main/java/io/airbyte/protocol/models/transform_models/RemoveStreamTransform.java new file mode 100644 index 000000000000..c2582f37b71f --- /dev/null +++ b/airbyte-protocol/protocol-models/src/main/java/io/airbyte/protocol/models/transform_models/RemoveStreamTransform.java @@ -0,0 +1,27 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.protocol.models.transform_models; + +import io.airbyte.protocol.models.StreamDescriptor; +import lombok.AllArgsConstructor; +import lombok.EqualsAndHashCode; +import lombok.ToString; + +/** + * Represents the removal of an {@link io.airbyte.protocol.models.AirbyteStream} to a + * {@link io.airbyte.protocol.models.AirbyteCatalog}. + */ +@AllArgsConstructor +@EqualsAndHashCode +@ToString +public class RemoveStreamTransform { + + private final StreamDescriptor streamDescriptor; + + public StreamDescriptor getStreamDescriptor() { + return streamDescriptor; + } + +} diff --git a/airbyte-protocol/protocol-models/src/main/java/io/airbyte/protocol/models/transform_models/StreamTransform.java b/airbyte-protocol/protocol-models/src/main/java/io/airbyte/protocol/models/transform_models/StreamTransform.java new file mode 100644 index 000000000000..14e21e5cdb8e --- /dev/null +++ b/airbyte-protocol/protocol-models/src/main/java/io/airbyte/protocol/models/transform_models/StreamTransform.java @@ -0,0 +1,49 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+ */ + +package io.airbyte.protocol.models.transform_models; + +import io.airbyte.protocol.models.StreamDescriptor; +import lombok.AllArgsConstructor; +import lombok.EqualsAndHashCode; +import lombok.ToString; + +/** + * Represents the diff between two {@link io.airbyte.protocol.models.AirbyteStream}. + */ +@AllArgsConstructor +@EqualsAndHashCode +@ToString +public final class StreamTransform { + + private final StreamTransformType transformType; + private final StreamDescriptor streamDescriptor; + private final UpdateStreamTransform updateStreamTransform; + + public static StreamTransform createAddStreamTransform(final StreamDescriptor streamDescriptor) { + return new StreamTransform(StreamTransformType.ADD_STREAM, streamDescriptor, null); + } + + public static StreamTransform createRemoveStreamTransform(final StreamDescriptor streamDescriptor) { + return new StreamTransform(StreamTransformType.REMOVE_STREAM, streamDescriptor, null); + } + + public static StreamTransform createUpdateStreamTransform(final StreamDescriptor streamDescriptor, + final UpdateStreamTransform updateStreamTransform) { + return new StreamTransform(StreamTransformType.UPDATE_STREAM, streamDescriptor, updateStreamTransform); + } + + public StreamTransformType getTransformType() { + return transformType; + } + + public StreamDescriptor getStreamDescriptor() { + return streamDescriptor; + } + + public UpdateStreamTransform getUpdateStreamTransform() { + return updateStreamTransform; + } + +} diff --git a/airbyte-protocol/protocol-models/src/main/java/io/airbyte/protocol/models/transform_models/StreamTransformType.java b/airbyte-protocol/protocol-models/src/main/java/io/airbyte/protocol/models/transform_models/StreamTransformType.java new file mode 100644 index 000000000000..297bff7e87a9 --- /dev/null +++ b/airbyte-protocol/protocol-models/src/main/java/io/airbyte/protocol/models/transform_models/StreamTransformType.java @@ -0,0 +1,14 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights 
reserved. + */ + +package io.airbyte.protocol.models.transform_models; + +/** + * Types of transformations possible for a stream. + */ +public enum StreamTransformType { + ADD_STREAM, + REMOVE_STREAM, + UPDATE_STREAM +} diff --git a/airbyte-protocol/protocol-models/src/main/java/io/airbyte/protocol/models/transform_models/UpdateFieldSchemaTransform.java b/airbyte-protocol/protocol-models/src/main/java/io/airbyte/protocol/models/transform_models/UpdateFieldSchemaTransform.java new file mode 100644 index 000000000000..4f72c0b62e0a --- /dev/null +++ b/airbyte-protocol/protocol-models/src/main/java/io/airbyte/protocol/models/transform_models/UpdateFieldSchemaTransform.java @@ -0,0 +1,31 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.protocol.models.transform_models; + +import com.fasterxml.jackson.databind.JsonNode; +import lombok.AllArgsConstructor; +import lombok.EqualsAndHashCode; +import lombok.ToString; + +/** + * Represents the update of a field. + */ +@AllArgsConstructor +@EqualsAndHashCode +@ToString +public class UpdateFieldSchemaTransform { + + private final JsonNode oldSchema; + private final JsonNode newSchema; + + public JsonNode getOldSchema() { + return oldSchema; + } + + public JsonNode getNewSchema() { + return newSchema; + } + +} diff --git a/airbyte-protocol/protocol-models/src/main/java/io/airbyte/protocol/models/transform_models/UpdateStreamTransform.java b/airbyte-protocol/protocol-models/src/main/java/io/airbyte/protocol/models/transform_models/UpdateStreamTransform.java new file mode 100644 index 000000000000..4814cf78cc42 --- /dev/null +++ b/airbyte-protocol/protocol-models/src/main/java/io/airbyte/protocol/models/transform_models/UpdateStreamTransform.java @@ -0,0 +1,27 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+ */ + +package io.airbyte.protocol.models.transform_models; + +import java.util.HashSet; +import java.util.Set; +import lombok.AllArgsConstructor; +import lombok.EqualsAndHashCode; +import lombok.ToString; + +/** + * Represents the update of an {@link io.airbyte.protocol.models.AirbyteStream}. + */ +@AllArgsConstructor +@EqualsAndHashCode +@ToString +public class UpdateStreamTransform { + + private final Set fieldTransforms; + + public Set getFieldTransforms() { + return new HashSet<>(fieldTransforms); + } + +} diff --git a/airbyte-protocol/protocol-models/src/main/resources/airbyte_protocol/airbyte_protocol.yaml b/airbyte-protocol/protocol-models/src/main/resources/airbyte_protocol/airbyte_protocol.yaml index 3b81b2a80bd5..68e6bf61c1a9 100644 --- a/airbyte-protocol/protocol-models/src/main/resources/airbyte_protocol/airbyte_protocol.yaml +++ b/airbyte-protocol/protocol-models/src/main/resources/airbyte_protocol/airbyte_protocol.yaml @@ -55,68 +55,83 @@ definitions: - data - emitted_at properties: + namespace: + description: "namespace the data is associated with" + type: string stream: - description: "the name of this record's stream" + description: "stream the data is associated with" type: string data: - description: "the record data" + description: "record data" type: object existingJavaType: com.fasterxml.jackson.databind.JsonNode emitted_at: description: "when the data was emitted from the source. epoch in millisecond." 
type: integer - namespace: - description: "the namespace of this record's stream" - type: string AirbyteStateMessage: type: object additionalProperties: true properties: - state_type: + type: "$ref": "#/definitions/AirbyteStateType" + stream: + "$ref": "#/definitions/AirbyteStreamState" + global: + "$ref": "#/definitions/AirbyteGlobalState" data: description: "(Deprecated) the state data" type: object existingJavaType: com.fasterxml.jackson.databind.JsonNode - global: - "$ref": "#/definitions/AirbyteStateBlob" - streams: - type: array - items: - "$ref": "#/definitions/AirbyteStreamState" - AirbyteStateType: type: string description: > The type of state the other fields represent. - If not set, the state data is interpreted as GLOBAL and should be read from the `data` field for backwards compatibility. - GLOBAL means that the state should be read from `global` and means that it represents the state for all the streams. - PER_STREAM means that the state should be read from `streams`. Each item in the list represents the state for the associated stream. + Is set to LEGACY, the state data should be read from the `data` field for backwards compatibility. + If not set, assume the state object is type LEGACY. + GLOBAL means that the state should be read from `global` and means that it represents the state for all the streams. It contains one shared + state and individual stream states. + PER_STREAM means that the state should be read from `stream`. The state present in this field correspond to the isolated state of the + associated stream description. 
enum: - GLOBAL - - PER_STREAM - + - STREAM + - LEGACY AirbyteStreamState: type: object - description: "per stream state data" - additionalProperties: false + additionalProperties: true + required: + - stream_descriptor + properties: + stream_descriptor: + "$ref": "#/definitions/StreamDescriptor" + stream_state: + "$ref": "#/definitions/AirbyteStateBlob" + AirbyteGlobalState: + type: object + additionalProperties: true + required: + - stream_states + properties: + shared_state: + "$ref": "#/definitions/AirbyteStateBlob" + stream_states: + type: array + items: + "$ref": "#/definitions/AirbyteStreamState" + StreamDescriptor: + type: object + additionalProperties: true required: - name - - state properties: name: - description: "Stream name" type: string - state: - "$ref": "#/definitions/AirbyteStateBlob" namespace: - description: Optional Source-defined namespace. type: string - AirbyteStateBlob: type: object description: "the state data" - additionalProperties: false + additionalProperties: true existingJavaType: com.fasterxml.jackson.databind.JsonNode AirbyteLogMessage: @@ -127,7 +142,7 @@ definitions: - message properties: level: - description: "the type of logging" + description: "log level" type: string enum: - FATAL @@ -137,7 +152,7 @@ definitions: - DEBUG - TRACE message: - description: "the log message" + description: "log message" type: string AirbyteTraceMessage: type: object @@ -364,10 +379,10 @@ definitions: # Connector Type Properties (Common to all connectors from same type) # Source Connectors Properties supportsIncremental: - description: If the connector supports incremental mode or not. + description: (deprecated) If the connector supports incremental mode or not. type: boolean # Destination Connectors Properties - # Normalization is currently implemented using dbt so it requires `supportsDBT` to be true for this to be true. + # Normalization is currently implemented using dbt, so it requires `supportsDBT` to be true for this to be true. 
supportsNormalization: description: If the connector supports normalization or not. type: boolean diff --git a/airbyte-protocol/protocol-models/src/test/java/io/airbyte/protocol/models/CatalogHelpersTest.java b/airbyte-protocol/protocol-models/src/test/java/io/airbyte/protocol/models/CatalogHelpersTest.java index 43cd93aa8d78..ae3cc50d8738 100644 --- a/airbyte-protocol/protocol-models/src/test/java/io/airbyte/protocol/models/CatalogHelpersTest.java +++ b/airbyte-protocol/protocol-models/src/test/java/io/airbyte/protocol/models/CatalogHelpersTest.java @@ -7,17 +7,26 @@ import static org.junit.jupiter.api.Assertions.assertEquals; import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.collect.ImmutableSet; import com.google.common.collect.Sets; import io.airbyte.commons.json.Jsons; import io.airbyte.commons.resources.MoreResources; +import io.airbyte.protocol.models.transform_models.FieldTransform; +import io.airbyte.protocol.models.transform_models.StreamTransform; +import io.airbyte.protocol.models.transform_models.UpdateFieldSchemaTransform; +import io.airbyte.protocol.models.transform_models.UpdateStreamTransform; import java.io.IOException; +import java.util.Comparator; import java.util.List; import java.util.Set; +import java.util.stream.Stream; import org.junit.jupiter.api.Test; class CatalogHelpersTest { + // handy for debugging test only. 
+ private static final Comparator STREAM_TRANSFORM_COMPARATOR = + Comparator.comparing(StreamTransform::getTransformType); + @Test void testFieldToJsonSchema() { final String expected = """ @@ -72,10 +81,38 @@ void testGetTopLevelFieldNames() { void testGetFieldNames() throws IOException { final JsonNode node = Jsons.deserialize(MoreResources.readResource("valid_schema.json")); final Set actualFieldNames = CatalogHelpers.getAllFieldNames(node); - final Set expectedFieldNames = - ImmutableSet.of("date", "CAD", "HKD", "ISK", "PHP", "DKK", "HUF", "ꖇ", "somekey", "something", "nestedkey"); + final List expectedFieldNames = + List.of("CAD", "DKK", "HKD", "HUF", "ISK", "PHP", "date", "nestedkey", "somekey", "something", "something2", "ꖇ"); + + // sort so that the diff is easier to read. + assertEquals(expectedFieldNames.stream().sorted().toList(), actualFieldNames.stream().sorted().toList()); + } + + @Test + void testGetCatalogDiff() throws IOException { + final JsonNode schema1 = Jsons.deserialize(MoreResources.readResource("valid_schema.json")); + final JsonNode schema2 = Jsons.deserialize(MoreResources.readResource("valid_schema2.json")); + final AirbyteCatalog catalog1 = new AirbyteCatalog().withStreams(List.of( + new AirbyteStream().withName("users").withJsonSchema(schema1), + new AirbyteStream().withName("accounts").withJsonSchema(Jsons.emptyObject()))); + final AirbyteCatalog catalog2 = new AirbyteCatalog().withStreams(List.of( + new AirbyteStream().withName("users").withJsonSchema(schema2), + new AirbyteStream().withName("sales").withJsonSchema(Jsons.emptyObject()))); - assertEquals(expectedFieldNames, actualFieldNames); + final Set actualDiff = CatalogHelpers.getCatalogDiff(catalog1, catalog2); + final List expectedDiff = Stream.of( + StreamTransform.createAddStreamTransform(new StreamDescriptor().withName("sales")), + StreamTransform.createRemoveStreamTransform(new StreamDescriptor().withName("accounts")), + StreamTransform.createUpdateStreamTransform(new 
StreamDescriptor().withName("users"), new UpdateStreamTransform(Set.of( + FieldTransform.createAddFieldTransform(List.of("COD"), schema2.get("properties").get("COD")), + FieldTransform.createRemoveFieldTransform(List.of("something2"), schema1.get("properties").get("something2")), + FieldTransform.createRemoveFieldTransform(List.of("HKD"), schema1.get("properties").get("HKD")), + FieldTransform.createUpdateFieldTransform(List.of("CAD"), new UpdateFieldSchemaTransform( + schema1.get("properties").get("CAD"), + schema2.get("properties").get("CAD"))))))) + .sorted(STREAM_TRANSFORM_COMPARATOR) + .toList(); + assertEquals(expectedDiff, actualDiff.stream().sorted(STREAM_TRANSFORM_COMPARATOR).toList()); } } diff --git a/airbyte-protocol/protocol-models/src/test/resources/valid_schema.json b/airbyte-protocol/protocol-models/src/test/resources/valid_schema.json index 0a87904fafd2..a5b7b656f3e2 100644 --- a/airbyte-protocol/protocol-models/src/test/resources/valid_schema.json +++ b/airbyte-protocol/protocol-models/src/test/resources/valid_schema.json @@ -24,6 +24,26 @@ "patternProperties": { ".+": {} } + }, + "something2": { + "oneOf": [ + { + "type": "object", + "properties": { + "oneOfOne": { + "type": "string" + } + } + }, + { + "type": "object", + "properties": { + "oneOfTwo": { + "type": "string" + } + } + } + ] } } } diff --git a/airbyte-protocol/protocol-models/src/test/resources/valid_schema2.json b/airbyte-protocol/protocol-models/src/test/resources/valid_schema2.json new file mode 100644 index 000000000000..f84e8458be7c --- /dev/null +++ b/airbyte-protocol/protocol-models/src/test/resources/valid_schema2.json @@ -0,0 +1,29 @@ +{ + "type": "object", + "properties": { + "date": { "type": "string", "format": "date-time" }, + "CAD": { "type": ["null", "string"] }, + "COD": { "type": ["null", "string"] }, + "ISK": { "type": ["null", "number"] }, + "PHP": { "type": ["null", "number"] }, + "DKK": { "type": ["null", "number"] }, + "HUF": { "type": ["null", "number"] }, + 
"ꖇ": { "type": ["null", "number"] }, + "something": { + "type": ["null", "object"], + "properties": { + "somekey": { + "type": ["null", "object"], + "properties": { + "nestedkey": { + "type": ["null", "number"] + } + } + } + }, + "patternProperties": { + ".+": {} + } + } + } +} diff --git a/airbyte-scheduler/client/src/main/java/io/airbyte/scheduler/client/EventRunner.java b/airbyte-scheduler/client/src/main/java/io/airbyte/scheduler/client/EventRunner.java index c657d63a736c..07ce8bc660b8 100644 --- a/airbyte-scheduler/client/src/main/java/io/airbyte/scheduler/client/EventRunner.java +++ b/airbyte-scheduler/client/src/main/java/io/airbyte/scheduler/client/EventRunner.java @@ -4,7 +4,9 @@ package io.airbyte.scheduler.client; +import io.airbyte.protocol.models.StreamDescriptor; import io.airbyte.workers.temporal.TemporalClient.ManualOperationResult; +import java.util.List; import java.util.Set; import java.util.UUID; @@ -16,9 +18,9 @@ public interface EventRunner { ManualOperationResult startNewCancellation(final UUID connectionId); - ManualOperationResult resetConnection(final UUID connectionId); + ManualOperationResult resetConnection(final UUID connectionId, final List streamsToReset); - ManualOperationResult synchronousResetConnection(final UUID connectionId); + ManualOperationResult synchronousResetConnection(final UUID connectionId, final List streamsToReset); void deleteConnection(final UUID connectionId); diff --git a/airbyte-scheduler/client/src/main/java/io/airbyte/scheduler/client/TemporalEventRunner.java b/airbyte-scheduler/client/src/main/java/io/airbyte/scheduler/client/TemporalEventRunner.java index 87e18b105114..90846af5cb98 100644 --- a/airbyte-scheduler/client/src/main/java/io/airbyte/scheduler/client/TemporalEventRunner.java +++ b/airbyte-scheduler/client/src/main/java/io/airbyte/scheduler/client/TemporalEventRunner.java @@ -4,8 +4,10 @@ package io.airbyte.scheduler.client; +import io.airbyte.protocol.models.StreamDescriptor; import 
io.airbyte.workers.temporal.TemporalClient; import io.airbyte.workers.temporal.TemporalClient.ManualOperationResult; +import java.util.List; import java.util.Set; import java.util.UUID; import lombok.AllArgsConstructor; @@ -27,12 +29,12 @@ public ManualOperationResult startNewCancellation(final UUID connectionId) { return temporalClient.startNewCancellation(connectionId); } - public ManualOperationResult resetConnection(final UUID connectionId) { - return temporalClient.resetConnection(connectionId); + public ManualOperationResult resetConnection(final UUID connectionId, final List streamsToReset) { + return temporalClient.resetConnection(connectionId, streamsToReset); } - public ManualOperationResult synchronousResetConnection(final UUID connectionId) { - return temporalClient.synchronousResetConnection(connectionId); + public ManualOperationResult synchronousResetConnection(final UUID connectionId, final List streamsToReset) { + return temporalClient.synchronousResetConnection(connectionId, streamsToReset); } public void deleteConnection(final UUID connectionId) { diff --git a/airbyte-scheduler/scheduler-models/src/main/java/io/airbyte/scheduler/models/Job.java b/airbyte-scheduler/scheduler-models/src/main/java/io/airbyte/scheduler/models/Job.java index a25d56451edb..ed2f1de729d9 100644 --- a/airbyte-scheduler/scheduler-models/src/main/java/io/airbyte/scheduler/models/Job.java +++ b/airbyte-scheduler/scheduler-models/src/main/java/io/airbyte/scheduler/models/Job.java @@ -109,6 +109,14 @@ public Optional getSuccessOutput() { return getSuccessfulAttempt().flatMap(Attempt::getOutput); } + public Optional getLastFailedAttempt() { + return getAttempts() + .stream() + .sorted(Comparator.comparing(Attempt::getCreatedAtInSecond).reversed()) + .filter(a -> a.getStatus() == AttemptStatus.FAILED) + .findFirst(); + } + public Optional getLastAttemptWithOutput() { return getAttempts() .stream() diff --git 
a/airbyte-scheduler/scheduler-models/src/test/java/io/airbyte/scheduler/models/JobTest.java b/airbyte-scheduler/scheduler-models/src/test/java/io/airbyte/scheduler/models/JobTest.java index 8fde2d1e75d9..e81a15bf58f2 100644 --- a/airbyte-scheduler/scheduler-models/src/test/java/io/airbyte/scheduler/models/JobTest.java +++ b/airbyte-scheduler/scheduler-models/src/test/java/io/airbyte/scheduler/models/JobTest.java @@ -10,9 +10,9 @@ import static org.junit.jupiter.api.Assertions.assertThrows; import static org.junit.jupiter.api.Assertions.assertTrue; -import java.util.Arrays; import java.util.List; import java.util.stream.Collectors; +import java.util.stream.IntStream; import org.junit.jupiter.api.Test; class JobTest { @@ -42,8 +42,8 @@ void testHasRunningAttempt() { } private static Job jobWithAttemptWithStatus(final AttemptStatus... attemptStatuses) { - final List attempts = Arrays.stream(attemptStatuses) - .map(attemptStatus -> new Attempt(1L, 1L, null, null, attemptStatus, null, 0L, 0L, null)) + final List attempts = IntStream.range(0, attemptStatuses.length) + .mapToObj(idx -> new Attempt(idx + 1, 1L, null, null, attemptStatuses[idx], null, idx, 0L, null)) .collect(Collectors.toList()); return new Job(1L, null, null, null, attempts, null, 0L, 0L, 0L); } @@ -60,6 +60,17 @@ void testGetSuccessfulAttempt() { assertEquals(job.getAttempts().get(1), job.getSuccessfulAttempt().get()); } + @Test + void testGetLastFailedAttempt() { + assertTrue(jobWithAttemptWithStatus().getLastFailedAttempt().isEmpty()); + assertTrue(jobWithAttemptWithStatus(AttemptStatus.SUCCEEDED).getLastFailedAttempt().isEmpty()); + assertTrue(jobWithAttemptWithStatus(AttemptStatus.FAILED).getLastFailedAttempt().isPresent()); + + final Job job = jobWithAttemptWithStatus(AttemptStatus.FAILED, AttemptStatus.FAILED); + assertTrue(job.getLastFailedAttempt().isPresent()); + assertEquals(2, job.getLastFailedAttempt().get().getId()); + } + @Test void testValidateStatusTransitionFromPending() { final Job 
pendingJob = jobWithStatus(JobStatus.PENDING); diff --git a/airbyte-scheduler/scheduler-persistence/build.gradle b/airbyte-scheduler/scheduler-persistence/build.gradle index c358972c6735..c40c4355a6ae 100644 --- a/airbyte-scheduler/scheduler-persistence/build.gradle +++ b/airbyte-scheduler/scheduler-persistence/build.gradle @@ -3,6 +3,8 @@ plugins { } dependencies { + implementation 'io.sentry:sentry:6.1.0' + implementation project(':airbyte-analytics') implementation project(':airbyte-commons-docker') implementation project(':airbyte-config:config-models') @@ -16,7 +18,7 @@ dependencies { implementation project(':airbyte-scheduler:scheduler-models') testImplementation libs.flyway.core - testImplementation libs.testcontainers.postgresql + testImplementation libs.platform.testcontainers.postgresql testImplementation project(':airbyte-test-utils') } diff --git a/airbyte-scheduler/scheduler-persistence/src/main/java/io/airbyte/scheduler/persistence/DefaultJobCreator.java b/airbyte-scheduler/scheduler-persistence/src/main/java/io/airbyte/scheduler/persistence/DefaultJobCreator.java index c4ba7efb138c..fd29b5dd32b5 100644 --- a/airbyte-scheduler/scheduler-persistence/src/main/java/io/airbyte/scheduler/persistence/DefaultJobCreator.java +++ b/airbyte-scheduler/scheduler-persistence/src/main/java/io/airbyte/scheduler/persistence/DefaultJobCreator.java @@ -11,31 +11,37 @@ import io.airbyte.config.JobResetConnectionConfig; import io.airbyte.config.JobSyncConfig; import io.airbyte.config.JobTypeResourceLimit.JobType; +import io.airbyte.config.ResetSourceConfiguration; import io.airbyte.config.ResourceRequirements; import io.airbyte.config.SourceConnection; import io.airbyte.config.StandardSync; import io.airbyte.config.StandardSyncOperation; -import io.airbyte.config.persistence.ConfigRepository; +import io.airbyte.config.State; +import io.airbyte.config.helpers.StateMessageHelper; +import io.airbyte.config.persistence.StatePersistence; +import 
io.airbyte.protocol.models.CatalogHelpers; import io.airbyte.protocol.models.ConfiguredAirbyteCatalog; import io.airbyte.protocol.models.DestinationSyncMode; +import io.airbyte.protocol.models.StreamDescriptor; import io.airbyte.protocol.models.SyncMode; import java.io.IOException; import java.util.List; import java.util.Optional; +import java.util.UUID; import javax.annotation.Nullable; public class DefaultJobCreator implements JobCreator { private final JobPersistence jobPersistence; - private final ConfigRepository configRepository; private final ResourceRequirements workerResourceRequirements; + private final StatePersistence statePersistence; public DefaultJobCreator(final JobPersistence jobPersistence, - final ConfigRepository configRepository, - final ResourceRequirements workerResourceRequirements) { + final ResourceRequirements workerResourceRequirements, + final StatePersistence statePersistence) { this.jobPersistence = jobPersistence; - this.configRepository = configRepository; this.workerResourceRequirements = workerResourceRequirements; + this.statePersistence = statePersistence; } @Override @@ -74,7 +80,7 @@ public Optional createSyncJob(final SourceConnection source, workerResourceRequirements, JobType.SYNC)); - configRepository.getConnectionState(standardSync.getConnectionId()).ifPresent(jobSyncConfig::withState); + getCurrentConnectionState(standardSync.getConnectionId()).ifPresent(jobSyncConfig::withState); final JobConfig jobConfig = new JobConfig() .withConfigType(ConfigType.SYNC) @@ -82,23 +88,28 @@ public Optional createSyncJob(final SourceConnection source, return jobPersistence.enqueueJob(standardSync.getConnectionId().toString(), jobConfig); } - // Strategy: - // 1. Set all streams to full refresh - overwrite. - // 2. Create a job where the source emits no records. - // 3. Run a sync from the empty source to the destination. This will overwrite all data for each - // stream in the destination. - // 4. 
The Empty source emits no state message, so state will start at null (i.e. start from the - // beginning on the next sync). @Override public Optional createResetConnectionJob(final DestinationConnection destination, final StandardSync standardSync, final String destinationDockerImage, - final List standardSyncOperations) + final List standardSyncOperations, + final List streamsToReset) throws IOException { final ConfiguredAirbyteCatalog configuredAirbyteCatalog = standardSync.getCatalog(); configuredAirbyteCatalog.getStreams().forEach(configuredAirbyteStream -> { + final StreamDescriptor streamDescriptor = CatalogHelpers.extractDescriptor(configuredAirbyteStream); configuredAirbyteStream.setSyncMode(SyncMode.FULL_REFRESH); - configuredAirbyteStream.setDestinationSyncMode(DestinationSyncMode.OVERWRITE); + if (streamsToReset.contains(streamDescriptor)) { + // The Reset Source will emit no record messages for any streams, so setting the destination sync + // mode to OVERWRITE will empty out this stream in the destination. + // Note: streams in streamsToReset that are NOT in this configured catalog (i.e. deleted streams) + // will still have their state reset by the Reset Source, but will not be modified in the + // destination since they are not present in the catalog that is sent to the destination. 
+ configuredAirbyteStream.setDestinationSyncMode(DestinationSyncMode.OVERWRITE); + } else { + // Set streams that are not being reset to APPEND so that they are not modified in the destination + configuredAirbyteStream.setDestinationSyncMode(DestinationSyncMode.APPEND); + } }); final JobResetConnectionConfig resetConnectionConfig = new JobResetConnectionConfig() .withNamespaceDefinition(standardSync.getNamespaceDefinition()) @@ -110,7 +121,10 @@ public Optional createResetConnectionJob(final DestinationConnection desti .withConfiguredAirbyteCatalog(configuredAirbyteCatalog) .withResourceRequirements(ResourceRequirementsUtils.getResourceRequirements( standardSync.getResourceRequirements(), - workerResourceRequirements)); + workerResourceRequirements)) + .withResetSourceConfiguration(new ResetSourceConfiguration().withStreamsToReset(streamsToReset)); + + getCurrentConnectionState(standardSync.getConnectionId()).ifPresent(resetConnectionConfig::withState); final JobConfig jobConfig = new JobConfig() .withConfigType(ConfigType.RESET_CONNECTION) @@ -118,4 +132,8 @@ public Optional createResetConnectionJob(final DestinationConnection desti return jobPersistence.enqueueJob(standardSync.getConnectionId().toString(), jobConfig); } + private Optional getCurrentConnectionState(final UUID connectionId) throws IOException { + return statePersistence.getCurrentState(connectionId).map(StateMessageHelper::getState); + } + } diff --git a/airbyte-scheduler/scheduler-persistence/src/main/java/io/airbyte/scheduler/persistence/JobCreator.java b/airbyte-scheduler/scheduler-persistence/src/main/java/io/airbyte/scheduler/persistence/JobCreator.java index 4d11673c8897..d80cd36ba4f9 100644 --- a/airbyte-scheduler/scheduler-persistence/src/main/java/io/airbyte/scheduler/persistence/JobCreator.java +++ b/airbyte-scheduler/scheduler-persistence/src/main/java/io/airbyte/scheduler/persistence/JobCreator.java @@ -9,6 +9,7 @@ import io.airbyte.config.SourceConnection; import 
io.airbyte.config.StandardSync; import io.airbyte.config.StandardSyncOperation; +import io.airbyte.protocol.models.StreamDescriptor; import java.io.IOException; import java.util.List; import java.util.Optional; @@ -41,13 +42,15 @@ Optional createSyncJob(SourceConnection source, * @param destination db model representing where data goes * @param standardSync sync options * @param destinationDockerImage docker image to use for the destination + * @param streamsToReset * @return the new job if no other conflicting job was running, otherwise empty * @throws IOException if something wrong happens */ Optional createResetConnectionJob(DestinationConnection destination, StandardSync standardSync, String destinationDockerImage, - List standardSyncOperations) + List standardSyncOperations, + List streamsToReset) throws IOException; } diff --git a/airbyte-scheduler/scheduler-persistence/src/main/java/io/airbyte/scheduler/persistence/job_error_reporter/JobErrorReporter.java b/airbyte-scheduler/scheduler-persistence/src/main/java/io/airbyte/scheduler/persistence/job_error_reporter/JobErrorReporter.java new file mode 100644 index 000000000000..c82cae5dcd95 --- /dev/null +++ b/airbyte-scheduler/scheduler-persistence/src/main/java/io/airbyte/scheduler/persistence/job_error_reporter/JobErrorReporter.java @@ -0,0 +1,102 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+ */ + +package io.airbyte.scheduler.persistence.job_error_reporter; + +import io.airbyte.config.AttemptFailureSummary; +import io.airbyte.config.Configs.DeploymentMode; +import io.airbyte.config.FailureReason; +import io.airbyte.config.FailureReason.FailureOrigin; +import io.airbyte.config.JobSyncConfig; +import io.airbyte.config.StandardDestinationDefinition; +import io.airbyte.config.StandardSourceDefinition; +import io.airbyte.config.StandardWorkspace; +import io.airbyte.config.persistence.ConfigRepository; +import java.util.HashMap; +import java.util.List; +import java.util.UUID; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class JobErrorReporter { + + private static final Logger LOGGER = LoggerFactory.getLogger(JobErrorReporter.class); + + private static final String FROM_TRACE_MESSAGE = "from_trace_message"; + private static final String DEPLOYMENT_MODE_META_KEY = "deployment_mode"; + private static final String AIRBYTE_VERSION_META_KEY = "airbyte_version"; + private static final String FAILURE_ORIGIN_META_KEY = "failure_origin"; + private static final String FAILURE_TYPE_META_KEY = "failure_type"; + private static final String CONNECTION_ID_META_KEY = "connection_id"; + private static final String CONNECTOR_NAME_META_KEY = "connector_name"; + private static final String CONNECTOR_DEFINITION_ID_META_KEY = "connector_definition_id"; + private static final String CONNECTOR_RELEASE_STAGE_META_KEY = "connector_release_stage"; + + private final ConfigRepository configRepository; + private final DeploymentMode deploymentMode; + private final String airbyteVersion; + private final JobErrorReportingClient jobErrorReportingClient; + + public JobErrorReporter(final ConfigRepository configRepository, + final DeploymentMode deploymentMode, + final String airbyteVersion, + final JobErrorReportingClient jobErrorReportingClient) { + + this.configRepository = configRepository; + this.deploymentMode = deploymentMode; + this.airbyteVersion = 
airbyteVersion; + this.jobErrorReportingClient = jobErrorReportingClient; + } + + /** + * Reports a Sync Job's connector-caused FailureReasons to the JobErrorReportingClient + * + * @param connectionId - connection that had the failure + * @param failureSummary - final attempt failure summary + * @param jobSyncConfig - config for the sync job + */ + public void reportSyncJobFailure(final UUID connectionId, final AttemptFailureSummary failureSummary, final JobSyncConfig jobSyncConfig) { + final List traceMessageFailures = failureSummary.getFailures().stream() + .filter(failure -> failure.getMetadata() != null && failure.getMetadata().getAdditionalProperties().containsKey(FROM_TRACE_MESSAGE)) + .toList(); + + final StandardWorkspace workspace = configRepository.getStandardWorkspaceFromConnection(connectionId, true); + + for (final FailureReason failureReason : traceMessageFailures) { + final FailureOrigin failureOrigin = failureReason.getFailureOrigin(); + + final HashMap metadata = new HashMap<>(); + metadata.put(CONNECTION_ID_META_KEY, connectionId.toString()); + metadata.put(AIRBYTE_VERSION_META_KEY, airbyteVersion); + metadata.put(DEPLOYMENT_MODE_META_KEY, deploymentMode.name()); + metadata.put(FAILURE_ORIGIN_META_KEY, failureOrigin.value()); + metadata.put(FAILURE_TYPE_META_KEY, failureReason.getFailureType().value()); + + try { + if (failureOrigin == FailureOrigin.SOURCE) { + final StandardSourceDefinition sourceDefinition = configRepository.getSourceDefinitionFromConnection(connectionId); + final String dockerImage = jobSyncConfig.getSourceDockerImage(); + + metadata.put(CONNECTOR_DEFINITION_ID_META_KEY, sourceDefinition.getSourceDefinitionId().toString()); + metadata.put(CONNECTOR_NAME_META_KEY, sourceDefinition.getName()); + metadata.put(CONNECTOR_RELEASE_STAGE_META_KEY, sourceDefinition.getReleaseStage().value()); + + jobErrorReportingClient.reportJobFailureReason(workspace, failureReason, dockerImage, metadata); + } else if (failureOrigin == 
FailureOrigin.DESTINATION) { + final StandardDestinationDefinition destinationDefinition = configRepository.getDestinationDefinitionFromConnection(connectionId); + final String dockerImage = jobSyncConfig.getDestinationDockerImage(); + + metadata.put(CONNECTOR_DEFINITION_ID_META_KEY, destinationDefinition.getDestinationDefinitionId().toString()); + metadata.put(CONNECTOR_NAME_META_KEY, destinationDefinition.getName()); + metadata.put(CONNECTOR_RELEASE_STAGE_META_KEY, destinationDefinition.getReleaseStage().value()); + + jobErrorReportingClient.reportJobFailureReason(workspace, failureReason, dockerImage, metadata); + } + } catch (final Exception e) { + LOGGER.error("Error when reporting job failure reason: {}", failureReason, e); + } + } + } + +} diff --git a/airbyte-scheduler/scheduler-persistence/src/main/java/io/airbyte/scheduler/persistence/job_error_reporter/JobErrorReportingClient.java b/airbyte-scheduler/scheduler-persistence/src/main/java/io/airbyte/scheduler/persistence/job_error_reporter/JobErrorReportingClient.java new file mode 100644 index 000000000000..3d52f558b667 --- /dev/null +++ b/airbyte-scheduler/scheduler-persistence/src/main/java/io/airbyte/scheduler/persistence/job_error_reporter/JobErrorReportingClient.java @@ -0,0 +1,21 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+ */ + +package io.airbyte.scheduler.persistence.job_error_reporter; + +import io.airbyte.config.FailureReason; +import io.airbyte.config.StandardWorkspace; +import java.util.Map; + +/** + * A generic interface for a client that reports errors + */ +public interface JobErrorReportingClient { + + /** + * Report a job failure reason + */ + void reportJobFailureReason(StandardWorkspace workspace, final FailureReason reason, final String dockerImage, Map metadata); + +} diff --git a/airbyte-scheduler/scheduler-persistence/src/main/java/io/airbyte/scheduler/persistence/job_error_reporter/JobErrorReportingClientFactory.java b/airbyte-scheduler/scheduler-persistence/src/main/java/io/airbyte/scheduler/persistence/job_error_reporter/JobErrorReportingClientFactory.java new file mode 100644 index 000000000000..e24586781fc7 --- /dev/null +++ b/airbyte-scheduler/scheduler-persistence/src/main/java/io/airbyte/scheduler/persistence/job_error_reporter/JobErrorReportingClientFactory.java @@ -0,0 +1,25 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+ */ + +package io.airbyte.scheduler.persistence.job_error_reporter; + +import io.airbyte.config.Configs; +import io.airbyte.config.Configs.JobErrorReportingStrategy; + +public class JobErrorReportingClientFactory { + + /** + * Creates an error reporting client based on the desired strategy to use + * + * @param strategy - which type of error reporting client should be created + * @return JobErrorReportingClient + */ + public static JobErrorReportingClient getClient(final JobErrorReportingStrategy strategy, final Configs configs) { + return switch (strategy) { + case SENTRY -> new SentryJobErrorReportingClient(configs.getJobErrorReportingSentryDSN(), new SentryExceptionHelper()); + case LOGGING -> new LoggingJobErrorReportingClient(); + }; + } + +} diff --git a/airbyte-scheduler/scheduler-persistence/src/main/java/io/airbyte/scheduler/persistence/job_error_reporter/LoggingJobErrorReportingClient.java b/airbyte-scheduler/scheduler-persistence/src/main/java/io/airbyte/scheduler/persistence/job_error_reporter/LoggingJobErrorReportingClient.java new file mode 100644 index 000000000000..cf1cebf1404b --- /dev/null +++ b/airbyte-scheduler/scheduler-persistence/src/main/java/io/airbyte/scheduler/persistence/job_error_reporter/LoggingJobErrorReportingClient.java @@ -0,0 +1,29 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+ */ + +package io.airbyte.scheduler.persistence.job_error_reporter; + +import io.airbyte.config.FailureReason; +import io.airbyte.config.StandardWorkspace; +import java.util.Map; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class LoggingJobErrorReportingClient implements JobErrorReportingClient { + + private static final Logger LOGGER = LoggerFactory.getLogger(LoggingJobErrorReportingClient.class); + + @Override + public void reportJobFailureReason(final StandardWorkspace workspace, + final FailureReason reason, + final String dockerImage, + final Map metadata) { + LOGGER.info("Report Job Error -> workspaceId: {}, dockerImage: {}, failureReason: {}, metadata: {}", + workspace.getWorkspaceId(), + dockerImage, + reason, + metadata); + } + +} diff --git a/airbyte-scheduler/scheduler-persistence/src/main/java/io/airbyte/scheduler/persistence/job_error_reporter/SentryExceptionHelper.java b/airbyte-scheduler/scheduler-persistence/src/main/java/io/airbyte/scheduler/persistence/job_error_reporter/SentryExceptionHelper.java new file mode 100644 index 000000000000..1fe083490c12 --- /dev/null +++ b/airbyte-scheduler/scheduler-persistence/src/main/java/io/airbyte/scheduler/persistence/job_error_reporter/SentryExceptionHelper.java @@ -0,0 +1,169 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.scheduler.persistence.job_error_reporter; + +import io.airbyte.commons.lang.Exceptions; +import io.sentry.protocol.SentryException; +import io.sentry.protocol.SentryStackFrame; +import io.sentry.protocol.SentryStackTrace; +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; +import java.util.Optional; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +public class SentryExceptionHelper { + + /** + * Processes a raw stacktrace string into structured SentryExceptions + *

+ * Currently, Java and Python stacktraces are supported. If an unsupported stacktrace format is + * encountered, an empty optional will be returned, in which case we can fall back to alternate + * grouping. + */ + public Optional> buildSentryExceptions(final String stacktrace) { + return Exceptions.swallowWithDefault(() -> { + if (stacktrace.startsWith("Traceback (most recent call last):")) { + return buildPythonSentryExceptions(stacktrace); + } + if (stacktrace.contains("\tat ") && stacktrace.contains(".java")) { + return buildJavaSentryExceptions(stacktrace); + } + + return Optional.empty(); + }, Optional.empty()); + } + + private static Optional> buildPythonSentryExceptions(final String stacktrace) { + final List sentryExceptions = new ArrayList<>(); + + // separate chained exceptions + // e.g "\n\nThe above exception was the direct cause of the following exception:\n\n" + // "\n\nDuring handling of the above exception, another exception occurred:\n\n" + final String exceptionSeparator = "\n\n[\\w ,]+:\n\n"; + final String[] exceptions = stacktrace.split(exceptionSeparator); + + for (final String exceptionStr : exceptions) { + final SentryStackTrace stackTrace = new SentryStackTrace(); + final List stackFrames = new ArrayList<>(); + + // Use a regex to grab stack trace frame information + final Pattern framePattern = Pattern.compile("File \"(?.+)\", line (?\\d+), in (?.+)\\n {4}(?.+)\\n"); + final Matcher matcher = framePattern.matcher(exceptionStr); + int lastMatchIdx = -1; + + while (matcher.find()) { + final String absPath = matcher.group("absPath"); + final String lineno = matcher.group("lineno"); + final String function = matcher.group("function"); + final String contextLine = matcher.group("contextLine"); + + final SentryStackFrame stackFrame = new SentryStackFrame(); + stackFrame.setAbsPath(absPath); + stackFrame.setLineno(Integer.valueOf(lineno)); + stackFrame.setFunction(function); + stackFrame.setContextLine(contextLine); + 
stackFrames.add(stackFrame); + + lastMatchIdx = matcher.end(); + } + + if (stackFrames.size() > 0) { + stackTrace.setFrames(stackFrames); + + final SentryException sentryException = new SentryException(); + sentryException.setStacktrace(stackTrace); + + // The final part of our stack trace has the exception type and (optionally) a value + // (e.g. "RuntimeError: This is the value") + final String remaining = exceptionStr.substring(lastMatchIdx); + final String[] parts = remaining.split(":", 2); + + if (parts.length > 0) { + sentryException.setType(parts[0].trim()); + if (parts.length == 2) { + sentryException.setValue(parts[1].trim()); + } + + sentryExceptions.add(sentryException); + } + } + } + + if (sentryExceptions.size() == 0) + return Optional.empty(); + + return Optional.of(sentryExceptions); + } + + private static Optional> buildJavaSentryExceptions(final String stacktrace) { + final List sentryExceptions = new ArrayList<>(); + + // separate chained exceptions + // e.g "\nCaused By: " + final String exceptionSeparator = "\n[\\w ]+: "; + final String[] exceptions = stacktrace.split(exceptionSeparator); + + for (final String exceptionStr : exceptions) { + final SentryStackTrace stackTrace = new SentryStackTrace(); + final List stackFrames = new ArrayList<>(); + + // Use a regex to grab stack trace frame information + final Pattern framePattern = Pattern.compile( + "\n\tat (?:[\\w.$/]+/)?(?[\\w$.]+)\\.(?[\\w<>$]+)\\((?:(?[\\w]+\\.java):(?\\d+)\\)|(?[\\w\\s]*))"); + final Matcher matcher = framePattern.matcher(exceptionStr); + + while (matcher.find()) { + final String module = matcher.group("module"); + final String filename = matcher.group("filename"); + final String lineno = matcher.group("lineno"); + final String function = matcher.group("function"); + final String sourceDescription = matcher.group("desc"); + + final SentryStackFrame stackFrame = new SentryStackFrame(); + stackFrame.setModule(module); + stackFrame.setFunction(function); + 
stackFrame.setFilename(filename); + + if (lineno != null) { + stackFrame.setLineno(Integer.valueOf(lineno)); + } + if (sourceDescription != null && sourceDescription.equals("Native Method")) { + stackFrame.setNative(true); + } + + stackFrames.add(stackFrame); + } + + if (stackFrames.size() > 0) { + Collections.reverse(stackFrames); + stackTrace.setFrames(stackFrames); + + final SentryException sentryException = new SentryException(); + sentryException.setStacktrace(stackTrace); + + // The first section of our stacktrace before the first frame has exception type and value + final String[] sections = exceptionStr.split("\n\tat ", 2); + final String[] headerParts = sections[0].split(": ", 2); + + if (headerParts.length > 0) { + sentryException.setType(headerParts[0].trim()); + if (headerParts.length == 2) { + sentryException.setValue(headerParts[1].trim()); + } + + sentryExceptions.add(sentryException); + } + } + } + + if (sentryExceptions.size() == 0) + return Optional.empty(); + + return Optional.of(sentryExceptions); + } + +} diff --git a/airbyte-scheduler/scheduler-persistence/src/main/java/io/airbyte/scheduler/persistence/job_error_reporter/SentryJobErrorReportingClient.java b/airbyte-scheduler/scheduler-persistence/src/main/java/io/airbyte/scheduler/persistence/job_error_reporter/SentryJobErrorReportingClient.java new file mode 100644 index 000000000000..ff509b7ce254 --- /dev/null +++ b/airbyte-scheduler/scheduler-persistence/src/main/java/io/airbyte/scheduler/persistence/job_error_reporter/SentryJobErrorReportingClient.java @@ -0,0 +1,133 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+ */ + +package io.airbyte.scheduler.persistence.job_error_reporter; + +import io.airbyte.config.FailureReason; +import io.airbyte.config.Metadata; +import io.airbyte.config.StandardWorkspace; +import io.sentry.Hub; +import io.sentry.IHub; +import io.sentry.NoOpHub; +import io.sentry.SentryEvent; +import io.sentry.SentryOptions; +import io.sentry.protocol.Message; +import io.sentry.protocol.SentryException; +import io.sentry.protocol.User; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Optional; + +public class SentryJobErrorReportingClient implements JobErrorReportingClient { + + static final String STACKTRACE_PARSE_ERROR_TAG_KEY = "stacktrace_parse_error"; + private final IHub sentryHub; + private final SentryExceptionHelper exceptionHelper; + + SentryJobErrorReportingClient(final IHub sentryHub, final SentryExceptionHelper exceptionHelper) { + this.sentryHub = sentryHub; + this.exceptionHelper = exceptionHelper; + } + + public SentryJobErrorReportingClient(final String sentryDSN, final SentryExceptionHelper exceptionHelper) { + this(createSentryHubWithDSN(sentryDSN), exceptionHelper); + } + + static IHub createSentryHubWithDSN(final String sentryDSN) { + if (sentryDSN == null || sentryDSN.isEmpty()) { + return NoOpHub.getInstance(); + } + + final SentryOptions options = new SentryOptions(); + options.setDsn(sentryDSN); + options.setAttachStacktrace(false); + options.setEnableUncaughtExceptionHandler(false); + return new Hub(options); + } + + /** + * Reports a Connector Job FailureReason to Sentry + * + * @param workspace - Workspace where this failure occurred + * @param failureReason - FailureReason to report + * @param dockerImage - Tagged docker image that represents the release where this failure occurred + * @param metadata - Extra metadata to set as tags on the event + */ + @Override + public void reportJobFailureReason(final StandardWorkspace workspace, + final FailureReason failureReason, + final String 
dockerImage, + final Map metadata) { + final SentryEvent event = new SentryEvent(); + + // Remove invalid characters from the release name, use @ so sentry knows how to grab the tag + // e.g. airbyte/source-xyz:1.2.0 -> airbyte-source-xyz@1.2.0 + // More info at https://docs.sentry.io/product/cli/releases/#creating-releases + final String release = dockerImage.replace("/", "-").replace(":", "@"); + event.setRelease(release); + + // enhance event fingerprint to ensure separate grouping per connector + final String[] releaseParts = release.split("@"); + if (releaseParts.length > 0) { + event.setFingerprints(List.of("{{ default }}", releaseParts[0])); + } + + // set workspace as the user in sentry to get impact and priority + final User sentryUser = new User(); + sentryUser.setId(String.valueOf(workspace.getWorkspaceId())); + sentryUser.setUsername(workspace.getName()); + event.setUser(sentryUser); + + // set metadata as tags + event.setTags(metadata); + + // set failure reason's internalMessage as event message + // Sentry will use this to fuzzy-group if no stacktrace information is available + final Message message = new Message(); + message.setFormatted(failureReason.getInternalMessage()); + event.setMessage(message); + + // events can come from any platform + event.setPlatform("other"); + + // attach failure reason stack trace + final String failureStackTrace = failureReason.getStacktrace(); + if (failureStackTrace != null && !failureStackTrace.isBlank()) { + final Optional> parsedExceptions = exceptionHelper.buildSentryExceptions(failureStackTrace); + if (parsedExceptions.isPresent()) { + event.setExceptions(parsedExceptions.get()); + } else { + event.setTag(STACKTRACE_PARSE_ERROR_TAG_KEY, "1"); + + // We couldn't parse the stacktrace, but we can still give it to Sentry for (less accurate) grouping + final String normalizedStacktrace = failureStackTrace + .replace("\n", ", ") + .replace(failureReason.getInternalMessage(), ""); + + final SentryException 
sentryException = new SentryException(); + sentryException.setValue(normalizedStacktrace); + event.setExceptions(List.of(sentryException)); + } + } + + sentryHub.configureScope(scope -> { + final Map failureReasonContext = new HashMap<>(); + failureReasonContext.put("internalMessage", failureReason.getInternalMessage()); + failureReasonContext.put("externalMessage", failureReason.getExternalMessage()); + failureReasonContext.put("stacktrace", failureReason.getStacktrace()); + failureReasonContext.put("timestamp", failureReason.getTimestamp().toString()); + + final Metadata failureReasonMeta = failureReason.getMetadata(); + if (failureReasonMeta != null) { + failureReasonContext.put("metadata", failureReasonMeta.toString()); + } + + scope.setContexts("Failure Reason", failureReasonContext); + }); + + sentryHub.captureEvent(event); + } + +} diff --git a/airbyte-scheduler/scheduler-persistence/src/test/java/io/airbyte/scheduler/persistence/DefaultJobCreatorTest.java b/airbyte-scheduler/scheduler-persistence/src/test/java/io/airbyte/scheduler/persistence/DefaultJobCreatorTest.java index 61dd61563ede..fc9a1d0e8e8a 100644 --- a/airbyte-scheduler/scheduler-persistence/src/test/java/io/airbyte/scheduler/persistence/DefaultJobCreatorTest.java +++ b/airbyte-scheduler/scheduler-persistence/src/test/java/io/airbyte/scheduler/persistence/DefaultJobCreatorTest.java @@ -25,21 +25,26 @@ import io.airbyte.config.JobTypeResourceLimit.JobType; import io.airbyte.config.OperatorNormalization; import io.airbyte.config.OperatorNormalization.Option; +import io.airbyte.config.ResetSourceConfiguration; import io.airbyte.config.ResourceRequirements; import io.airbyte.config.SourceConnection; import io.airbyte.config.StandardSync; import io.airbyte.config.StandardSyncOperation; import io.airbyte.config.StandardSyncOperation.OperatorType; -import io.airbyte.config.persistence.ConfigRepository; +import io.airbyte.config.State; +import io.airbyte.config.helpers.StateMessageHelper; +import 
io.airbyte.config.persistence.StatePersistence; import io.airbyte.protocol.models.CatalogHelpers; import io.airbyte.protocol.models.ConfiguredAirbyteCatalog; import io.airbyte.protocol.models.ConfiguredAirbyteStream; import io.airbyte.protocol.models.DestinationSyncMode; import io.airbyte.protocol.models.Field; import io.airbyte.protocol.models.JsonSchemaType; +import io.airbyte.protocol.models.StreamDescriptor; +import io.airbyte.protocol.models.SyncMode; import java.io.IOException; -import java.util.Collections; import java.util.List; +import java.util.Map; import java.util.Optional; import java.util.UUID; import org.junit.jupiter.api.BeforeEach; @@ -47,8 +52,13 @@ public class DefaultJobCreatorTest { - private static final String STREAM_NAME = "users"; + private static final String STREAM1_NAME = "stream1"; + private static final String STREAM2_NAME = "stream2"; + private static final String STREAM3_NAME = "stream3"; + private static final String NAMESPACE = "namespace"; private static final String FIELD_NAME = "id"; + private static final StreamDescriptor STREAM1_DESCRIPTOR = new StreamDescriptor().withName(STREAM1_NAME); + private static final StreamDescriptor STREAM2_DESCRIPTOR = new StreamDescriptor().withName(STREAM2_NAME).withNamespace(NAMESPACE); private static final String SOURCE_IMAGE_NAME = "daxtarity/sourceimagename"; private static final String DESTINATION_IMAGE_NAME = "daxtarity/destinationimagename"; @@ -59,7 +69,7 @@ public class DefaultJobCreatorTest { private static final long JOB_ID = 12L; private JobPersistence jobPersistence; - private ConfigRepository configRepository; + private StatePersistence statePersistence; private JobCreator jobCreator; private ResourceRequirements workerResourceRequirements; @@ -90,13 +100,17 @@ public class DefaultJobCreatorTest { .withConfiguration(implementationJson) .withTombstone(false); - final ConfiguredAirbyteStream stream = new ConfiguredAirbyteStream() - 
.withStream(CatalogHelpers.createAirbyteStream(STREAM_NAME, Field.of(FIELD_NAME, JsonSchemaType.STRING))); - final ConfiguredAirbyteCatalog catalog = new ConfiguredAirbyteCatalog().withStreams(Collections.singletonList(stream)); - final UUID connectionId = UUID.randomUUID(); final UUID operationId = UUID.randomUUID(); + final ConfiguredAirbyteStream stream1 = new ConfiguredAirbyteStream() + .withStream(CatalogHelpers.createAirbyteStream(STREAM1_NAME, Field.of(FIELD_NAME, JsonSchemaType.STRING))); + final ConfiguredAirbyteStream stream2 = new ConfiguredAirbyteStream() + .withStream(CatalogHelpers.createAirbyteStream(STREAM2_NAME, NAMESPACE, Field.of(FIELD_NAME, JsonSchemaType.STRING))); + final ConfiguredAirbyteStream stream3 = new ConfiguredAirbyteStream() + .withStream(CatalogHelpers.createAirbyteStream(STREAM3_NAME, NAMESPACE, Field.of(FIELD_NAME, JsonSchemaType.STRING))); + final ConfiguredAirbyteCatalog catalog = new ConfiguredAirbyteCatalog().withStreams(List.of(stream1, stream2, stream3)); + STANDARD_SYNC = new StandardSync() .withConnectionId(connectionId) .withName("presto to hudi") @@ -120,13 +134,13 @@ public class DefaultJobCreatorTest { @BeforeEach void setup() { jobPersistence = mock(JobPersistence.class); - configRepository = mock(ConfigRepository.class); + statePersistence = mock(StatePersistence.class); workerResourceRequirements = new ResourceRequirements() .withCpuLimit("0.2") .withCpuRequest("0.2") .withMemoryLimit("200Mi") .withMemoryRequest("200Mi"); - jobCreator = new DefaultJobCreator(jobPersistence, configRepository, workerResourceRequirements); + jobCreator = new DefaultJobCreator(jobPersistence, workerResourceRequirements, statePersistence); } @Test @@ -322,14 +336,27 @@ void testCreateSyncJobSourceAndDestinationResourceReqs() throws IOException { @Test void testCreateResetConnectionJob() throws IOException { - final ConfiguredAirbyteCatalog expectedCatalog = STANDARD_SYNC.getCatalog(); - expectedCatalog.getStreams() - 
.forEach(configuredAirbyteStream -> { - configuredAirbyteStream.setSyncMode(io.airbyte.protocol.models.SyncMode.FULL_REFRESH); - configuredAirbyteStream.setDestinationSyncMode(DestinationSyncMode.OVERWRITE); - }); - - final JobResetConnectionConfig JobResetConnectionConfig = new JobResetConnectionConfig() + final List streamsToReset = List.of(STREAM1_DESCRIPTOR, STREAM2_DESCRIPTOR); + final ConfiguredAirbyteCatalog expectedCatalog = new ConfiguredAirbyteCatalog().withStreams(List.of( + new ConfiguredAirbyteStream() + .withStream(CatalogHelpers.createAirbyteStream(STREAM1_NAME, Field.of(FIELD_NAME, JsonSchemaType.STRING))) + .withSyncMode(SyncMode.FULL_REFRESH) + .withDestinationSyncMode(DestinationSyncMode.OVERWRITE), + new ConfiguredAirbyteStream() + .withStream(CatalogHelpers.createAirbyteStream(STREAM2_NAME, NAMESPACE, Field.of(FIELD_NAME, JsonSchemaType.STRING))) + .withSyncMode(SyncMode.FULL_REFRESH) + .withDestinationSyncMode(DestinationSyncMode.OVERWRITE), + // this stream is not being reset, so it should have APPEND destination sync mode + new ConfiguredAirbyteStream() + .withStream(CatalogHelpers.createAirbyteStream(STREAM3_NAME, NAMESPACE, Field.of(FIELD_NAME, JsonSchemaType.STRING))) + .withSyncMode(SyncMode.FULL_REFRESH) + .withDestinationSyncMode(DestinationSyncMode.APPEND))); + + final State connectionState = new State().withState(Jsons.jsonNode(Map.of("key", "val"))); + when(statePersistence.getCurrentState(STANDARD_SYNC.getConnectionId())) + .thenReturn(StateMessageHelper.getTypedState(connectionState.getState(), false)); + + final JobResetConnectionConfig jobResetConnectionConfig = new JobResetConnectionConfig() .withNamespaceDefinition(STANDARD_SYNC.getNamespaceDefinition()) .withNamespaceFormat(STANDARD_SYNC.getNamespaceFormat()) .withPrefix(STANDARD_SYNC.getPrefix()) @@ -337,33 +364,52 @@ void testCreateResetConnectionJob() throws IOException { .withDestinationDockerImage(DESTINATION_IMAGE_NAME) .withConfiguredAirbyteCatalog(expectedCatalog) 
.withOperationSequence(List.of(STANDARD_SYNC_OPERATION)) - .withResourceRequirements(workerResourceRequirements); + .withResourceRequirements(workerResourceRequirements) + .withResetSourceConfiguration(new ResetSourceConfiguration().withStreamsToReset(streamsToReset)) + .withState(connectionState); final JobConfig jobConfig = new JobConfig() .withConfigType(ConfigType.RESET_CONNECTION) - .withResetConnection(JobResetConnectionConfig); + .withResetConnection(jobResetConnectionConfig); final String expectedScope = STANDARD_SYNC.getConnectionId().toString(); when(jobPersistence.enqueueJob(expectedScope, jobConfig)).thenReturn(Optional.of(JOB_ID)); - final long jobId = jobCreator.createResetConnectionJob( + final Optional jobId = jobCreator.createResetConnectionJob( DESTINATION_CONNECTION, STANDARD_SYNC, DESTINATION_IMAGE_NAME, - List.of(STANDARD_SYNC_OPERATION)).orElseThrow(); - assertEquals(JOB_ID, jobId); + List.of(STANDARD_SYNC_OPERATION), + streamsToReset); + + verify(jobPersistence).enqueueJob(expectedScope, jobConfig); + assertTrue(jobId.isPresent()); + assertEquals(JOB_ID, jobId.get()); } @Test void testCreateResetConnectionJobEnsureNoQueuing() throws IOException { - final ConfiguredAirbyteCatalog expectedCatalog = STANDARD_SYNC.getCatalog(); - expectedCatalog.getStreams() - .forEach(configuredAirbyteStream -> { - configuredAirbyteStream.setSyncMode(io.airbyte.protocol.models.SyncMode.FULL_REFRESH); - configuredAirbyteStream.setDestinationSyncMode(DestinationSyncMode.OVERWRITE); - }); - - final JobResetConnectionConfig JobResetConnectionConfig = new JobResetConnectionConfig() + final List streamsToReset = List.of(STREAM1_DESCRIPTOR, STREAM2_DESCRIPTOR); + final ConfiguredAirbyteCatalog expectedCatalog = new ConfiguredAirbyteCatalog().withStreams(List.of( + new ConfiguredAirbyteStream() + .withStream(CatalogHelpers.createAirbyteStream(STREAM1_NAME, Field.of(FIELD_NAME, JsonSchemaType.STRING))) + .withSyncMode(SyncMode.FULL_REFRESH) + 
.withDestinationSyncMode(DestinationSyncMode.OVERWRITE), + new ConfiguredAirbyteStream() + .withStream(CatalogHelpers.createAirbyteStream(STREAM2_NAME, NAMESPACE, Field.of(FIELD_NAME, JsonSchemaType.STRING))) + .withSyncMode(SyncMode.FULL_REFRESH) + .withDestinationSyncMode(DestinationSyncMode.OVERWRITE), + // this stream is not being reset, so it should have APPEND destination sync mode + new ConfiguredAirbyteStream() + .withStream(CatalogHelpers.createAirbyteStream(STREAM3_NAME, NAMESPACE, Field.of(FIELD_NAME, JsonSchemaType.STRING))) + .withSyncMode(SyncMode.FULL_REFRESH) + .withDestinationSyncMode(DestinationSyncMode.APPEND))); + + final State connectionState = new State().withState(Jsons.jsonNode(Map.of("key", "val"))); + when(statePersistence.getCurrentState(STANDARD_SYNC.getConnectionId())) + .thenReturn(StateMessageHelper.getTypedState(connectionState.getState(), false)); + + final JobResetConnectionConfig jobResetConnectionConfig = new JobResetConnectionConfig() .withNamespaceDefinition(STANDARD_SYNC.getNamespaceDefinition()) .withNamespaceFormat(STANDARD_SYNC.getNamespaceFormat()) .withPrefix(STANDARD_SYNC.getPrefix()) @@ -371,20 +417,26 @@ void testCreateResetConnectionJobEnsureNoQueuing() throws IOException { .withDestinationDockerImage(DESTINATION_IMAGE_NAME) .withConfiguredAirbyteCatalog(expectedCatalog) .withOperationSequence(List.of(STANDARD_SYNC_OPERATION)) - .withResourceRequirements(workerResourceRequirements); + .withResourceRequirements(workerResourceRequirements) + .withResetSourceConfiguration(new ResetSourceConfiguration().withStreamsToReset(streamsToReset)) + .withState(connectionState); final JobConfig jobConfig = new JobConfig() .withConfigType(ConfigType.RESET_CONNECTION) - .withResetConnection(JobResetConnectionConfig); + .withResetConnection(jobResetConnectionConfig); final String expectedScope = STANDARD_SYNC.getConnectionId().toString(); when(jobPersistence.enqueueJob(expectedScope, jobConfig)).thenReturn(Optional.empty()); - 
assertTrue(jobCreator.createResetConnectionJob( + final Optional jobId = jobCreator.createResetConnectionJob( DESTINATION_CONNECTION, STANDARD_SYNC, DESTINATION_IMAGE_NAME, - List.of(STANDARD_SYNC_OPERATION)).isEmpty()); + List.of(STANDARD_SYNC_OPERATION), + streamsToReset); + + verify(jobPersistence).enqueueJob(expectedScope, jobConfig); + assertTrue(jobId.isEmpty()); } } diff --git a/airbyte-scheduler/scheduler-persistence/src/test/java/io/airbyte/scheduler/persistence/job_error_reporter/JobErrorReporterTest.java b/airbyte-scheduler/scheduler-persistence/src/test/java/io/airbyte/scheduler/persistence/job_error_reporter/JobErrorReporterTest.java new file mode 100644 index 000000000000..ae99ad02ad53 --- /dev/null +++ b/airbyte-scheduler/scheduler-persistence/src/test/java/io/airbyte/scheduler/persistence/job_error_reporter/JobErrorReporterTest.java @@ -0,0 +1,147 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.scheduler.persistence.job_error_reporter; + +import static org.mockito.Mockito.mock; + +import io.airbyte.config.AttemptFailureSummary; +import io.airbyte.config.Configs.DeploymentMode; +import io.airbyte.config.FailureReason; +import io.airbyte.config.FailureReason.FailureOrigin; +import io.airbyte.config.FailureReason.FailureType; +import io.airbyte.config.JobSyncConfig; +import io.airbyte.config.Metadata; +import io.airbyte.config.StandardDestinationDefinition; +import io.airbyte.config.StandardSourceDefinition; +import io.airbyte.config.StandardWorkspace; +import io.airbyte.config.persistence.ConfigRepository; +import java.util.List; +import java.util.Map; +import java.util.UUID; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.mockito.Mockito; + +public class JobErrorReporterTest { + + private static final UUID CONNECTION_ID = UUID.randomUUID(); + private static final DeploymentMode DEPLOYMENT_MODE = DeploymentMode.OSS; + 
private static final String AIRBYTE_VERSION = "0.1.40"; + private static final UUID SOURCE_DEFINITION_ID = UUID.randomUUID(); + private static final String SOURCE_DEFINITION_NAME = "stripe"; + private static final String SOURCE_DOCKER_IMAGE = "airbyte/source-stripe:1.2.3"; + private static final StandardSourceDefinition.ReleaseStage SOURCE_RELEASE_STAGE = StandardSourceDefinition.ReleaseStage.BETA; + private static final UUID DESTINATION_DEFINITION_ID = UUID.randomUUID(); + private static final String DESTINATION_DEFINITION_NAME = "snowflake"; + private static final StandardDestinationDefinition.ReleaseStage DESTINATION_RELEASE_STAGE = StandardDestinationDefinition.ReleaseStage.BETA; + private static final String DESTINATION_DOCKER_IMAGE = "airbyte/destination-snowflake:1.2.3"; + + private ConfigRepository configRepository; + private JobErrorReportingClient jobErrorReportingClient; + private JobErrorReporter jobErrorReporter; + + @BeforeEach + void setup() { + configRepository = mock(ConfigRepository.class); + jobErrorReportingClient = mock(JobErrorReportingClient.class); + jobErrorReporter = new JobErrorReporter(configRepository, DEPLOYMENT_MODE, AIRBYTE_VERSION, jobErrorReportingClient); + } + + @Test + void testReportSyncJobFailure() { + final AttemptFailureSummary mFailureSummary = Mockito.mock(AttemptFailureSummary.class); + + final FailureReason sourceFailureReason = new FailureReason() + .withMetadata(new Metadata().withAdditionalProperty("from_trace_message", true)) + .withFailureOrigin(FailureOrigin.SOURCE) + .withFailureType(FailureType.SYSTEM_ERROR); + + final FailureReason destinationFailureReason = new FailureReason() + .withMetadata(new Metadata().withAdditionalProperty("from_trace_message", true)) + .withFailureOrigin(FailureOrigin.DESTINATION) + .withFailureType(FailureType.SYSTEM_ERROR); + + final FailureReason nonTraceMessageFailureReason = new FailureReason().withFailureOrigin(FailureOrigin.SOURCE); + final FailureReason replicationFailureReason 
= new FailureReason().withFailureOrigin(FailureOrigin.REPLICATION); + + Mockito.when(mFailureSummary.getFailures()) + .thenReturn(List.of(sourceFailureReason, destinationFailureReason, nonTraceMessageFailureReason, replicationFailureReason)); + + final JobSyncConfig mJobSyncConfig = Mockito.mock(JobSyncConfig.class); + Mockito.when(mJobSyncConfig.getSourceDockerImage()).thenReturn(SOURCE_DOCKER_IMAGE); + Mockito.when(mJobSyncConfig.getDestinationDockerImage()).thenReturn(DESTINATION_DOCKER_IMAGE); + + Mockito.when(configRepository.getSourceDefinitionFromConnection(CONNECTION_ID)) + .thenReturn(new StandardSourceDefinition() + .withReleaseStage(SOURCE_RELEASE_STAGE) + .withSourceDefinitionId(SOURCE_DEFINITION_ID) + .withName(SOURCE_DEFINITION_NAME)); + + Mockito.when(configRepository.getDestinationDefinitionFromConnection(CONNECTION_ID)) + .thenReturn(new StandardDestinationDefinition() + .withReleaseStage(DESTINATION_RELEASE_STAGE) + .withDestinationDefinitionId(DESTINATION_DEFINITION_ID) + .withName(DESTINATION_DEFINITION_NAME)); + + final StandardWorkspace mWorkspace = Mockito.mock(StandardWorkspace.class); + Mockito.when(configRepository.getStandardWorkspaceFromConnection(CONNECTION_ID, true)).thenReturn(mWorkspace); + + jobErrorReporter.reportSyncJobFailure(CONNECTION_ID, mFailureSummary, mJobSyncConfig); + + final Map expectedSourceMetadata = Map.of( + "connection_id", CONNECTION_ID.toString(), + "deployment_mode", DEPLOYMENT_MODE.name(), + "airbyte_version", AIRBYTE_VERSION, + "failure_origin", "source", + "failure_type", "system_error", + "connector_definition_id", SOURCE_DEFINITION_ID.toString(), + "connector_name", SOURCE_DEFINITION_NAME, + "connector_release_stage", SOURCE_RELEASE_STAGE.toString()); + + final Map expectedDestinationMetadata = Map.of( + "connection_id", CONNECTION_ID.toString(), + "deployment_mode", DEPLOYMENT_MODE.name(), + "airbyte_version", AIRBYTE_VERSION, + "failure_origin", "destination", + "failure_type", "system_error", + 
"connector_definition_id", DESTINATION_DEFINITION_ID.toString(), + "connector_name", DESTINATION_DEFINITION_NAME, + "connector_release_stage", DESTINATION_RELEASE_STAGE.toString()); + + Mockito.verify(jobErrorReportingClient).reportJobFailureReason(mWorkspace, sourceFailureReason, SOURCE_DOCKER_IMAGE, expectedSourceMetadata); + Mockito.verify(jobErrorReportingClient).reportJobFailureReason(mWorkspace, destinationFailureReason, DESTINATION_DOCKER_IMAGE, + expectedDestinationMetadata); + Mockito.verifyNoMoreInteractions(jobErrorReportingClient); + } + + @Test + void testReportSyncJobFailureDoesNotThrow() { + final AttemptFailureSummary mFailureSummary = Mockito.mock(AttemptFailureSummary.class); + final JobSyncConfig mJobSyncConfig = Mockito.mock(JobSyncConfig.class); + + final FailureReason sourceFailureReason = new FailureReason() + .withMetadata(new Metadata().withAdditionalProperty("from_trace_message", true)) + .withFailureOrigin(FailureOrigin.SOURCE) + .withFailureType(FailureType.SYSTEM_ERROR); + + Mockito.when(mFailureSummary.getFailures()).thenReturn(List.of(sourceFailureReason)); + + Mockito.when(configRepository.getSourceDefinitionFromConnection(CONNECTION_ID)) + .thenReturn(new StandardSourceDefinition() + .withReleaseStage(SOURCE_RELEASE_STAGE) + .withSourceDefinitionId(SOURCE_DEFINITION_ID) + .withName(SOURCE_DEFINITION_NAME)); + + Mockito.doThrow(new RuntimeException("some exception")) + .when(jobErrorReportingClient) + .reportJobFailureReason(Mockito.any(), Mockito.eq(sourceFailureReason), Mockito.any(), Mockito.any()); + + Assertions.assertDoesNotThrow(() -> jobErrorReporter.reportSyncJobFailure(CONNECTION_ID, mFailureSummary, mJobSyncConfig)); + Mockito.verify(jobErrorReportingClient, Mockito.times(1)) + .reportJobFailureReason(Mockito.any(), Mockito.any(), Mockito.any(), Mockito.any()); + } + +} diff --git 
a/airbyte-scheduler/scheduler-persistence/src/test/java/io/airbyte/scheduler/persistence/job_error_reporter/JobErrorReportingClientFactoryTest.java b/airbyte-scheduler/scheduler-persistence/src/test/java/io/airbyte/scheduler/persistence/job_error_reporter/JobErrorReportingClientFactoryTest.java new file mode 100644 index 000000000000..b6ebd65ad6a5 --- /dev/null +++ b/airbyte-scheduler/scheduler-persistence/src/test/java/io/airbyte/scheduler/persistence/job_error_reporter/JobErrorReportingClientFactoryTest.java @@ -0,0 +1,33 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.scheduler.persistence.job_error_reporter; + +import static org.junit.jupiter.api.Assertions.assertTrue; + +import io.airbyte.config.Configs; +import io.airbyte.config.Configs.JobErrorReportingStrategy; +import org.junit.jupiter.api.Test; +import org.mockito.Mockito; + +public class JobErrorReportingClientFactoryTest { + + @Test + void testCreateErrorReportingClientLogging() { + assertTrue( + JobErrorReportingClientFactory.getClient( + JobErrorReportingStrategy.LOGGING, Mockito.mock(Configs.class)) instanceof LoggingJobErrorReportingClient); + } + + @Test + void testCreateErrorReportingClientSentry() { + final Configs configsMock = Mockito.mock(Configs.class); + Mockito.when(configsMock.getJobErrorReportingSentryDSN()).thenReturn(""); + + assertTrue( + JobErrorReportingClientFactory.getClient( + JobErrorReportingStrategy.SENTRY, configsMock) instanceof SentryJobErrorReportingClient); + } + +} diff --git a/airbyte-scheduler/scheduler-persistence/src/test/java/io/airbyte/scheduler/persistence/job_error_reporter/SentryExceptionHelperTest.java b/airbyte-scheduler/scheduler-persistence/src/test/java/io/airbyte/scheduler/persistence/job_error_reporter/SentryExceptionHelperTest.java new file mode 100644 index 000000000000..55aa7dc2c385 --- /dev/null +++ 
b/airbyte-scheduler/scheduler-persistence/src/test/java/io/airbyte/scheduler/persistence/job_error_reporter/SentryExceptionHelperTest.java @@ -0,0 +1,366 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.scheduler.persistence.job_error_reporter; + +import io.sentry.protocol.SentryException; +import io.sentry.protocol.SentryStackFrame; +import io.sentry.protocol.SentryStackTrace; +import java.util.List; +import java.util.Map; +import java.util.Optional; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; + +public class SentryExceptionHelperTest { + + final SentryExceptionHelper exceptionHelper = new SentryExceptionHelper(); + + @Test + void testBuildSentryExceptionsInvalid() { + final String stacktrace = "this is not a stacktrace"; + final Optional> exceptionList = exceptionHelper.buildSentryExceptions(stacktrace); + Assertions.assertTrue(exceptionList.isEmpty()); + } + + @Test + void testBuildSentryExceptionsPartiallyInvalid() { + final String stacktrace = "Traceback (most recent call last):\n Oops!"; + final Optional> exceptionList = exceptionHelper.buildSentryExceptions(stacktrace); + Assertions.assertTrue(exceptionList.isEmpty()); + } + + @Test + void testBuildSentryExceptionsPythonChained() { + final String stacktrace = + """ + Traceback (most recent call last): + File "/airbyte/connector-errors/error.py", line 31, in read_records + failing_method() + File "/airbyte/connector-errors/error.py", line 36, in failing_method + raise HTTPError(http_error_msg, response=self) + requests.exceptions.HTTPError: 400 Client Error: Bad Request for url: https://airbyte.com + + The above exception was the direct cause of the following exception: + + Traceback (most recent call last): + File "/airbyte/connector-errors/error.py", line 39, in + main() + File "/airbyte/connector-errors/error.py", line 13, in main + sync_mode("incremental") + File "/airbyte/connector-errors/error.py", line 17, in sync_mode + 
incremental() + File "/airbyte/connector-errors/error.py", line 33, in incremental + raise RuntimeError("My other error") from err + RuntimeError: My other error + """; + + final Optional> optionalSentryExceptions = exceptionHelper.buildSentryExceptions(stacktrace); + Assertions.assertTrue(optionalSentryExceptions.isPresent()); + final List exceptionList = optionalSentryExceptions.get(); + Assertions.assertEquals(2, exceptionList.size()); + + assertExceptionContent(exceptionList.get(0), "requests.exceptions.HTTPError", "400 Client Error: Bad Request for url: https://airbyte.com", + List.of( + Map.of( + "abspath", "/airbyte/connector-errors/error.py", + "lineno", 31, + "function", "read_records", + "context_line", "failing_method()"), + Map.of( + "abspath", "/airbyte/connector-errors/error.py", + "lineno", 36, + "function", "failing_method", + "context_line", "raise HTTPError(http_error_msg, response=self)"))); + + assertExceptionContent(exceptionList.get(1), "RuntimeError", "My other error", List.of( + Map.of( + "abspath", "/airbyte/connector-errors/error.py", + "lineno", 39, + "function", "", + "context_line", "main()"), + Map.of( + "abspath", "/airbyte/connector-errors/error.py", + "lineno", 13, + "function", "main", + "context_line", "sync_mode(\"incremental\")"), + Map.of( + "abspath", "/airbyte/connector-errors/error.py", + "lineno", 17, + "function", "sync_mode", + "context_line", "incremental()"), + Map.of( + "abspath", "/airbyte/connector-errors/error.py", + "lineno", 33, + "function", "incremental", + "context_line", "raise RuntimeError(\"My other error\") from err"))); + + } + + @Test + void testBuildSentryExceptionsPythonNoValue() { + final String stacktrace = + """ + Traceback (most recent call last): + File "/airbyte/connector-errors/error.py", line 33, in incremental + raise RuntimeError() + RuntimeError + """; + + final Optional> optionalSentryExceptions = exceptionHelper.buildSentryExceptions(stacktrace); + 
Assertions.assertTrue(optionalSentryExceptions.isPresent()); + final List exceptionList = optionalSentryExceptions.get(); + Assertions.assertEquals(1, exceptionList.size()); + + assertExceptionContent(exceptionList.get(0), "RuntimeError", null, List.of( + Map.of( + "abspath", "/airbyte/connector-errors/error.py", + "lineno", 33, + "function", "incremental", + "context_line", "raise RuntimeError()"))); + } + + @Test + void testBuildSentryExceptionsPythonMultilineValue() { + final String stacktrace = + """ + Traceback (most recent call last): + File "/usr/local/lib/python3.9/site-packages/grpc/_channel.py", line 849, in _end_unary_response_blocking + raise _InactiveRpcError(state) + grpc._channel._InactiveRpcError: <_InactiveRpcError of RPC that terminated with: + status = StatusCode.INTERNAL + details = "Internal error encountered." + > + + During handling of the above exception, another exception occurred: + + Traceback (most recent call last): + File "/usr/local/lib/python3.9/site-packages/google/api_core/exceptions.py", line 553, in _parse_grpc_error_details + status = rpc_status.from_call(rpc_exc) + AttributeError: 'NoneType' object has no attribute 'from_call' + """; + + final Optional> optionalSentryExceptions = exceptionHelper.buildSentryExceptions(stacktrace); + Assertions.assertTrue(optionalSentryExceptions.isPresent()); + final List exceptionList = optionalSentryExceptions.get(); + Assertions.assertEquals(2, exceptionList.size()); + + final String expectedValue = + """ + <_InactiveRpcError of RPC that terminated with: + status = StatusCode.INTERNAL + details = "Internal error encountered." 
+ >"""; + + assertExceptionContent(exceptionList.get(0), "grpc._channel._InactiveRpcError", expectedValue, List.of( + Map.of( + "abspath", "/usr/local/lib/python3.9/site-packages/grpc/_channel.py", + "lineno", 849, + "function", "_end_unary_response_blocking", + "context_line", "raise _InactiveRpcError(state)"))); + + assertExceptionContent(exceptionList.get(1), "AttributeError", "'NoneType' object has no attribute 'from_call'", List.of( + Map.of( + "abspath", "/usr/local/lib/python3.9/site-packages/google/api_core/exceptions.py", + "lineno", 553, + "function", "_parse_grpc_error_details", + "context_line", "status = rpc_status.from_call(rpc_exc)"))); + } + + @Test + void testBuildSentryExceptionsJava() { + final String stacktrace = + """ + java.lang.ArithmeticException: / by zero + at io.airbyte.integrations.base.AirbyteTraceMessageUtilityTest.testCorrectStacktraceFormat(AirbyteTraceMessageUtilityTest.java:61) + at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at org.junit.jupiter.engine.execution.ExecutableInvoker$ReflectiveInterceptorCall.lambda$ofVoidMethod$0(ExecutableInvoker.java:115) + at app//org.junit.platform.engine.support.hierarchical.NodeTestTask.lambda$executeRecursively$8(NodeTestTask.java:141) + at org.junit.platform.engine.support.hierarchical.ThrowableCollector.execute(ThrowableCollector.java:73) + at jdk.proxy2/jdk.proxy2.$Proxy5.stop(Unknown Source) + at worker.org.gradle.process.internal.worker.GradleWorkerMain.main(GradleWorkerMain.java:74) + """; + + final Optional> optionalSentryExceptions = exceptionHelper.buildSentryExceptions(stacktrace); + Assertions.assertTrue(optionalSentryExceptions.isPresent()); + final List exceptionList = optionalSentryExceptions.get(); + Assertions.assertEquals(1, exceptionList.size()); + + assertExceptionContent(exceptionList.get(0), "java.lang.ArithmeticException", "/ by zero", + List.of( + Map.of( + "filename", "GradleWorkerMain.java", + "lineno", 74, + "module", 
"worker.org.gradle.process.internal.worker.GradleWorkerMain", + "function", "main"), + Map.of( + "module", "jdk.proxy2.$Proxy5", + "function", "stop"), + Map.of( + "filename", "ThrowableCollector.java", + "lineno", 73, + "module", "org.junit.platform.engine.support.hierarchical.ThrowableCollector", + "function", "execute"), + Map.of( + "filename", "NodeTestTask.java", + "lineno", 141, + "module", "org.junit.platform.engine.support.hierarchical.NodeTestTask", + "function", "lambda$executeRecursively$8"), + Map.of( + "filename", "ExecutableInvoker.java", + "lineno", 115, + "module", "org.junit.jupiter.engine.execution.ExecutableInvoker$ReflectiveInterceptorCall", + "function", "lambda$ofVoidMethod$0"), + Map.of( + "isNative", true, + "module", "jdk.internal.reflect.NativeMethodAccessorImpl", + "function", "invoke0"), + Map.of( + "filename", "AirbyteTraceMessageUtilityTest.java", + "lineno", 61, + "module", "io.airbyte.integrations.base.AirbyteTraceMessageUtilityTest", + "function", "testCorrectStacktraceFormat"))); + } + + @Test + void testBuildSentryExceptionsJavaChained() { + final String stacktrace = + """ + java.util.concurrent.CompletionException: io.airbyte.workers.DefaultReplicationWorker$DestinationException: Destination process exited with non-zero exit code 1 + at java.base/java.util.concurrent.CompletableFuture.encodeThrowable(CompletableFuture.java:315) + at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:635) + at java.base/java.lang.Thread.run(Thread.java:833) + Suppressed: io.airbyte.workers.exception.WorkerException: Source process exit with code 1. This warning is normal if the job was cancelled. 
+ at io.airbyte.workers.internal.DefaultAirbyteSource.close(DefaultAirbyteSource.java:136) + at io.airbyte.workers.general.DefaultReplicationWorker.run(DefaultReplicationWorker.java:137) + at io.airbyte.workers.general.DefaultReplicationWorker.run(DefaultReplicationWorker.java:65) + at io.airbyte.workers.temporal.TemporalAttemptExecution.lambda$getWorkerThread$2(TemporalAttemptExecution.java:158) + at java.lang.Thread.run(Thread.java:833) + Caused by: io.airbyte.workers.DefaultReplicationWorker$DestinationException: Destination process exited with non-zero exit code 1 + at io.airbyte.workers.DefaultReplicationWorker.lambda$getDestinationOutputRunnable$7(DefaultReplicationWorker.java:397) + at java.base/java.util.concurrent.CompletableFuture$AsyncRun.run(CompletableFuture.java:1804) + ... 3 more + """; + + final Optional> optionalSentryExceptions = exceptionHelper.buildSentryExceptions(stacktrace); + Assertions.assertTrue(optionalSentryExceptions.isPresent()); + final List exceptionList = optionalSentryExceptions.get(); + Assertions.assertEquals(2, exceptionList.size()); + + assertExceptionContent(exceptionList.get(0), "java.util.concurrent.CompletionException", + "io.airbyte.workers.DefaultReplicationWorker$DestinationException: Destination process exited with non-zero exit code 1", + List.of( + Map.of( + "filename", "Thread.java", + "lineno", 833, + "module", "java.lang.Thread", + "function", "run"), + Map.of( + "filename", "ThreadPoolExecutor.java", + "lineno", 635, + "module", "java.util.concurrent.ThreadPoolExecutor$Worker", + "function", "run"), + Map.of( + "filename", "CompletableFuture.java", + "lineno", 315, + "module", "java.util.concurrent.CompletableFuture", + "function", "encodeThrowable"))); + + assertExceptionContent(exceptionList.get(1), "io.airbyte.workers.DefaultReplicationWorker$DestinationException", + "Destination process exited with non-zero exit code 1", List.of( + Map.of( + "filename", "CompletableFuture.java", + "lineno", 1804, + "module", 
"java.util.concurrent.CompletableFuture$AsyncRun", + "function", "run"), + Map.of( + "filename", "DefaultReplicationWorker.java", + "lineno", 397, + "module", "io.airbyte.workers.DefaultReplicationWorker", + "function", "lambda$getDestinationOutputRunnable$7"))); + } + + @Test + void testBuildSentryExceptionsJavaMultilineValue() { + final String stacktrace = + """ + io.temporal.failure.ApplicationFailure: GET https://storage.googleapis.com/ + { + "code" : 401, + "message" : "Invalid Credentials" + } + at com.google.api.client.googleapis.json.GoogleJsonResponseException.from(GoogleJsonResponseException.java:146) + ... 22 more + """; + + final Optional> optionalSentryExceptions = exceptionHelper.buildSentryExceptions(stacktrace); + Assertions.assertTrue(optionalSentryExceptions.isPresent()); + final List exceptionList = optionalSentryExceptions.get(); + Assertions.assertEquals(1, exceptionList.size()); + + final String expectedValue = + """ + GET https://storage.googleapis.com/ + { + "code" : 401, + "message" : "Invalid Credentials" + }"""; + + assertExceptionContent(exceptionList.get(0), "io.temporal.failure.ApplicationFailure", + expectedValue, List.of( + Map.of( + "filename", "GoogleJsonResponseException.java", + "lineno", 146, + "module", "com.google.api.client.googleapis.json.GoogleJsonResponseException", + "function", "from"))); + } + + private void assertExceptionContent(final SentryException exception, + final String type, + final String value, + final List> frames) { + Assertions.assertEquals(type, exception.getType()); + Assertions.assertEquals(value, exception.getValue()); + + final SentryStackTrace stackTrace = exception.getStacktrace(); + Assertions.assertNotNull(stackTrace); + final List sentryFrames = stackTrace.getFrames(); + Assertions.assertNotNull(sentryFrames); + Assertions.assertEquals(frames.size(), sentryFrames.size()); + + for (int i = 0; i < frames.size(); i++) { + final Map expectedFrame = frames.get(i); + final SentryStackFrame sentryFrame 
= sentryFrames.get(i); + + if (expectedFrame.containsKey("module")) { + Assertions.assertEquals(expectedFrame.get("module"), sentryFrame.getModule()); + } + + if (expectedFrame.containsKey("filename")) { + Assertions.assertEquals(expectedFrame.get("filename"), sentryFrame.getFilename()); + } + + if (expectedFrame.containsKey("abspath")) { + Assertions.assertEquals(expectedFrame.get("abspath"), sentryFrame.getAbsPath()); + } + + if (expectedFrame.containsKey("function")) { + Assertions.assertEquals(expectedFrame.get("function"), sentryFrame.getFunction()); + } + + if (expectedFrame.containsKey("lineno")) { + Assertions.assertEquals(expectedFrame.get("lineno"), sentryFrame.getLineno()); + } + + if (expectedFrame.containsKey("context_line")) { + Assertions.assertEquals(expectedFrame.get("context_line"), sentryFrame.getContextLine()); + } + + if (expectedFrame.containsKey("isNative")) { + Assertions.assertEquals(expectedFrame.get("isNative"), sentryFrame.isNative()); + } + } + } + +} diff --git a/airbyte-scheduler/scheduler-persistence/src/test/java/io/airbyte/scheduler/persistence/job_error_reporter/SentryJobErrorReportingClientTest.java b/airbyte-scheduler/scheduler-persistence/src/test/java/io/airbyte/scheduler/persistence/job_error_reporter/SentryJobErrorReportingClientTest.java new file mode 100644 index 000000000000..cff663df1b19 --- /dev/null +++ b/airbyte-scheduler/scheduler-persistence/src/test/java/io/airbyte/scheduler/persistence/job_error_reporter/SentryJobErrorReportingClientTest.java @@ -0,0 +1,153 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+ */ + +package io.airbyte.scheduler.persistence.job_error_reporter; + +import static io.airbyte.scheduler.persistence.job_error_reporter.SentryJobErrorReportingClient.STACKTRACE_PARSE_ERROR_TAG_KEY; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +import io.airbyte.config.FailureReason; +import io.airbyte.config.FailureReason.FailureOrigin; +import io.airbyte.config.FailureReason.FailureType; +import io.airbyte.config.StandardWorkspace; +import io.sentry.IHub; +import io.sentry.NoOpHub; +import io.sentry.SentryEvent; +import io.sentry.protocol.Message; +import io.sentry.protocol.SentryException; +import io.sentry.protocol.User; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.Optional; +import java.util.UUID; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.mockito.ArgumentCaptor; + +public class SentryJobErrorReportingClientTest { + + private static final UUID WORKSPACE_ID = UUID.randomUUID(); + private static final String WORKSPACE_NAME = "My Workspace"; + private static final String DOCKER_IMAGE = "airbyte/source-stripe:1.2.3"; + + private final StandardWorkspace workspace = new StandardWorkspace().withWorkspaceId(WORKSPACE_ID).withName(WORKSPACE_NAME); + private SentryJobErrorReportingClient sentryErrorReportingClient; + private IHub mockSentryHub; + private SentryExceptionHelper mockSentryExceptionHelper; + + @BeforeEach + void setup() { + mockSentryHub = mock(IHub.class); + mockSentryExceptionHelper = mock(SentryExceptionHelper.class); + sentryErrorReportingClient = new SentryJobErrorReportingClient(mockSentryHub, mockSentryExceptionHelper); + } + + @Test 
+ void testCreateSentryHubWithBlankDSN() { + final String sentryDSN = ""; + final IHub sentryHub = SentryJobErrorReportingClient.createSentryHubWithDSN(sentryDSN); + assertEquals(NoOpHub.getInstance(), sentryHub); + } + + @Test + void testCreateSentryHubWithNullDSN() { + final IHub sentryHub = SentryJobErrorReportingClient.createSentryHubWithDSN(null); + assertEquals(NoOpHub.getInstance(), sentryHub); + } + + @Test + void testCreateSentryHubWithDSN() { + final String sentryDSN = "https://public@sentry.example.com/1"; + final IHub sentryHub = SentryJobErrorReportingClient.createSentryHubWithDSN(sentryDSN); + assertNotNull(sentryHub); + assertEquals(sentryDSN, sentryHub.getOptions().getDsn()); + assertFalse(sentryHub.getOptions().isAttachStacktrace()); + assertFalse(sentryHub.getOptions().isEnableUncaughtExceptionHandler()); + } + + @Test + void testReportJobFailureReason() { + final ArgumentCaptor eventCaptor = ArgumentCaptor.forClass(SentryEvent.class); + + final FailureReason failureReason = new FailureReason() + .withFailureOrigin(FailureOrigin.SOURCE) + .withFailureType(FailureType.SYSTEM_ERROR) + .withInternalMessage("RuntimeError: Something went wrong"); + final Map metadata = Map.of("some_metadata", "some_metadata_value"); + + sentryErrorReportingClient.reportJobFailureReason(workspace, failureReason, DOCKER_IMAGE, metadata); + + verify(mockSentryHub).captureEvent(eventCaptor.capture()); + final SentryEvent actualEvent = eventCaptor.getValue(); + assertEquals("other", actualEvent.getPlatform()); + assertEquals("airbyte-source-stripe@1.2.3", actualEvent.getRelease()); + assertEquals(List.of("{{ default }}", "airbyte-source-stripe"), actualEvent.getFingerprints()); + assertEquals("some_metadata_value", actualEvent.getTag("some_metadata")); + assertNull(actualEvent.getTag(STACKTRACE_PARSE_ERROR_TAG_KEY)); + assertNull(actualEvent.getExceptions()); + + final User sentryUser = actualEvent.getUser(); + assertNotNull(sentryUser); + 
assertEquals(WORKSPACE_ID.toString(), sentryUser.getId()); + assertEquals(WORKSPACE_NAME, sentryUser.getUsername()); + + final Message message = actualEvent.getMessage(); + assertNotNull(message); + assertEquals("RuntimeError: Something went wrong", message.getFormatted()); + } + + @Test + void testReportJobFailureReasonWithStacktrace() { + final ArgumentCaptor eventCaptor = ArgumentCaptor.forClass(SentryEvent.class); + + final List exceptions = new ArrayList<>(); + final SentryException exception = new SentryException(); + exception.setType("RuntimeError"); + exception.setValue("Something went wrong"); + exceptions.add(exception); + + when(mockSentryExceptionHelper.buildSentryExceptions("Some valid stacktrace")).thenReturn(Optional.of(exceptions)); + + final FailureReason failureReason = new FailureReason() + .withInternalMessage("RuntimeError: Something went wrong") + .withStacktrace("Some valid stacktrace"); + + sentryErrorReportingClient.reportJobFailureReason(workspace, failureReason, DOCKER_IMAGE, Map.of()); + + verify(mockSentryHub).captureEvent(eventCaptor.capture()); + final SentryEvent actualEvent = eventCaptor.getValue(); + assertEquals(exceptions, actualEvent.getExceptions()); + assertNull(actualEvent.getTag(STACKTRACE_PARSE_ERROR_TAG_KEY)); + } + + @Test + void testReportJobFailureReasonWithInvalidStacktrace() { + final ArgumentCaptor eventCaptor = ArgumentCaptor.forClass(SentryEvent.class); + final String invalidStacktrace = "Invalid stacktrace\nRuntimeError: Something went wrong"; + + when(mockSentryExceptionHelper.buildSentryExceptions(invalidStacktrace)).thenReturn(Optional.empty()); + + final FailureReason failureReason = new FailureReason() + .withInternalMessage("Something went wrong") + .withStacktrace(invalidStacktrace); + + sentryErrorReportingClient.reportJobFailureReason(workspace, failureReason, DOCKER_IMAGE, Map.of()); + + verify(mockSentryHub).captureEvent(eventCaptor.capture()); + final SentryEvent actualEvent = eventCaptor.getValue(); 
+ assertEquals("1", actualEvent.getTag(STACKTRACE_PARSE_ERROR_TAG_KEY)); + final List exceptions = actualEvent.getExceptions(); + assertNotNull(exceptions); + assertEquals(1, exceptions.size()); + assertEquals("Invalid stacktrace, RuntimeError: ", exceptions.get(0).getValue()); + } + +} diff --git a/airbyte-server/Dockerfile b/airbyte-server/Dockerfile index 05f76bc22e44..1f5264d1d0be 100644 --- a/airbyte-server/Dockerfile +++ b/airbyte-server/Dockerfile @@ -4,7 +4,7 @@ FROM ${JDK_IMAGE} AS server EXPOSE 8000 -ARG VERSION=0.39.17-alpha +ARG VERSION=0.39.28-alpha ENV APPLICATION airbyte-server ENV VERSION ${VERSION} diff --git a/airbyte-server/build.gradle b/airbyte-server/build.gradle index 71fb8762ef15..55ade6948821 100644 --- a/airbyte-server/build.gradle +++ b/airbyte-server/build.gradle @@ -36,7 +36,7 @@ dependencies { testImplementation project(':airbyte-test-utils') testImplementation libs.postgresql - testImplementation libs.testcontainers.postgresql + testImplementation libs.platform.testcontainers.postgresql testImplementation 'com.squareup.okhttp3:mockwebserver:4.9.1' } diff --git a/airbyte-server/src/main/java/io/airbyte/server/ConfigurationApiFactory.java b/airbyte-server/src/main/java/io/airbyte/server/ConfigurationApiFactory.java index 0c3c192783c6..c64dde085e60 100644 --- a/airbyte-server/src/main/java/io/airbyte/server/ConfigurationApiFactory.java +++ b/airbyte-server/src/main/java/io/airbyte/server/ConfigurationApiFactory.java @@ -13,6 +13,7 @@ import io.airbyte.config.persistence.ConfigRepository; import io.airbyte.config.persistence.SecretsRepositoryReader; import io.airbyte.config.persistence.SecretsRepositoryWriter; +import io.airbyte.config.persistence.StatePersistence; import io.airbyte.db.Database; import io.airbyte.scheduler.client.EventRunner; import io.airbyte.scheduler.client.SynchronousSchedulerClient; @@ -34,6 +35,7 @@ public class ConfigurationApiFactory implements Factory { private static SecretsRepositoryWriter 
secretsRepositoryWriter; private static SynchronousSchedulerClient synchronousSchedulerClient; private static FileTtlManager archiveTtlManager; + private static StatePersistence statePersistence; private static Map mdc; private static Database configsDatabase; private static Database jobsDatabase; @@ -55,6 +57,7 @@ public static void setValues( final ConfigPersistence seed, final SynchronousSchedulerClient synchronousSchedulerClient, final FileTtlManager archiveTtlManager, + final StatePersistence statePersistence, final Map mdc, final Database configsDatabase, final Database jobsDatabase, @@ -86,6 +89,7 @@ public static void setValues( ConfigurationApiFactory.eventRunner = eventRunner; ConfigurationApiFactory.configsFlyway = configsFlyway; ConfigurationApiFactory.jobsFlyway = jobsFlyway; + ConfigurationApiFactory.statePersistence = statePersistence; } @Override @@ -102,6 +106,7 @@ public ConfigurationApi provide() { ConfigurationApiFactory.archiveTtlManager, ConfigurationApiFactory.configsDatabase, ConfigurationApiFactory.jobsDatabase, + ConfigurationApiFactory.statePersistence, ConfigurationApiFactory.trackingClient, ConfigurationApiFactory.workerEnvironment, ConfigurationApiFactory.logConfigs, diff --git a/airbyte-server/src/main/java/io/airbyte/server/ServerApp.java b/airbyte-server/src/main/java/io/airbyte/server/ServerApp.java index 2ddec2458063..9c0c0abfa6fb 100644 --- a/airbyte-server/src/main/java/io/airbyte/server/ServerApp.java +++ b/airbyte-server/src/main/java/io/airbyte/server/ServerApp.java @@ -24,6 +24,7 @@ import io.airbyte.config.persistence.DatabaseConfigPersistence; import io.airbyte.config.persistence.SecretsRepositoryReader; import io.airbyte.config.persistence.SecretsRepositoryWriter; +import io.airbyte.config.persistence.StreamResetPersistence; import io.airbyte.config.persistence.split_secrets.JsonSecretsProcessor; import io.airbyte.config.persistence.split_secrets.SecretPersistence; import 
io.airbyte.config.persistence.split_secrets.SecretsHydrator; @@ -51,6 +52,8 @@ import io.airbyte.server.handlers.DbMigrationHandler; import io.airbyte.validation.json.JsonValidationException; import io.airbyte.workers.temporal.TemporalClient; +import io.airbyte.workers.temporal.TemporalUtils; +import io.temporal.serviceclient.WorkflowServiceStubs; import java.io.IOException; import java.net.http.HttpClient; import java.util.Map; @@ -192,14 +195,20 @@ public static ServerRunnable getServer(final ServerFactory apiFactory, final TrackingClient trackingClient = TrackingClientSingleton.get(); final JobTracker jobTracker = new JobTracker(configRepository, jobPersistence, trackingClient); + final StreamResetPersistence streamResetPersistence = new StreamResetPersistence(configsDatabase); + final WorkflowServiceStubs temporalService = TemporalUtils.createTemporalService(); + + final TemporalClient temporalClient = new TemporalClient( + TemporalUtils.createWorkflowClient(temporalService, TemporalUtils.getNamespace()), + configs.getWorkspaceRoot(), + temporalService, + streamResetPersistence); - final TemporalClient temporalClient = TemporalClient.production(configs.getTemporalHost(), configs.getWorkspaceRoot(), configs); final OAuthConfigSupplier oAuthConfigSupplier = new OAuthConfigSupplier(configRepository, trackingClient); final DefaultSynchronousSchedulerClient syncSchedulerClient = new DefaultSynchronousSchedulerClient(temporalClient, jobTracker, oAuthConfigSupplier); final HttpClient httpClient = HttpClient.newBuilder().version(HttpClient.Version.HTTP_1_1).build(); - final EventRunner eventRunner = new TemporalEventRunner( - TemporalClient.production(configs.getTemporalHost(), configs.getWorkspaceRoot(), configs)); + final EventRunner eventRunner = new TemporalEventRunner(temporalClient); // It is important that the migration to the temporal scheduler is performed before the server // accepts any requests. 
diff --git a/airbyte-server/src/main/java/io/airbyte/server/ServerFactory.java b/airbyte-server/src/main/java/io/airbyte/server/ServerFactory.java index 136b1c956409..279e834a09ca 100644 --- a/airbyte-server/src/main/java/io/airbyte/server/ServerFactory.java +++ b/airbyte-server/src/main/java/io/airbyte/server/ServerFactory.java @@ -13,6 +13,7 @@ import io.airbyte.config.persistence.ConfigRepository; import io.airbyte.config.persistence.SecretsRepositoryReader; import io.airbyte.config.persistence.SecretsRepositoryWriter; +import io.airbyte.config.persistence.StatePersistence; import io.airbyte.db.Database; import io.airbyte.scheduler.client.EventRunner; import io.airbyte.scheduler.client.SynchronousSchedulerClient; @@ -74,6 +75,7 @@ public ServerRunnable create(final SynchronousSchedulerClient synchronousSchedul seed, synchronousSchedulerClient, new FileTtlManager(10, TimeUnit.MINUTES, 10), + new StatePersistence(configsDatabase), MDC.getCopyOfContextMap(), configsDatabase, jobsDatabase, diff --git a/airbyte-server/src/main/java/io/airbyte/server/apis/ConfigurationApi.java b/airbyte-server/src/main/java/io/airbyte/server/apis/ConfigurationApi.java index 0da999eed61c..017c171429e5 100644 --- a/airbyte-server/src/main/java/io/airbyte/server/apis/ConfigurationApi.java +++ b/airbyte-server/src/main/java/io/airbyte/server/apis/ConfigurationApi.java @@ -15,6 +15,7 @@ import io.airbyte.api.model.generated.ConnectionReadList; import io.airbyte.api.model.generated.ConnectionSearch; import io.airbyte.api.model.generated.ConnectionState; +import io.airbyte.api.model.generated.ConnectionStateType; import io.airbyte.api.model.generated.ConnectionUpdate; import io.airbyte.api.model.generated.CustomDestinationDefinitionCreate; import io.airbyte.api.model.generated.CustomDestinationDefinitionUpdate; @@ -105,6 +106,7 @@ import io.airbyte.config.persistence.ConfigRepository; import io.airbyte.config.persistence.SecretsRepositoryReader; import 
io.airbyte.config.persistence.SecretsRepositoryWriter; +import io.airbyte.config.persistence.StatePersistence; import io.airbyte.db.Database; import io.airbyte.scheduler.client.EventRunner; import io.airbyte.scheduler.client.SynchronousSchedulerClient; @@ -126,6 +128,7 @@ import io.airbyte.server.handlers.SchedulerHandler; import io.airbyte.server.handlers.SourceDefinitionsHandler; import io.airbyte.server.handlers.SourceHandler; +import io.airbyte.server.handlers.StateHandler; import io.airbyte.server.handlers.WebBackendConnectionsHandler; import io.airbyte.server.handlers.WorkspacesHandler; import io.airbyte.validation.json.JsonSchemaValidator; @@ -148,6 +151,7 @@ public class ConfigurationApi implements io.airbyte.api.generated.V1Api { private final ConnectionsHandler connectionsHandler; private final OperationsHandler operationsHandler; private final SchedulerHandler schedulerHandler; + private final StateHandler stateHandler; private final JobHistoryHandler jobHistoryHandler; private final WebBackendConnectionsHandler webBackendConnectionsHandler; private final HealthCheckHandler healthCheckHandler; @@ -169,6 +173,7 @@ public ConfigurationApi(final ConfigRepository configRepository, final FileTtlManager archiveTtlManager, final Database configsDatabase, final Database jobsDatabase, + final StatePersistence statePersistence, final TrackingClient trackingClient, final WorkerEnvironment workerEnvironment, final LogConfigs logConfigs, @@ -196,6 +201,7 @@ public ConfigurationApi(final ConfigRepository configRepository, logConfigs, eventRunner); + stateHandler = new StateHandler(statePersistence); connectionsHandler = new ConnectionsHandler( configRepository, workspaceHelper, @@ -222,6 +228,7 @@ public ConfigurationApi(final ConfigRepository configRepository, oAuthHandler = new OAuthHandler(configRepository, httpClient, trackingClient); webBackendConnectionsHandler = new WebBackendConnectionsHandler( connectionsHandler, + stateHandler, sourceHandler, 
destinationHandler, jobHistoryHandler, @@ -724,7 +731,7 @@ public OperationRead updateOperation(final OperationUpdate operationUpdate) { @Override public ConnectionState getState(final ConnectionIdRequestBody connectionIdRequestBody) { - return execute(() -> schedulerHandler.getState(connectionIdRequestBody)); + return execute(() -> stateHandler.getState(connectionIdRequestBody)); } // SCHEDULER @@ -803,11 +810,6 @@ public WebBackendConnectionRead webBackendGetConnection(final WebBackendConnecti return execute(() -> webBackendConnectionsHandler.webBackendGetConnection(webBackendConnectionRequestBody)); } - @Override - public WebBackendWorkspaceStateResult webBackendGetWorkspaceState(final WebBackendWorkspaceState webBackendWorkspaceState) { - return execute(() -> webBackendConnectionsHandler.getWorkspaceState(webBackendWorkspaceState)); - } - @Override public WebBackendConnectionRead webBackendCreateConnection(final WebBackendConnectionCreate webBackendConnectionCreate) { return execute(() -> webBackendConnectionsHandler.webBackendCreateConnection(webBackendConnectionCreate)); @@ -818,6 +820,16 @@ public WebBackendConnectionRead webBackendUpdateConnection(final WebBackendConne return execute(() -> webBackendConnectionsHandler.webBackendUpdateConnection(webBackendConnectionUpdate)); } + @Override + public ConnectionStateType getStateType(final ConnectionIdRequestBody connectionIdRequestBody) { + return execute(() -> webBackendConnectionsHandler.getStateType(connectionIdRequestBody)); + } + + @Override + public WebBackendWorkspaceStateResult webBackendGetWorkspaceState(final WebBackendWorkspaceState webBackendWorkspaceState) { + return execute(() -> webBackendConnectionsHandler.getWorkspaceState(webBackendWorkspaceState)); + } + // ARCHIVES @Override @@ -845,15 +857,15 @@ public ImportRead importIntoWorkspace(final ImportRequestBody importRequestBody) return execute(() -> archiveHandler.importIntoWorkspace(importRequestBody)); } - public boolean canImportDefinitons() 
{ + public boolean canImportDefinitions() { return archiveHandler.canImportDefinitions(); } - private T execute(final HandlerCall call) { + private static T execute(final HandlerCall call) { try { return call.call(); } catch (final ConfigNotFoundException e) { - throw new IdNotFoundKnownException(String.format("Could not find configuration for %s: %s.", e.getType().toString(), e.getConfigId()), + throw new IdNotFoundKnownException(String.format("Could not find configuration for %s: %s.", e.getType(), e.getConfigId()), e.getConfigId(), e); } catch (final JsonValidationException e) { throw new BadObjectSchemaKnownException( diff --git a/airbyte-server/src/main/java/io/airbyte/server/converters/ApiPojoConverters.java b/airbyte-server/src/main/java/io/airbyte/server/converters/ApiPojoConverters.java index df76952f16f1..725da206ce68 100644 --- a/airbyte-server/src/main/java/io/airbyte/server/converters/ApiPojoConverters.java +++ b/airbyte-server/src/main/java/io/airbyte/server/converters/ApiPojoConverters.java @@ -130,7 +130,8 @@ public static ConnectionRead internalToConnectionRead(final StandardSync standar .namespaceDefinition(Enums.convertTo(standardSync.getNamespaceDefinition(), io.airbyte.api.model.generated.NamespaceDefinitionType.class)) .namespaceFormat(standardSync.getNamespaceFormat()) .prefix(standardSync.getPrefix()) - .syncCatalog(CatalogConverter.toApi(standardSync.getCatalog())); + .syncCatalog(CatalogConverter.toApi(standardSync.getCatalog())) + .sourceCatalogId(standardSync.getSourceCatalogId()); if (standardSync.getResourceRequirements() != null) { connectionRead.resourceRequirements(resourceRequirementsToApi(standardSync.getResourceRequirements())); diff --git a/airbyte-server/src/main/java/io/airbyte/server/converters/CatalogDiffConverters.java b/airbyte-server/src/main/java/io/airbyte/server/converters/CatalogDiffConverters.java new file mode 100644 index 000000000000..658c5e11cb69 --- /dev/null +++ 
b/airbyte-server/src/main/java/io/airbyte/server/converters/CatalogDiffConverters.java @@ -0,0 +1,79 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.server.converters; + +import io.airbyte.api.model.generated.FieldAdd; +import io.airbyte.api.model.generated.FieldRemove; +import io.airbyte.api.model.generated.FieldSchemaUpdate; +import io.airbyte.api.model.generated.FieldTransform; +import io.airbyte.api.model.generated.StreamTransform; +import io.airbyte.commons.enums.Enums; +import io.airbyte.protocol.models.transform_models.FieldTransformType; +import io.airbyte.protocol.models.transform_models.StreamTransformType; +import java.util.List; +import java.util.Optional; + +/** + * Utility methods for converting between internal and API representation of catalog diffs. + */ +public class CatalogDiffConverters { + + public static StreamTransform streamTransformToApi(final io.airbyte.protocol.models.transform_models.StreamTransform transform) { + return new StreamTransform() + .transformType(Enums.convertTo(transform.getTransformType(), StreamTransform.TransformTypeEnum.class)) + .streamDescriptor(ProtocolConverters.streamDescriptorToApi(transform.getStreamDescriptor())) + .updateStream(updateStreamToApi(transform).orElse(null)); + } + + public static Optional> updateStreamToApi(final io.airbyte.protocol.models.transform_models.StreamTransform transform) { + if (transform.getTransformType() == StreamTransformType.UPDATE_STREAM) { + return Optional.ofNullable(transform.getUpdateStreamTransform() + .getFieldTransforms() + .stream() + .map(CatalogDiffConverters::fieldTransformToApi) + .toList()); + } else { + return Optional.empty(); + } + } + + public static FieldTransform fieldTransformToApi(final io.airbyte.protocol.models.transform_models.FieldTransform transform) { + return new FieldTransform() + .transformType(Enums.convertTo(transform.getTransformType(), FieldTransform.TransformTypeEnum.class)) + 
.fieldName(transform.getFieldName()) + .addField(addFieldToApi(transform).orElse(null)) + .removeField(removeFieldToApi(transform).orElse(null)) + .updateFieldSchema(updateFieldToApi(transform).orElse(null)); + } + + private static Optional addFieldToApi(final io.airbyte.protocol.models.transform_models.FieldTransform transform) { + if (transform.getTransformType() == FieldTransformType.ADD_FIELD) { + return Optional.of(new FieldAdd() + .schema(transform.getAddFieldTransform().getSchema())); + } else { + return Optional.empty(); + } + } + + private static Optional removeFieldToApi(final io.airbyte.protocol.models.transform_models.FieldTransform transform) { + if (transform.getTransformType() == FieldTransformType.REMOVE_FIELD) { + return Optional.of(new FieldRemove() + .schema(transform.getRemoveFieldTransform().getSchema())); + } else { + return Optional.empty(); + } + } + + private static Optional updateFieldToApi(final io.airbyte.protocol.models.transform_models.FieldTransform transform) { + if (transform.getTransformType() == FieldTransformType.UPDATE_FIELD_SCHEMA) { + return Optional.of(new FieldSchemaUpdate() + .oldSchema(transform.getUpdateFieldTransform().getOldSchema()) + .newSchema(transform.getUpdateFieldTransform().getNewSchema())); + } else { + return Optional.empty(); + } + } + +} diff --git a/airbyte-server/src/main/java/io/airbyte/server/converters/JobConverter.java b/airbyte-server/src/main/java/io/airbyte/server/converters/JobConverter.java index 51c329881ea7..4df3a51331b2 100644 --- a/airbyte-server/src/main/java/io/airbyte/server/converters/JobConverter.java +++ b/airbyte-server/src/main/java/io/airbyte/server/converters/JobConverter.java @@ -21,12 +21,15 @@ import io.airbyte.api.model.generated.JobStatus; import io.airbyte.api.model.generated.JobWithAttemptsRead; import io.airbyte.api.model.generated.LogRead; +import io.airbyte.api.model.generated.ResetConfig; import io.airbyte.api.model.generated.SourceDefinitionRead; import 
io.airbyte.api.model.generated.SynchronousJobRead; import io.airbyte.commons.enums.Enums; import io.airbyte.commons.version.AirbyteVersion; import io.airbyte.config.Configs.WorkerEnvironment; +import io.airbyte.config.JobConfig.ConfigType; import io.airbyte.config.JobOutput; +import io.airbyte.config.ResetSourceConfiguration; import io.airbyte.config.StandardSyncOutput; import io.airbyte.config.StandardSyncSummary; import io.airbyte.config.StreamSyncStats; @@ -41,12 +44,11 @@ import java.nio.file.Path; import java.util.Collections; import java.util.List; +import java.util.Optional; import java.util.stream.Collectors; public class JobConverter { - private static final int LOG_TAIL_SIZE = 1000000; - private final WorkerEnvironment workerEnvironment; private final LogConfigs logConfigs; @@ -58,13 +60,13 @@ public JobConverter(final WorkerEnvironment workerEnvironment, final LogConfigs public JobInfoRead getJobInfoRead(final Job job) { return new JobInfoRead() .job(getJobWithAttemptsRead(job).getJob()) - .attempts(job.getAttempts().stream().map(attempt -> getAttemptInfoRead(attempt)).collect(Collectors.toList())); + .attempts(job.getAttempts().stream().map(this::getAttemptInfoRead).collect(Collectors.toList())); } - public JobDebugRead getDebugJobInfoRead(final JobInfoRead jobInfoRead, - final SourceDefinitionRead sourceDefinitionRead, - final DestinationDefinitionRead destinationDefinitionRead, - final AirbyteVersion airbyteVersion) { + public static JobDebugRead getDebugJobInfoRead(final JobInfoRead jobInfoRead, + final SourceDefinitionRead sourceDefinitionRead, + final DestinationDefinitionRead destinationDefinitionRead, + final AirbyteVersion airbyteVersion) { return new JobDebugRead() .id(jobInfoRead.getJob().getId()) .configId(jobInfoRead.getJob().getConfigId()) @@ -84,10 +86,34 @@ public static JobWithAttemptsRead getJobWithAttemptsRead(final Job job) { .id(job.getId()) .configId(configId) .configType(configType) + 
.resetConfig(extractResetConfigIfReset(job).orElse(null)) .createdAt(job.getCreatedAtInSecond()) .updatedAt(job.getUpdatedAtInSecond()) .status(Enums.convertTo(job.getStatus(), JobStatus.class))) - .attempts(job.getAttempts().stream().map(attempt -> getAttemptRead(attempt)).collect(Collectors.toList())); + .attempts(job.getAttempts().stream().map(JobConverter::getAttemptRead).toList()); + } + + /** + * If the job is of type RESET, extracts the part of the reset config that we expose in the API. + * Otherwise, returns empty optional. + * + * @param job - job + * @return api representation of reset config + */ + private static Optional extractResetConfigIfReset(final Job job) { + if (job.getConfigType() == ConfigType.RESET_CONNECTION) { + final ResetSourceConfiguration resetSourceConfiguration = job.getConfig().getResetConnection().getResetSourceConfiguration(); + if (resetSourceConfiguration == null) { + return Optional.empty(); + } + return Optional.ofNullable( + new ResetConfig().streamsToReset(job.getConfig().getResetConnection().getResetSourceConfiguration().getStreamsToReset() + .stream() + .map(ProtocolConverters::streamDescriptorToApi) + .toList())); + } else { + return Optional.empty(); + } } public AttemptInfoRead getAttemptInfoRead(final Attempt attempt) { diff --git a/airbyte-server/src/main/java/io/airbyte/server/converters/ProtocolConverters.java b/airbyte-server/src/main/java/io/airbyte/server/converters/ProtocolConverters.java new file mode 100644 index 000000000000..b71771e76da9 --- /dev/null +++ b/airbyte-server/src/main/java/io/airbyte/server/converters/ProtocolConverters.java @@ -0,0 +1,18 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.server.converters; + +import io.airbyte.api.model.generated.StreamDescriptor; + +/** + * Utilities that convert protocol types into API representations of the protocol type. 
+ */ +public class ProtocolConverters { + + public static StreamDescriptor streamDescriptorToApi(final io.airbyte.protocol.models.StreamDescriptor protocolStreamDescriptor) { + return new StreamDescriptor().name(protocolStreamDescriptor.getName()).namespace(protocolStreamDescriptor.getNamespace()); + } + +} diff --git a/airbyte-server/src/main/java/io/airbyte/server/converters/StateConverter.java b/airbyte-server/src/main/java/io/airbyte/server/converters/StateConverter.java new file mode 100644 index 000000000000..946f8988c4cf --- /dev/null +++ b/airbyte-server/src/main/java/io/airbyte/server/converters/StateConverter.java @@ -0,0 +1,100 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.server.converters; + +import io.airbyte.api.model.generated.ConnectionState; +import io.airbyte.api.model.generated.ConnectionStateType; +import io.airbyte.api.model.generated.GlobalState; +import io.airbyte.api.model.generated.StreamState; +import io.airbyte.commons.enums.Enums; +import io.airbyte.config.StateType; +import io.airbyte.config.StateWrapper; +import io.airbyte.protocol.models.AirbyteStateMessage; +import io.airbyte.protocol.models.AirbyteStreamState; +import java.util.List; +import java.util.Optional; +import java.util.UUID; +import javax.annotation.Nullable; + +public class StateConverter { + + /** + * Converts internal representation of state to API representation + * + * @param connectionId connection associated with the state + * @param stateWrapper internal state representation to convert + * @return api representation of state + */ + public static ConnectionState toApi(final UUID connectionId, final @Nullable StateWrapper stateWrapper) { + return new ConnectionState() + .connectionId(connectionId) + .stateType(convertStateType(stateWrapper)) + .state(stateWrapper != null ? 
stateWrapper.getLegacyState() : null) + .globalState(globalStateToApi(stateWrapper).orElse(null)) + .streamState(streamStateToApi(stateWrapper).orElse(null)); + } + + /** + * Convert to API representation of state type. API has an additional type (NOT_SET). This + * represents the case where no state is saved so we do not know the state type. + * + * @param stateWrapper state to convert + * @return api representation of state type + */ + private static ConnectionStateType convertStateType(final @Nullable StateWrapper stateWrapper) { + if (stateWrapper == null || stateWrapper.getStateType() == null) { + return ConnectionStateType.NOT_SET; + } else { + return Enums.convertTo(stateWrapper.getStateType(), ConnectionStateType.class); + } + } + + /** + * If wrapper is of type global state, returns global state. Otherwise, empty optional. + * + * @param stateWrapper state wrapper to extract from + * @return global state if state wrapper is type global. Otherwise, empty optional. + */ + private static Optional globalStateToApi(final @Nullable StateWrapper stateWrapper) { + if (stateWrapper != null + && stateWrapper.getStateType() == StateType.GLOBAL + && stateWrapper.getGlobal() != null + && stateWrapper.getGlobal().getGlobal() != null) { + return Optional.of(new GlobalState() + .sharedState(stateWrapper.getGlobal().getGlobal().getSharedState()) + .streamStates(stateWrapper.getGlobal().getGlobal().getStreamStates() + .stream() + .map(StateConverter::streamStateStructToApi) + .toList())); + } else { + return Optional.empty(); + } + } + + /** + * If wrapper is of type stream state, returns stream state. Otherwise, empty optional. + * + * @param stateWrapper state wrapper to extract from + * @return stream state if state wrapper is type stream. Otherwise, empty optional. 
+ */ + private static Optional> streamStateToApi(final @Nullable StateWrapper stateWrapper) { + if (stateWrapper != null && stateWrapper.getStateType() == StateType.STREAM && stateWrapper.getStateMessages() != null) { + return Optional.ofNullable(stateWrapper.getStateMessages() + .stream() + .map(AirbyteStateMessage::getStream) + .map(StateConverter::streamStateStructToApi) + .toList()); + } else { + return Optional.empty(); + } + } + + private static StreamState streamStateStructToApi(final AirbyteStreamState streamState) { + return new StreamState() + .streamDescriptor(ProtocolConverters.streamDescriptorToApi(streamState.getStreamDescriptor())) + .streamState(streamState.getStreamState()); + } + +} diff --git a/airbyte-server/src/main/java/io/airbyte/server/handlers/ConnectionsHandler.java b/airbyte-server/src/main/java/io/airbyte/server/handlers/ConnectionsHandler.java index 5e46b7cce9eb..2f58d4f66646 100644 --- a/airbyte-server/src/main/java/io/airbyte/server/handlers/ConnectionsHandler.java +++ b/airbyte-server/src/main/java/io/airbyte/server/handlers/ConnectionsHandler.java @@ -10,6 +10,7 @@ import com.google.common.collect.Lists; import io.airbyte.analytics.TrackingClient; import io.airbyte.api.model.generated.AirbyteCatalog; +import io.airbyte.api.model.generated.CatalogDiff; import io.airbyte.api.model.generated.ConnectionCreate; import io.airbyte.api.model.generated.ConnectionRead; import io.airbyte.api.model.generated.ConnectionReadList; @@ -33,10 +34,12 @@ import io.airbyte.config.helpers.ScheduleHelpers; import io.airbyte.config.persistence.ConfigNotFoundException; import io.airbyte.config.persistence.ConfigRepository; +import io.airbyte.protocol.models.CatalogHelpers; import io.airbyte.protocol.models.ConfiguredAirbyteCatalog; import io.airbyte.scheduler.client.EventRunner; import io.airbyte.scheduler.persistence.WorkspaceHelper; import io.airbyte.server.converters.ApiPojoConverters; +import io.airbyte.server.converters.CatalogDiffConverters; import 
io.airbyte.server.handlers.helpers.CatalogConverter; import io.airbyte.server.handlers.helpers.ConnectionMatcher; import io.airbyte.server.handlers.helpers.DestinationMatcher; @@ -256,6 +259,15 @@ public ConnectionRead getConnection(final UUID connectionId) return buildConnectionRead(connectionId); } + public static CatalogDiff getDiff(final AirbyteCatalog oldCatalog, final AirbyteCatalog newCatalog) { + return new CatalogDiff().transforms(CatalogHelpers.getCatalogDiff( + CatalogHelpers.configuredCatalogToCatalog(CatalogConverter.toProtocolKeepAllStreams(oldCatalog)), + CatalogHelpers.configuredCatalogToCatalog(CatalogConverter.toProtocolKeepAllStreams(newCatalog))) + .stream() + .map(CatalogDiffConverters::streamTransformToApi) + .toList()); + } + public Optional getConnectionAirbyteCatalog(final UUID connectionId) throws JsonValidationException, ConfigNotFoundException, IOException { final StandardSync connection = configRepository.getStandardSync(connectionId); @@ -303,7 +315,7 @@ public boolean matchSearch(final ConnectionSearch connectionSearch, final Connec matchSearch(connectionSearch.getDestination(), destinationRead); } - // todo (cgardens) - make this static. requires removing one bad dependence in SourceHandlerTest + // todo (cgardens) - make this static. requires removing one bad dependency in SourceHandlerTest public boolean matchSearch(final SourceSearch sourceSearch, final SourceRead sourceRead) { final SourceMatcher sourceMatcher = new SourceMatcher(sourceSearch); final SourceRead sourceReadFromSearch = sourceMatcher.match(sourceRead); @@ -311,7 +323,7 @@ public boolean matchSearch(final SourceSearch sourceSearch, final SourceRead sou return (sourceReadFromSearch == null || sourceReadFromSearch.equals(sourceRead)); } - // todo (cgardens) - make this static. requires removing one bad dependence in + // todo (cgardens) - make this static. 
requires removing one bad dependency in // DestinationHandlerTest public boolean matchSearch(final DestinationSearch destinationSearch, final DestinationRead destinationRead) { final DestinationMatcher destinationMatcher = new DestinationMatcher(destinationSearch); diff --git a/airbyte-server/src/main/java/io/airbyte/server/handlers/JobHistoryHandler.java b/airbyte-server/src/main/java/io/airbyte/server/handlers/JobHistoryHandler.java index ec005f4a35ac..a5fed32955f4 100644 --- a/airbyte-server/src/main/java/io/airbyte/server/handlers/JobHistoryHandler.java +++ b/airbyte-server/src/main/java/io/airbyte/server/handlers/JobHistoryHandler.java @@ -137,7 +137,7 @@ private JobDebugInfoRead buildJobDebugInfoRead(final JobInfoRead jobInfoRead) final DestinationRead destination = getDestinationRead(connection); final SourceDefinitionRead sourceDefinitionRead = getSourceDefinitionRead(source); final DestinationDefinitionRead destinationDefinitionRead = getDestinationDefinitionRead(destination); - final JobDebugRead jobDebugRead = jobConverter.getDebugJobInfoRead(jobInfoRead, sourceDefinitionRead, destinationDefinitionRead, airbyteVersion); + final JobDebugRead jobDebugRead = JobConverter.getDebugJobInfoRead(jobInfoRead, sourceDefinitionRead, destinationDefinitionRead, airbyteVersion); return new JobDebugInfoRead() .attempts(jobInfoRead.getAttempts()) diff --git a/airbyte-server/src/main/java/io/airbyte/server/handlers/SchedulerHandler.java b/airbyte-server/src/main/java/io/airbyte/server/handlers/SchedulerHandler.java index 16250c869c23..3c27be01ddc7 100644 --- a/airbyte-server/src/main/java/io/airbyte/server/handlers/SchedulerHandler.java +++ b/airbyte-server/src/main/java/io/airbyte/server/handlers/SchedulerHandler.java @@ -13,7 +13,6 @@ import io.airbyte.api.model.generated.CheckConnectionRead; import io.airbyte.api.model.generated.CheckConnectionRead.StatusEnum; import io.airbyte.api.model.generated.ConnectionIdRequestBody; -import 
io.airbyte.api.model.generated.ConnectionState; import io.airbyte.api.model.generated.DestinationCoreConfig; import io.airbyte.api.model.generated.DestinationDefinitionIdWithWorkspaceId; import io.airbyte.api.model.generated.DestinationDefinitionSpecificationRead; @@ -43,7 +42,6 @@ import io.airbyte.config.StandardCheckConnectionOutput; import io.airbyte.config.StandardDestinationDefinition; import io.airbyte.config.StandardSourceDefinition; -import io.airbyte.config.State; import io.airbyte.config.helpers.LogConfigs; import io.airbyte.config.persistence.ConfigNotFoundException; import io.airbyte.config.persistence.ConfigRepository; @@ -317,22 +315,11 @@ public JobInfoRead syncConnection(final ConnectionIdRequestBody connectionIdRequ return submitManualSyncToWorker(connectionIdRequestBody.getConnectionId()); } - public JobInfoRead resetConnection(final ConnectionIdRequestBody connectionIdRequestBody) throws IOException { + public JobInfoRead resetConnection(final ConnectionIdRequestBody connectionIdRequestBody) + throws IOException, JsonValidationException, ConfigNotFoundException { return submitResetConnectionToWorker(connectionIdRequestBody.getConnectionId()); } - public ConnectionState getState(final ConnectionIdRequestBody connectionIdRequestBody) throws IOException { - final Optional currentState = configRepository.getConnectionState(connectionIdRequestBody.getConnectionId()); - LOGGER.info("currentState server: {}", currentState); - - final ConnectionState connectionState = new ConnectionState() - .connectionId(connectionIdRequestBody.getConnectionId()); - - currentState.ifPresent(state -> connectionState.state(state.getState())); - - return connectionState; - } - public JobInfoRead cancelJob(final JobIdRequestBody jobIdRequestBody) throws IOException { return submitCancellationToWorker(jobIdRequestBody.getId()); } @@ -384,8 +371,10 @@ private JobInfoRead submitManualSyncToWorker(final UUID connectionId) throws IOE return readJobFromResult(manualSyncResult); 
} - private JobInfoRead submitResetConnectionToWorker(final UUID connectionId) throws IOException { - final ManualOperationResult resetConnectionResult = eventRunner.resetConnection(connectionId); + private JobInfoRead submitResetConnectionToWorker(final UUID connectionId) throws IOException, JsonValidationException, ConfigNotFoundException { + final ManualOperationResult resetConnectionResult = eventRunner.resetConnection( + connectionId, + configRepository.getAllStreamsForConnection(connectionId)); return readJobFromResult(resetConnectionResult); } diff --git a/airbyte-server/src/main/java/io/airbyte/server/handlers/StateHandler.java b/airbyte-server/src/main/java/io/airbyte/server/handlers/StateHandler.java new file mode 100644 index 000000000000..4fd36f2f7264 --- /dev/null +++ b/airbyte-server/src/main/java/io/airbyte/server/handlers/StateHandler.java @@ -0,0 +1,30 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.server.handlers; + +import io.airbyte.api.model.generated.ConnectionIdRequestBody; +import io.airbyte.api.model.generated.ConnectionState; +import io.airbyte.config.StateWrapper; +import io.airbyte.config.persistence.StatePersistence; +import io.airbyte.server.converters.StateConverter; +import java.io.IOException; +import java.util.Optional; +import java.util.UUID; + +public class StateHandler { + + private final StatePersistence statePersistence; + + public StateHandler(final StatePersistence statePersistence) { + this.statePersistence = statePersistence; + } + + public ConnectionState getState(final ConnectionIdRequestBody connectionIdRequestBody) throws IOException { + final UUID connectionId = connectionIdRequestBody.getConnectionId(); + final Optional currentState = statePersistence.getCurrentState(connectionId); + return StateConverter.toApi(connectionId, currentState.orElse(null)); + } + +} diff --git a/airbyte-server/src/main/java/io/airbyte/server/handlers/WebBackendConnectionsHandler.java 
b/airbyte-server/src/main/java/io/airbyte/server/handlers/WebBackendConnectionsHandler.java index c97b717e17fb..7762db930c9f 100644 --- a/airbyte-server/src/main/java/io/airbyte/server/handlers/WebBackendConnectionsHandler.java +++ b/airbyte-server/src/main/java/io/airbyte/server/handlers/WebBackendConnectionsHandler.java @@ -13,10 +13,12 @@ import io.airbyte.api.model.generated.AirbyteStream; import io.airbyte.api.model.generated.AirbyteStreamAndConfiguration; import io.airbyte.api.model.generated.AirbyteStreamConfiguration; +import io.airbyte.api.model.generated.CatalogDiff; import io.airbyte.api.model.generated.ConnectionCreate; import io.airbyte.api.model.generated.ConnectionIdRequestBody; import io.airbyte.api.model.generated.ConnectionRead; import io.airbyte.api.model.generated.ConnectionSearch; +import io.airbyte.api.model.generated.ConnectionStateType; import io.airbyte.api.model.generated.ConnectionUpdate; import io.airbyte.api.model.generated.DestinationIdRequestBody; import io.airbyte.api.model.generated.DestinationRead; @@ -43,12 +45,14 @@ import io.airbyte.api.model.generated.WebBackendWorkspaceState; import io.airbyte.api.model.generated.WebBackendWorkspaceStateResult; import io.airbyte.api.model.generated.WorkspaceIdRequestBody; +import io.airbyte.commons.enums.Enums; import io.airbyte.commons.json.Jsons; import io.airbyte.commons.lang.MoreBooleans; import io.airbyte.config.persistence.ConfigNotFoundException; import io.airbyte.config.persistence.ConfigRepository; import io.airbyte.scheduler.client.EventRunner; import io.airbyte.validation.json.JsonValidationException; +import io.airbyte.workers.temporal.TemporalClient.ManualOperationResult; import java.io.IOException; import java.util.ArrayList; import java.util.Collections; @@ -57,7 +61,6 @@ import java.util.Optional; import java.util.Set; import java.util.UUID; -import java.util.function.Predicate; import lombok.AllArgsConstructor; import lombok.extern.slf4j.Slf4j; @@ -68,12 +71,14 @@ public class 
WebBackendConnectionsHandler { private static final Set TERMINAL_STATUSES = Sets.newHashSet(JobStatus.FAILED, JobStatus.SUCCEEDED, JobStatus.CANCELLED); private final ConnectionsHandler connectionsHandler; + private final StateHandler stateHandler; private final SourceHandler sourceHandler; private final DestinationHandler destinationHandler; private final JobHistoryHandler jobHistoryHandler; private final SchedulerHandler schedulerHandler; private final OperationsHandler operationsHandler; private final EventRunner eventRunner; + // todo (cgardens) - this handler should NOT have access to the db. only access via handler. private final ConfigRepository configRepository; public WebBackendWorkspaceStateResult getWorkspaceState(final WebBackendWorkspaceState webBackendWorkspaceState) throws IOException { @@ -88,6 +93,10 @@ public WebBackendWorkspaceStateResult getWorkspaceState(final WebBackendWorkspac .hasSources(sourceCount > 0); } + public ConnectionStateType getStateType(final ConnectionIdRequestBody connectionIdRequestBody) throws IOException { + return Enums.convertTo(stateHandler.getState(connectionIdRequestBody).getStateType(), ConnectionStateType.class); + } + public WebBackendConnectionReadList webBackendListConnectionsForWorkspace(final WorkspaceIdRequestBody workspaceIdRequestBody) throws ConfigNotFoundException, IOException, JsonValidationException { @@ -113,14 +122,20 @@ private WebBackendConnectionRead buildWebBackendConnectionRead(final ConnectionR final SourceRead source = getSourceRead(connectionRead); final DestinationRead destination = getDestinationRead(connectionRead); final OperationReadList operations = getOperationReadList(connectionRead); - final WebBackendConnectionRead WebBackendConnectionRead = getWebBackendConnectionRead(connectionRead, source, destination, operations); - final JobReadList syncJobReadList = getSyncJobs(connectionRead); - final Predicate hasRunningJob = (JobRead job) -> !TERMINAL_STATUSES.contains(job.getStatus()); - 
WebBackendConnectionRead.setIsSyncing(syncJobReadList.getJobs().stream().map(JobWithAttemptsRead::getJob).anyMatch(hasRunningJob)); - setLatestSyncJobProperties(WebBackendConnectionRead, syncJobReadList); - WebBackendConnectionRead.setCatalogId(connectionRead.getSourceCatalogId()); - return WebBackendConnectionRead; + + final WebBackendConnectionRead webBackendConnectionRead = getWebBackendConnectionRead(connectionRead, source, destination, operations) + .catalogId(connectionRead.getSourceCatalogId()) + .isSyncing(syncJobReadList.getJobs() + .stream() + .map(JobWithAttemptsRead::getJob) + .anyMatch(WebBackendConnectionsHandler::isRunningJob)); + setLatestSyncJobProperties(webBackendConnectionRead, syncJobReadList); + return webBackendConnectionRead; + } + + private static boolean isRunningJob(final JobRead job) { + return !TERMINAL_STATUSES.contains(job.getStatus()); } private SourceRead getSourceRead(final ConnectionRead connectionRead) throws JsonValidationException, IOException, ConfigNotFoundException { @@ -140,10 +155,10 @@ private OperationReadList getOperationReadList(final ConnectionRead connectionRe return operationsHandler.listOperationsForConnection(connectionIdRequestBody); } - private WebBackendConnectionRead getWebBackendConnectionRead(final ConnectionRead connectionRead, - final SourceRead source, - final DestinationRead destination, - final OperationReadList operations) { + private static WebBackendConnectionRead getWebBackendConnectionRead(final ConnectionRead connectionRead, + final SourceRead source, + final DestinationRead destination, + final OperationReadList operations) { return new WebBackendConnectionRead() .connectionId(connectionRead.getConnectionId()) .sourceId(connectionRead.getSourceId()) @@ -169,7 +184,7 @@ private JobReadList getSyncJobs(final ConnectionRead connectionRead) throws IOEx return jobHistoryHandler.listJobsFor(jobListRequestBody); } - private void setLatestSyncJobProperties(final WebBackendConnectionRead 
WebBackendConnectionRead, final JobReadList syncJobReadList) { + private static void setLatestSyncJobProperties(final WebBackendConnectionRead WebBackendConnectionRead, final JobReadList syncJobReadList) { syncJobReadList.getJobs().stream().map(JobWithAttemptsRead::getJob).findFirst() .ifPresent(job -> { WebBackendConnectionRead.setLatestSyncJobCreatedAt(job.getCreatedAt()); @@ -190,50 +205,108 @@ public WebBackendConnectionReadList webBackendSearchConnections(final WebBackend return new WebBackendConnectionReadList().connections(reads); } + // todo (cgardens) - This logic is a headache to follow it stems from the internal data model not + // tracking selected streams in any reasonable way. We should update that. public WebBackendConnectionRead webBackendGetConnection(final WebBackendConnectionRequestBody webBackendConnectionRequestBody) throws ConfigNotFoundException, IOException, JsonValidationException { final ConnectionIdRequestBody connectionIdRequestBody = new ConnectionIdRequestBody() .connectionId(webBackendConnectionRequestBody.getConnectionId()); final ConnectionRead connection = connectionsHandler.getConnection(connectionIdRequestBody.getConnectionId()); - - final Optional discovered; + /* + * This variable contains all configuration but will be missing streams that were not selected. + */ + final AirbyteCatalog configuredCatalog = connection.getSyncCatalog(); + /* + * This catalog represents the full catalog that was used to create the configured catalog. It will + * have all streams that were present at the time. It will have no configuration set. + */ + final Optional catalogUsedToMakeConfiguredCatalog = connectionsHandler + .getConnectionAirbyteCatalog(webBackendConnectionRequestBody.getConnectionId()); + + /* + * This catalog represents the full catalog that exists now for the source. It will have no + * configuration set. 
+ */ + final Optional refreshedCatalog; if (MoreBooleans.isTruthy(webBackendConnectionRequestBody.getWithRefreshedCatalog())) { - final SourceDiscoverSchemaRequestBody discoverSchemaReadReq = - new SourceDiscoverSchemaRequestBody().sourceId(connection.getSourceId()).disableCache(true); - final SourceDiscoverSchemaRead discoverSchema = schedulerHandler.discoverSchemaForSourceFromSourceId(discoverSchemaReadReq); - - discovered = Optional.of(discoverSchema.getCatalog()); - connection.setSourceCatalogId(discoverSchema.getCatalogId()); + refreshedCatalog = getRefreshedSchema(connection.getSourceId()); } else { - discovered = connectionsHandler.getConnectionAirbyteCatalog(webBackendConnectionRequestBody.getConnectionId()); + refreshedCatalog = Optional.empty(); } - final AirbyteCatalog original = connection.getSyncCatalog(); - if (discovered.isPresent()) { - final AirbyteCatalog combined = updateSchemaWithDiscovery(original, discovered.get()); - connection.setSyncCatalog(combined); + + final CatalogDiff diff; + final AirbyteCatalog syncCatalog; + if (refreshedCatalog.isPresent()) { + connection.setSourceCatalogId(refreshedCatalog.get().getCatalogId()); + /* + * constructs a full picture of all existing configured + all new / updated streams in the newest + * catalog. + */ + syncCatalog = updateSchemaWithDiscovery(configuredCatalog, refreshedCatalog.get().getCatalog()); + /* + * Diffing the catalog used to make the configured catalog gives us the clearest diff between the + * schema when the configured catalog was made and now. In the case where we do not have the + * original catalog used to make the configured catalog, we make due, but using the configured + * catalog itself. The drawback is that any stream that was not selected in the configured catalog + * but was present at time of configuration will appear in the diff as an added stream which is + * confusing. We need to figure out why source_catalog_id is not always populated in the db. 
+ */ + diff = ConnectionsHandler.getDiff(catalogUsedToMakeConfiguredCatalog.orElse(configuredCatalog), refreshedCatalog.get().getCatalog()); + } else if (catalogUsedToMakeConfiguredCatalog.isPresent()) { + // reconstructs a full picture of the full schema at the time the catalog was configured. + syncCatalog = updateSchemaWithDiscovery(configuredCatalog, catalogUsedToMakeConfiguredCatalog.get()); + // diff not relevant if there was no refresh. + diff = null; } else { - connection.setSyncCatalog(original); + // fallback. over time this should be rarely used because source_catalog_id should always be set. + syncCatalog = configuredCatalog; + // diff not relevant if there was no refresh. + diff = null; } - return buildWebBackendConnectionRead(connection); + + connection.setSyncCatalog(syncCatalog); + return buildWebBackendConnectionRead(connection).catalogDiff(diff); } + private Optional getRefreshedSchema(final UUID sourceId) + throws JsonValidationException, ConfigNotFoundException, IOException { + final SourceDiscoverSchemaRequestBody discoverSchemaReadReq = new SourceDiscoverSchemaRequestBody() + .sourceId(sourceId) + .disableCache(true); + return Optional.ofNullable(schedulerHandler.discoverSchemaForSourceFromSourceId(discoverSchemaReadReq)); + } + + /** + * Applies existing configurations to a newly discovered catalog. For example, if the users stream + * is in the old and new catalog, any configuration that was previously set for users, we add to the + * new catalog. 
+ * + * @param original fully configured, original catalog + * @param discovered newly discovered catalog, no configurations set + * @return merged catalog, most up-to-date schema with most up-to-date configurations from old + * catalog + */ @VisibleForTesting protected static AirbyteCatalog updateSchemaWithDiscovery(final AirbyteCatalog original, final AirbyteCatalog discovered) { - final Map originalStreamsByName = original.getStreams() + /* + * We can't directly use s.getStream() as the key, because it contains a bunch of other fields, so + * we just define a quick-and-dirty record class. + */ + final Map streamDescriptorToOriginalStream = original.getStreams() .stream() - .collect(toMap(s -> s.getStream().getName(), s -> s)); + .collect(toMap(s -> new Stream(s.getStream().getName(), s.getStream().getNamespace()), s -> s)); final List streams = new ArrayList<>(); - for (final AirbyteStreamAndConfiguration s : discovered.getStreams()) { - final AirbyteStream stream = s.getStream(); - final AirbyteStreamAndConfiguration originalStream = originalStreamsByName.get(stream.getName()); + for (final AirbyteStreamAndConfiguration discoveredStream : discovered.getStreams()) { + final AirbyteStream stream = discoveredStream.getStream(); + final AirbyteStreamAndConfiguration originalStream = streamDescriptorToOriginalStream.get(new Stream(stream.getName(), stream.getNamespace())); final AirbyteStreamConfiguration outputStreamConfig; if (originalStream != null) { final AirbyteStreamConfiguration originalStreamConfig = originalStream.getConfig(); - final AirbyteStreamConfiguration discoveredStreamConfig = s.getConfig(); + final AirbyteStreamConfiguration discoveredStreamConfig = discoveredStream.getConfig(); outputStreamConfig = new AirbyteStreamConfiguration(); if (stream.getSupportedSyncModes().contains(originalStreamConfig.getSyncMode())) { @@ -256,9 +329,9 @@ protected static AirbyteCatalog updateSchemaWithDiscovery(final AirbyteCatalog o } 
outputStreamConfig.setAliasName(originalStreamConfig.getAliasName()); - outputStreamConfig.setSelected(true); + outputStreamConfig.setSelected(originalStream.getConfig().getSelected()); } else { - outputStreamConfig = s.getConfig(); + outputStreamConfig = discoveredStream.getConfig(); outputStreamConfig.setSelected(false); } final AirbyteStreamAndConfiguration outputStream = new AirbyteStreamAndConfiguration() @@ -288,14 +361,26 @@ public WebBackendConnectionRead webBackendUpdateConnection(final WebBackendConne connectionRead = connectionsHandler.updateConnection(connectionUpdate); if (needReset) { - eventRunner.synchronousResetConnection(webBackendConnectionUpdate.getConnectionId()); - eventRunner.startNewManualSync(webBackendConnectionUpdate.getConnectionId()); + ManualOperationResult manualOperationResult = eventRunner.synchronousResetConnection( + webBackendConnectionUpdate.getConnectionId(), + // TODO (https://github.com/airbytehq/airbyte/issues/12741): change this to only get new/updated + // streams, instead of all + configRepository.getAllStreamsForConnection(webBackendConnectionUpdate.getConnectionId())); + verifyManualOperationResult(manualOperationResult); + manualOperationResult = eventRunner.startNewManualSync(webBackendConnectionUpdate.getConnectionId()); + verifyManualOperationResult(manualOperationResult); connectionRead = connectionsHandler.getConnection(connectionUpdate.getConnectionId()); } return buildWebBackendConnectionRead(connectionRead); } + private void verifyManualOperationResult(final ManualOperationResult manualOperationResult) throws IllegalStateException { + if (manualOperationResult.getFailingReason().isPresent()) { + throw new IllegalStateException(manualOperationResult.getFailingReason().get()); + } + } + private List createOperations(final WebBackendConnectionCreate webBackendConnectionCreate) throws JsonValidationException, ConfigNotFoundException, IOException { final List operationIds = new ArrayList<>(); @@ -326,15 +411,6 @@ 
private List updateOperations(final WebBackendConnectionUpdate webBackendC return operationIds; } - private UUID getWorkspaceIdForConnection(final UUID connectionId) throws JsonValidationException, ConfigNotFoundException, IOException { - final UUID sourceId = connectionsHandler.getConnection(connectionId).getSourceId(); - return getWorkspaceIdForSource(sourceId); - } - - private UUID getWorkspaceIdForSource(final UUID sourceId) throws JsonValidationException, ConfigNotFoundException, IOException { - return sourceHandler.getSource(new SourceIdRequestBody().sourceId(sourceId)).getWorkspaceId(); - } - @VisibleForTesting protected static OperationCreate toOperationCreate(final WebBackendOperationCreateOrUpdate operationCreateOrUpdate) { final OperationCreate operationCreate = new OperationCreate(); @@ -412,4 +488,13 @@ protected static ConnectionSearch toConnectionSearch(final WebBackendConnectionS .status(webBackendConnectionSearch.getStatus()); } + /** + * Equivalent to {@see io.airbyte.integrations.base.AirbyteStreamNameNamespacePair}. Intentionally + * not using that class because it doesn't make sense for airbyte-server to depend on + * base-java-integration. 
+ */ + private record Stream(String name, String namespace) { + + } + } diff --git a/airbyte-server/src/main/java/io/airbyte/server/handlers/helpers/CatalogConverter.java b/airbyte-server/src/main/java/io/airbyte/server/handlers/helpers/CatalogConverter.java index e4584ed77ab5..ad2ae7e4e194 100644 --- a/airbyte-server/src/main/java/io/airbyte/server/handlers/helpers/CatalogConverter.java +++ b/airbyte-server/src/main/java/io/airbyte/server/handlers/helpers/CatalogConverter.java @@ -4,7 +4,10 @@ package io.airbyte.server.handlers.helpers; +import io.airbyte.api.model.generated.AirbyteCatalog; +import io.airbyte.api.model.generated.AirbyteStream; import io.airbyte.commons.enums.Enums; +import io.airbyte.commons.json.Jsons; import io.airbyte.commons.text.Names; import java.util.List; import java.util.stream.Collectors; @@ -83,6 +86,29 @@ public static io.airbyte.api.model.generated.AirbyteCatalog toApi(final io.airby return new io.airbyte.api.model.generated.AirbyteCatalog().streams(streams); } + /** + * Converts the API catalog model into a protocol catalog. Note: returns all streams, regardless of + * selected status. See {@link CatalogConverter#toProtocol(AirbyteStream)} for context. + * + * @param catalog api catalog + * @return protocol catalog + */ + public static io.airbyte.protocol.models.ConfiguredAirbyteCatalog toProtocolKeepAllStreams(final io.airbyte.api.model.generated.AirbyteCatalog catalog) { + final AirbyteCatalog clone = Jsons.clone(catalog); + clone.getStreams().forEach(stream -> stream.getConfig().setSelected(true)); + return toProtocol(clone); + } + + /** + * Converts the API catalog model into a protocol catalog. Note: only streams marked as selected + * will be returned. 
This is included in this converter as the API model always carries all the + * streams it has access to and then marks the ones that should not be used as not selected, while + * the protocol version just uses the presence of the streams as evidence that it should be + * included. + * + * @param catalog api catalog + * @return protocol catalog + */ public static io.airbyte.protocol.models.ConfiguredAirbyteCatalog toProtocol(final io.airbyte.api.model.generated.AirbyteCatalog catalog) { final List streams = catalog.getStreams() .stream() diff --git a/airbyte-server/src/main/java/io/airbyte/server/handlers/helpers/ConnectionMatcher.java b/airbyte-server/src/main/java/io/airbyte/server/handlers/helpers/ConnectionMatcher.java index 22c2e7777d8b..645c9daf6efb 100644 --- a/airbyte-server/src/main/java/io/airbyte/server/handlers/helpers/ConnectionMatcher.java +++ b/airbyte-server/src/main/java/io/airbyte/server/handlers/helpers/ConnectionMatcher.java @@ -40,6 +40,7 @@ public ConnectionRead match(final ConnectionRead query) { fromSearch.resourceRequirements(query.getResourceRequirements()); fromSearch.syncCatalog(query.getSyncCatalog()); fromSearch.operationIds(query.getOperationIds()); + fromSearch.sourceCatalogId(query.getSourceCatalogId()); return fromSearch; } diff --git a/airbyte-server/src/main/java/io/airbyte/server/version_mismatch/VersionMismatchServer.java b/airbyte-server/src/main/java/io/airbyte/server/version_mismatch/VersionMismatchServer.java deleted file mode 100644 index b15bf640424a..000000000000 --- a/airbyte-server/src/main/java/io/airbyte/server/version_mismatch/VersionMismatchServer.java +++ /dev/null @@ -1,95 +0,0 @@ -/* - * Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.server.version_mismatch; - -import com.google.common.collect.ImmutableMap; -import io.airbyte.commons.json.Jsons; -import io.airbyte.commons.version.AirbyteVersion; -import io.airbyte.server.CorsFilter; -import io.airbyte.server.ServerRunnable; -import java.io.IOException; -import java.util.Map; -import javax.servlet.http.HttpServlet; -import javax.servlet.http.HttpServletRequest; -import javax.servlet.http.HttpServletResponse; -import org.eclipse.jetty.server.Server; -import org.eclipse.jetty.servlet.ServletContextHandler; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * Serves an error for any call. This is only used if the server has a different version than the - * stored version in the database, which means that there is a "version mismatch". When a version - * mismatch occurs, a migration is required to upgrade the database. Until then, we show errors - * using this server in order to prevent getting into a bad state. - */ -public class VersionMismatchServer implements ServerRunnable { - - private static final Logger LOGGER = LoggerFactory.getLogger(VersionMismatchServer.class); - private final AirbyteVersion version1; - private final AirbyteVersion version2; - private final int port; - - public VersionMismatchServer(final AirbyteVersion version1, final AirbyteVersion version2, final int port) { - this.version1 = version1; - this.version2 = version2; - this.port = port; - } - - @Override - public void start() throws Exception { - final Server server = getServer(); - server.start(); - server.join(); - } - - protected Server getServer() { - final String errorMessage = AirbyteVersion.getErrorMessage(version1, version2); - LOGGER.error(errorMessage); - final Server server = new Server(port); - VersionMismatchServlet.ERROR_MESSAGE = errorMessage; - final ServletContextHandler handler = new ServletContextHandler(); - handler.addServlet(VersionMismatchServlet.class, "/*"); - server.setHandler(handler); - - return 
server; - } - - public static class VersionMismatchServlet extends HttpServlet { - - // this error message should be overwritten before any requests are served - public static String ERROR_MESSAGE = "Versions don't match!"; - - public void doPost(final HttpServletRequest request, final HttpServletResponse response) throws IOException { - this.serveDefaultRequest(response); - } - - public void doGet(final HttpServletRequest request, final HttpServletResponse response) throws IOException { - this.serveDefaultRequest(response); - } - - public void doOptions(final HttpServletRequest request, final HttpServletResponse response) throws IOException { - this.addCorsHeaders(response); - } - - private void serveDefaultRequest(final HttpServletResponse response) throws IOException { - final var outputMap = ImmutableMap.of("error", ERROR_MESSAGE); - - this.addCorsHeaders(response); - - response.setContentType("application/json"); - response.setStatus(HttpServletResponse.SC_INTERNAL_SERVER_ERROR); - response.getWriter().println(Jsons.serialize(outputMap)); - } - - private void addCorsHeaders(final HttpServletResponse response) { - for (final Map.Entry entry : CorsFilter.MAP.entrySet()) { - response.setHeader(entry.getKey(), entry.getValue()); - } - } - - } - -} diff --git a/airbyte-server/src/test/java/io/airbyte/server/apis/ConfigurationApiTest.java b/airbyte-server/src/test/java/io/airbyte/server/apis/ConfigurationApiTest.java index efcdde44f6c2..36f6b211ad9d 100644 --- a/airbyte-server/src/test/java/io/airbyte/server/apis/ConfigurationApiTest.java +++ b/airbyte-server/src/test/java/io/airbyte/server/apis/ConfigurationApiTest.java @@ -18,6 +18,7 @@ import io.airbyte.config.persistence.ConfigRepository; import io.airbyte.config.persistence.SecretsRepositoryReader; import io.airbyte.config.persistence.SecretsRepositoryWriter; +import io.airbyte.config.persistence.StatePersistence; import io.airbyte.db.Database; import io.airbyte.scheduler.client.EventRunner; import 
io.airbyte.scheduler.client.SynchronousSchedulerClient; @@ -45,6 +46,7 @@ void testImportDefinitions() { mock(FileTtlManager.class), mock(Database.class), mock(Database.class), + mock(StatePersistence.class), mock(TrackingClient.class), WorkerEnvironment.DOCKER, LogConfigs.EMPTY, @@ -54,7 +56,7 @@ void testImportDefinitions() { mock(EventRunner.class), mock(Flyway.class), mock(Flyway.class)); - assertTrue(configurationApi.canImportDefinitons()); + assertTrue(configurationApi.canImportDefinitions()); } } diff --git a/airbyte-server/src/test/java/io/airbyte/server/converters/CatalogDiffConvertersTest.java b/airbyte-server/src/test/java/io/airbyte/server/converters/CatalogDiffConvertersTest.java new file mode 100644 index 000000000000..d9108cd6f577 --- /dev/null +++ b/airbyte-server/src/test/java/io/airbyte/server/converters/CatalogDiffConvertersTest.java @@ -0,0 +1,24 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.server.converters; + +import static org.junit.jupiter.api.Assertions.assertTrue; + +import io.airbyte.api.model.generated.FieldTransform; +import io.airbyte.api.model.generated.StreamTransform; +import io.airbyte.commons.enums.Enums; +import io.airbyte.protocol.models.transform_models.FieldTransformType; +import io.airbyte.protocol.models.transform_models.StreamTransformType; +import org.junit.jupiter.api.Test; + +class CatalogDiffConvertersTest { + + @Test + void testEnumConversion() { + assertTrue(Enums.isCompatible(StreamTransform.TransformTypeEnum.class, StreamTransformType.class)); + assertTrue(Enums.isCompatible(FieldTransform.TransformTypeEnum.class, FieldTransformType.class)); + } + +} diff --git a/airbyte-server/src/test/java/io/airbyte/server/converters/JobConverterTest.java b/airbyte-server/src/test/java/io/airbyte/server/converters/JobConverterTest.java index 1ef32d8f919a..5e37872494f5 100644 --- a/airbyte-server/src/test/java/io/airbyte/server/converters/JobConverterTest.java +++ 
b/airbyte-server/src/test/java/io/airbyte/server/converters/JobConverterTest.java @@ -5,6 +5,7 @@ package io.airbyte.server.converters; import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNull; import static org.junit.jupiter.api.Assertions.assertTrue; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; @@ -25,28 +26,38 @@ import io.airbyte.api.model.generated.JobRead; import io.airbyte.api.model.generated.JobWithAttemptsRead; import io.airbyte.api.model.generated.LogRead; +import io.airbyte.api.model.generated.ResetConfig; import io.airbyte.api.model.generated.SourceDefinitionRead; +import io.airbyte.api.model.generated.StreamDescriptor; import io.airbyte.commons.enums.Enums; import io.airbyte.commons.version.AirbyteVersion; import io.airbyte.config.Configs.WorkerEnvironment; import io.airbyte.config.FailureReason; import io.airbyte.config.FailureReason.FailureOrigin; import io.airbyte.config.FailureReason.FailureType; -import io.airbyte.config.JobCheckConnectionConfig; import io.airbyte.config.JobConfig; +import io.airbyte.config.JobConfig.ConfigType; import io.airbyte.config.JobOutput; import io.airbyte.config.JobOutput.OutputType; +import io.airbyte.config.JobResetConnectionConfig; +import io.airbyte.config.JobSyncConfig; +import io.airbyte.config.ResetSourceConfiguration; import io.airbyte.config.StandardSyncOutput; import io.airbyte.config.StandardSyncSummary; import io.airbyte.config.StreamSyncStats; import io.airbyte.config.SyncStats; import io.airbyte.config.helpers.LogConfigs; +import io.airbyte.protocol.models.AirbyteStream; +import io.airbyte.protocol.models.ConfiguredAirbyteCatalog; +import io.airbyte.protocol.models.ConfiguredAirbyteStream; import io.airbyte.scheduler.models.Attempt; import io.airbyte.scheduler.models.AttemptStatus; import io.airbyte.scheduler.models.Job; import io.airbyte.scheduler.models.JobStatus; import java.nio.file.Path; import 
java.util.ArrayList; +import java.util.Collections; +import java.util.List; import java.util.Optional; import java.util.UUID; import java.util.stream.Collectors; @@ -60,10 +71,7 @@ class JobConverterTest { private static final String JOB_CONFIG_ID = "123"; private static final JobStatus JOB_STATUS = JobStatus.RUNNING; private static final AttemptStatus ATTEMPT_STATUS = AttemptStatus.RUNNING; - private static final JobConfig.ConfigType CONFIG_TYPE = JobConfig.ConfigType.CHECK_CONNECTION_SOURCE; - private static final JobConfig JOB_CONFIG = new JobConfig() - .withConfigType(CONFIG_TYPE) - .withCheckConnection(new JobCheckConnectionConfig()); + private static final JobConfig.ConfigType CONFIG_TYPE = ConfigType.SYNC; private static final Path LOG_PATH = Path.of("log_path"); private static final long CREATED_AT = System.currentTimeMillis() / 1000; private static final long RECORDS_EMITTED = 15L; @@ -76,6 +84,12 @@ class JobConverterTest { private static final String FAILURE_STACKTRACE = "stacktrace"; private static final boolean PARTIAL_SUCCESS = false; + private static final JobConfig JOB_CONFIG = new JobConfig() + .withConfigType(CONFIG_TYPE) + .withSync(new JobSyncConfig().withConfiguredAirbyteCatalog(new ConfiguredAirbyteCatalog().withStreams(List.of( + new ConfiguredAirbyteStream().withStream(new AirbyteStream().withName("users")), + new ConfiguredAirbyteStream().withStream(new AirbyteStream().withName("accounts")))))); + private static final JobOutput JOB_OUTPUT = new JobOutput() .withOutputType(OutputType.SYNC) .withSync(new StandardSyncOutput() @@ -104,7 +118,7 @@ class JobConverterTest { .id(JOB_ID) .configId(JOB_CONFIG_ID) .status(io.airbyte.api.model.generated.JobStatus.RUNNING) - .configType(JobConfigType.CHECK_CONNECTION_SOURCE) + .configType(JobConfigType.SYNC) .createdAt(CREATED_AT) .updatedAt(CREATED_AT)) .attempts(Lists.newArrayList(new AttemptInfoRead() @@ -149,7 +163,7 @@ class JobConverterTest { .id(JOB_ID) .configId(JOB_CONFIG_ID) 
.status(io.airbyte.api.model.generated.JobStatus.RUNNING) - .configType(JobConfigType.CHECK_CONNECTION_SOURCE) + .configType(JobConfigType.SYNC) .airbyteVersion(airbyteVersion.serialize()) .sourceDefinition(sourceDefinitionRead) .destinationDefinition(destinationDefinitionRead); @@ -192,31 +206,75 @@ public void setUp() { } @Test - public void testGetJobInfoRead() { + void testGetJobInfoRead() { assertEquals(JOB_INFO, jobConverter.getJobInfoRead(job)); } @Test - public void testGetDebugJobInfoRead() { - assertEquals(JOB_DEBUG_INFO, jobConverter.getDebugJobInfoRead(JOB_INFO, sourceDefinitionRead, destinationDefinitionRead, airbyteVersion)); + void testGetDebugJobInfoRead() { + assertEquals(JOB_DEBUG_INFO, JobConverter.getDebugJobInfoRead(JOB_INFO, sourceDefinitionRead, destinationDefinitionRead, airbyteVersion)); } @Test - public void testGetJobWithAttemptsRead() { - assertEquals(JOB_WITH_ATTEMPTS_READ, jobConverter.getJobWithAttemptsRead(job)); + void testGetJobWithAttemptsRead() { + assertEquals(JOB_WITH_ATTEMPTS_READ, JobConverter.getJobWithAttemptsRead(job)); } @Test - public void testGetJobRead() { - final JobWithAttemptsRead jobReadActual = jobConverter.getJobWithAttemptsRead(job); + void testGetJobRead() { + final JobWithAttemptsRead jobReadActual = JobConverter.getJobWithAttemptsRead(job); assertEquals(JOB_WITH_ATTEMPTS_READ, jobReadActual); } @Test - public void testEnumConversion() { + void testEnumConversion() { assertTrue(Enums.isCompatible(JobConfig.ConfigType.class, JobConfigType.class)); assertTrue(Enums.isCompatible(JobStatus.class, io.airbyte.api.model.generated.JobStatus.class)); assertTrue(Enums.isCompatible(AttemptStatus.class, io.airbyte.api.model.generated.AttemptStatus.class)); } + // this test intentionally only looks at the reset config as the rest is the same here. 
+ @Test + void testResetJobIncludesResetConfig() { + final JobConfig resetConfig = new JobConfig() + .withConfigType(ConfigType.RESET_CONNECTION) + .withResetConnection(new JobResetConnectionConfig().withResetSourceConfiguration(new ResetSourceConfiguration().withStreamsToReset(List.of( + new io.airbyte.protocol.models.StreamDescriptor().withName("users"), + new io.airbyte.protocol.models.StreamDescriptor().withName("accounts"))))); + final Job resetJob = new Job( + JOB_ID, + ConfigType.RESET_CONNECTION, + JOB_CONFIG_ID, + resetConfig, + Collections.emptyList(), + JobStatus.SUCCEEDED, + CREATED_AT, + CREATED_AT, + CREATED_AT); + + final ResetConfig expectedResetConfig = new ResetConfig().streamsToReset(List.of( + new StreamDescriptor().name("users"), + new StreamDescriptor().name("accounts"))); + assertEquals(expectedResetConfig, jobConverter.getJobInfoRead(resetJob).getJob().getResetConfig()); + } + + @Test + void testResetJobExcludesConfigIfNull() { + final JobConfig resetConfig = new JobConfig() + .withConfigType(ConfigType.RESET_CONNECTION) + .withResetConnection(new JobResetConnectionConfig().withResetSourceConfiguration(null)); + final Job resetJob = new Job( + JOB_ID, + ConfigType.RESET_CONNECTION, + JOB_CONFIG_ID, + resetConfig, + Collections.emptyList(), + JobStatus.SUCCEEDED, + CREATED_AT, + CREATED_AT, + CREATED_AT); + + assertNull(jobConverter.getJobInfoRead(resetJob).getJob().getResetConfig()); + } + } diff --git a/airbyte-server/src/test/java/io/airbyte/server/handlers/ConnectionsHandlerTest.java b/airbyte-server/src/test/java/io/airbyte/server/handlers/ConnectionsHandlerTest.java index 5a4128595e35..17ffa3386a58 100644 --- a/airbyte-server/src/test/java/io/airbyte/server/handlers/ConnectionsHandlerTest.java +++ b/airbyte-server/src/test/java/io/airbyte/server/handlers/ConnectionsHandlerTest.java @@ -115,7 +115,8 @@ void setUp() throws IOException, JsonValidationException, ConfigNotFoundExceptio .withOperationIds(List.of(operationId)) 
.withManual(false) .withSchedule(ConnectionHelpers.generateBasicSchedule()) - .withResourceRequirements(ConnectionHelpers.TESTING_RESOURCE_REQUIREMENTS); + .withResourceRequirements(ConnectionHelpers.TESTING_RESOURCE_REQUIREMENTS) + .withSourceCatalogId(UUID.randomUUID()); standardSyncDeleted = new StandardSync() .withConnectionId(connectionId) .withName("presto to hudi2") @@ -194,7 +195,8 @@ void testCreateConnection() throws JsonValidationException, ConfigNotFoundExcept .cpuRequest(standardSync.getResourceRequirements().getCpuRequest()) .cpuLimit(standardSync.getResourceRequirements().getCpuLimit()) .memoryRequest(standardSync.getResourceRequirements().getMemoryRequest()) - .memoryLimit(standardSync.getResourceRequirements().getMemoryLimit())); + .memoryLimit(standardSync.getResourceRequirements().getMemoryLimit())) + .sourceCatalogId(standardSync.getSourceCatalogId()); final ConnectionRead actualConnectionRead = connectionsHandler.createConnection(connectionCreate); @@ -343,7 +345,8 @@ void testUpdateConnection() throws JsonValidationException, ConfigNotFoundExcept standardSync.getConnectionId(), standardSync.getSourceId(), standardSync.getDestinationId(), - standardSync.getOperationIds()) + standardSync.getOperationIds(), + newSourceCatalogId) .schedule(null) .syncCatalog(catalog) .status(ConnectionStatus.INACTIVE); diff --git a/airbyte-server/src/test/java/io/airbyte/server/handlers/SchedulerHandlerTest.java b/airbyte-server/src/test/java/io/airbyte/server/handlers/SchedulerHandlerTest.java index 4e2180b7965b..f67f6e0321cd 100644 --- a/airbyte-server/src/test/java/io/airbyte/server/handlers/SchedulerHandlerTest.java +++ b/airbyte-server/src/test/java/io/airbyte/server/handlers/SchedulerHandlerTest.java @@ -19,10 +19,8 @@ import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; -import com.google.common.collect.ImmutableMap; import io.airbyte.api.model.generated.CheckConnectionRead; import 
io.airbyte.api.model.generated.ConnectionIdRequestBody; -import io.airbyte.api.model.generated.ConnectionState; import io.airbyte.api.model.generated.DestinationCoreConfig; import io.airbyte.api.model.generated.DestinationDefinitionIdWithWorkspaceId; import io.airbyte.api.model.generated.DestinationDefinitionSpecificationRead; @@ -49,16 +47,17 @@ import io.airbyte.config.StandardCheckConnectionOutput; import io.airbyte.config.StandardDestinationDefinition; import io.airbyte.config.StandardSourceDefinition; -import io.airbyte.config.State; import io.airbyte.config.helpers.LogConfigs; import io.airbyte.config.persistence.ConfigNotFoundException; import io.airbyte.config.persistence.ConfigRepository; import io.airbyte.config.persistence.SecretsRepositoryWriter; +import io.airbyte.config.persistence.StatePersistence; import io.airbyte.protocol.models.AirbyteCatalog; import io.airbyte.protocol.models.CatalogHelpers; import io.airbyte.protocol.models.ConnectorSpecification; import io.airbyte.protocol.models.Field; import io.airbyte.protocol.models.JsonSchemaType; +import io.airbyte.protocol.models.StreamDescriptor; import io.airbyte.scheduler.client.EventRunner; import io.airbyte.scheduler.client.SynchronousJobMetadata; import io.airbyte.scheduler.client.SynchronousResponse; @@ -76,6 +75,7 @@ import java.io.IOException; import java.net.URI; import java.util.HashMap; +import java.util.List; import java.util.Optional; import java.util.UUID; import org.junit.jupiter.api.BeforeEach; @@ -115,6 +115,8 @@ class SchedulerHandlerTest { .withChangelogUrl(Exceptions.toRuntime(() -> new URI("https://google.com"))) .withConnectionSpecification(Jsons.jsonNode(new HashMap<>())); + private static final StreamDescriptor STREAM_DESCRIPTOR = new StreamDescriptor().withName("1"); + private SchedulerHandler schedulerHandler; private ConfigRepository configRepository; private SecretsRepositoryWriter secretsRepositoryWriter; @@ -126,6 +128,7 @@ class SchedulerHandlerTest { private 
JobPersistence jobPersistence; private EventRunner eventRunner; private JobConverter jobConverter; + private StatePersistence statePersistence; @BeforeEach void setup() { @@ -141,6 +144,7 @@ void setup() { configRepository = mock(ConfigRepository.class); secretsRepositoryWriter = mock(SecretsRepositoryWriter.class); jobPersistence = mock(JobPersistence.class); + statePersistence = mock(StatePersistence.class); eventRunner = mock(EventRunner.class); jobConverter = spy(new JobConverter(WorkerEnvironment.DOCKER, LogConfigs.EMPTY)); @@ -555,25 +559,6 @@ void testDiscoverSchemaForSourceFromSourceCreateFailed() throws JsonValidationEx verify(synchronousSchedulerClient).createDiscoverSchemaJob(source, SOURCE_DOCKER_IMAGE); } - @Test - void testGetCurrentState() throws IOException { - final UUID connectionId = UUID.randomUUID(); - final State state = new State().withState(Jsons.jsonNode(ImmutableMap.of("checkpoint", 1))); - when(configRepository.getConnectionState(connectionId)).thenReturn(Optional.of(state)); - - final ConnectionState connectionState = schedulerHandler.getState(new ConnectionIdRequestBody().connectionId(connectionId)); - assertEquals(new ConnectionState().connectionId(connectionId).state(state.getState()), connectionState); - } - - @Test - void testGetCurrentStateEmpty() throws IOException { - final UUID connectionId = UUID.randomUUID(); - when(configRepository.getConnectionState(connectionId)).thenReturn(Optional.empty()); - - final ConnectionState connectionState = schedulerHandler.getState(new ConnectionIdRequestBody().connectionId(connectionId)); - assertEquals(new ConnectionState().connectionId(connectionId), connectionState); - } - @Test void testEnumConversion() { assertTrue(Enums.isCompatible(StandardCheckConnectionOutput.Status.class, CheckConnectionRead.StatusEnum.class)); @@ -603,7 +588,7 @@ void testSyncConnection() throws IOException { } @Test - void testResetConnection() throws IOException { + void testResetConnection() throws IOException, 
JsonValidationException, ConfigNotFoundException { final UUID connectionId = UUID.randomUUID(); final long jobId = 123L; @@ -613,7 +598,11 @@ void testResetConnection() throws IOException { .jobId(Optional.of(jobId)) .build(); - when(eventRunner.resetConnection(connectionId)) + final List streamDescriptors = List.of(STREAM_DESCRIPTOR); + when(configRepository.getAllStreamsForConnection(connectionId)) + .thenReturn(streamDescriptors); + + when(eventRunner.resetConnection(connectionId, streamDescriptors)) .thenReturn(manualOperationResult); doReturn(new JobInfoRead()) @@ -621,7 +610,7 @@ void testResetConnection() throws IOException { schedulerHandler.resetConnection(new ConnectionIdRequestBody().connectionId(connectionId)); - verify(eventRunner).resetConnection(connectionId); + verify(eventRunner).resetConnection(connectionId, streamDescriptors); } @Test diff --git a/airbyte-server/src/test/java/io/airbyte/server/handlers/StateHandlerTest.java b/airbyte-server/src/test/java/io/airbyte/server/handlers/StateHandlerTest.java new file mode 100644 index 000000000000..e99303875ce5 --- /dev/null +++ b/airbyte-server/src/test/java/io/airbyte/server/handlers/StateHandlerTest.java @@ -0,0 +1,139 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+ */ + +package io.airbyte.server.handlers; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +import com.fasterxml.jackson.databind.JsonNode; +import io.airbyte.api.model.generated.ConnectionIdRequestBody; +import io.airbyte.api.model.generated.ConnectionState; +import io.airbyte.api.model.generated.ConnectionStateType; +import io.airbyte.api.model.generated.GlobalState; +import io.airbyte.api.model.generated.StreamState; +import io.airbyte.commons.enums.Enums; +import io.airbyte.commons.json.Jsons; +import io.airbyte.config.StateType; +import io.airbyte.config.StateWrapper; +import io.airbyte.config.persistence.StatePersistence; +import io.airbyte.protocol.models.AirbyteGlobalState; +import io.airbyte.protocol.models.AirbyteStateMessage; +import io.airbyte.protocol.models.AirbyteStateMessage.AirbyteStateType; +import io.airbyte.protocol.models.AirbyteStreamState; +import io.airbyte.protocol.models.StreamDescriptor; +import io.airbyte.server.converters.ProtocolConverters; +import java.io.IOException; +import java.util.List; +import java.util.Optional; +import java.util.UUID; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +public class StateHandlerTest { + + public static final UUID CONNECTION_ID = UUID.randomUUID(); + private static final JsonNode JSON_BLOB = Jsons.deserialize("{\"users\": 10}"); + public static final StreamDescriptor STREAM_DESCRIPTOR1 = new StreamDescriptor().withName("coffee"); + public static final StreamDescriptor STREAM_DESCRIPTOR2 = new StreamDescriptor().withName("tea"); + + private StateHandler stateHandler; + private StatePersistence statePersistence; + + @BeforeEach + void setup() { + statePersistence = mock(StatePersistence.class); + stateHandler = new StateHandler(statePersistence); + } + + @Test + void testGetCurrentStateEmpty() throws IOException { + 
when(statePersistence.getCurrentState(CONNECTION_ID)).thenReturn(Optional.empty()); + + final ConnectionState expected = new ConnectionState().connectionId(CONNECTION_ID).stateType(ConnectionStateType.NOT_SET).streamState(null); + final ConnectionState actual = stateHandler.getState(new ConnectionIdRequestBody().connectionId(CONNECTION_ID)); + assertEquals(expected, actual); + } + + @Test + void testGetLegacyState() throws IOException { + when(statePersistence.getCurrentState(CONNECTION_ID)).thenReturn(Optional.of( + new StateWrapper() + .withStateType(StateType.LEGACY) + .withLegacyState(JSON_BLOB))); + + final ConnectionState expected = new ConnectionState() + .connectionId(CONNECTION_ID) + .stateType(ConnectionStateType.LEGACY) + .streamState(null) + .state(JSON_BLOB); + final ConnectionState actual = stateHandler.getState(new ConnectionIdRequestBody().connectionId(CONNECTION_ID)); + assertEquals(expected, actual); + } + + @Test + void testGetGlobalState() throws IOException { + when(statePersistence.getCurrentState(CONNECTION_ID)).thenReturn(Optional.of( + new StateWrapper() + .withStateType(StateType.GLOBAL) + .withGlobal(new AirbyteStateMessage().withType(AirbyteStateType.GLOBAL).withGlobal(new AirbyteGlobalState() + .withSharedState(JSON_BLOB) + .withStreamStates(List.of( + new AirbyteStreamState().withStreamDescriptor(STREAM_DESCRIPTOR1).withStreamState(JSON_BLOB), + new AirbyteStreamState().withStreamDescriptor(STREAM_DESCRIPTOR2).withStreamState(JSON_BLOB))))))); + + final ConnectionState expected = new ConnectionState() + .connectionId(CONNECTION_ID) + .stateType(ConnectionStateType.GLOBAL) + .streamState(null) + .globalState(new GlobalState().sharedState(JSON_BLOB).streamStates(List.of( + new StreamState().streamDescriptor(ProtocolConverters.streamDescriptorToApi(STREAM_DESCRIPTOR1)).streamState(JSON_BLOB), + new StreamState().streamDescriptor(ProtocolConverters.streamDescriptorToApi(STREAM_DESCRIPTOR2)).streamState(JSON_BLOB)))); + final 
ConnectionState actual = stateHandler.getState(new ConnectionIdRequestBody().connectionId(CONNECTION_ID)); + assertEquals(expected, actual); + } + + @Test + void testGetStreamState() throws IOException { + when(statePersistence.getCurrentState(CONNECTION_ID)).thenReturn(Optional.of( + new StateWrapper() + .withStateType(StateType.STREAM) + .withStateMessages(List.of( + new AirbyteStateMessage() + .withType(AirbyteStateType.STREAM) + .withStream(new AirbyteStreamState().withStreamDescriptor(STREAM_DESCRIPTOR1).withStreamState(JSON_BLOB)), + new AirbyteStateMessage() + .withType(AirbyteStateType.STREAM) + .withStream(new AirbyteStreamState().withStreamDescriptor(STREAM_DESCRIPTOR2).withStreamState(JSON_BLOB)))))); + + final ConnectionState expected = new ConnectionState() + .connectionId(CONNECTION_ID) + .stateType(ConnectionStateType.STREAM) + .streamState(List.of( + new StreamState().streamDescriptor(ProtocolConverters.streamDescriptorToApi(STREAM_DESCRIPTOR1)).streamState(JSON_BLOB), + new StreamState().streamDescriptor(ProtocolConverters.streamDescriptorToApi(STREAM_DESCRIPTOR2)).streamState(JSON_BLOB))); + final ConnectionState actual = stateHandler.getState(new ConnectionIdRequestBody().connectionId(CONNECTION_ID)); + assertEquals(expected, actual); + } + + // the api type has an extra type, so the verifying the compatibility of the type conversion is more + // involved + @Test + void testEnumConversion() { + assertEquals(3, AirbyteStateType.class.getEnumConstants().length); + assertEquals(4, ConnectionStateType.class.getEnumConstants().length); + + // to AirbyteStateType => ConnectionStateType + assertEquals(ConnectionStateType.GLOBAL, Enums.convertTo(AirbyteStateType.GLOBAL, ConnectionStateType.class)); + assertEquals(ConnectionStateType.STREAM, Enums.convertTo(AirbyteStateType.STREAM, ConnectionStateType.class)); + assertEquals(ConnectionStateType.LEGACY, Enums.convertTo(AirbyteStateType.LEGACY, ConnectionStateType.class)); + + // to ConnectionStateType => 
AirbyteStateType + assertEquals(AirbyteStateType.GLOBAL, Enums.convertTo(ConnectionStateType.GLOBAL, AirbyteStateType.class)); + assertEquals(AirbyteStateType.STREAM, Enums.convertTo(ConnectionStateType.STREAM, AirbyteStateType.class)); + assertEquals(AirbyteStateType.LEGACY, Enums.convertTo(ConnectionStateType.LEGACY, AirbyteStateType.class)); + } + +} diff --git a/airbyte-server/src/test/java/io/airbyte/server/handlers/WebBackendConnectionsHandlerTest.java b/airbyte-server/src/test/java/io/airbyte/server/handlers/WebBackendConnectionsHandlerTest.java index da293306d38e..1fc867dc8da2 100644 --- a/airbyte-server/src/test/java/io/airbyte/server/handlers/WebBackendConnectionsHandlerTest.java +++ b/airbyte-server/src/test/java/io/airbyte/server/handlers/WebBackendConnectionsHandlerTest.java @@ -17,9 +17,11 @@ import com.google.common.collect.Lists; import io.airbyte.api.model.generated.AirbyteCatalog; +import io.airbyte.api.model.generated.AirbyteStream; import io.airbyte.api.model.generated.AirbyteStreamAndConfiguration; import io.airbyte.api.model.generated.AttemptRead; import io.airbyte.api.model.generated.AttemptStatus; +import io.airbyte.api.model.generated.CatalogDiff; import io.airbyte.api.model.generated.ConnectionCreate; import io.airbyte.api.model.generated.ConnectionIdRequestBody; import io.airbyte.api.model.generated.ConnectionRead; @@ -48,6 +50,9 @@ import io.airbyte.api.model.generated.SourceDiscoverSchemaRequestBody; import io.airbyte.api.model.generated.SourceIdRequestBody; import io.airbyte.api.model.generated.SourceRead; +import io.airbyte.api.model.generated.StreamDescriptor; +import io.airbyte.api.model.generated.StreamTransform; +import io.airbyte.api.model.generated.StreamTransform.TransformTypeEnum; import io.airbyte.api.model.generated.SyncMode; import io.airbyte.api.model.generated.SynchronousJobRead; import io.airbyte.api.model.generated.WebBackendConnectionCreate; @@ -77,13 +82,14 @@ import io.airbyte.server.helpers.SourceDefinitionHelpers; 
import io.airbyte.server.helpers.SourceHelpers; import io.airbyte.validation.json.JsonValidationException; -import io.airbyte.workers.helper.ConnectionHelper; +import io.airbyte.workers.temporal.TemporalClient.ManualOperationResult; import java.io.IOException; import java.lang.reflect.Method; import java.time.Instant; import java.util.Arrays; import java.util.Collections; import java.util.List; +import java.util.Optional; import java.util.Set; import java.util.UUID; import java.util.stream.Collectors; @@ -104,12 +110,12 @@ class WebBackendConnectionsHandlerTest { private WebBackendConnectionRead expected; private WebBackendConnectionRead expectedWithNewSchema; private EventRunner eventRunner; - private ConnectionHelper connectionHelper; private ConfigRepository configRepository; @BeforeEach public void setup() throws IOException, JsonValidationException, ConfigNotFoundException { connectionsHandler = mock(ConnectionsHandler.class); + final StateHandler stateHandler = mock(StateHandler.class); operationsHandler = mock(OperationsHandler.class); final SourceHandler sourceHandler = mock(SourceHandler.class); final DestinationHandler destinationHandler = mock(DestinationHandler.class); @@ -117,8 +123,9 @@ public void setup() throws IOException, JsonValidationException, ConfigNotFoundE configRepository = mock(ConfigRepository.class); schedulerHandler = mock(SchedulerHandler.class); eventRunner = mock(EventRunner.class); - connectionHelper = mock(ConnectionHelper.class); - wbHandler = new WebBackendConnectionsHandler(connectionsHandler, + wbHandler = new WebBackendConnectionsHandler( + connectionsHandler, + stateHandler, sourceHandler, destinationHandler, jobHistoryHandler, @@ -227,6 +234,10 @@ public void setup() throws IOException, JsonValidationException, ConfigNotFoundE .latestSyncJobCreatedAt(expected.getLatestSyncJobCreatedAt()) .latestSyncJobStatus(expected.getLatestSyncJobStatus()) .isSyncing(expected.getIsSyncing()) + .catalogDiff(new 
CatalogDiff().transforms(List.of( + new StreamTransform().transformType(TransformTypeEnum.ADD_STREAM) + .streamDescriptor(new StreamDescriptor().name("users-data1")) + .updateStream(null)))) .resourceRequirements(new ResourceRequirements() .cpuRequest(ConnectionHelpers.TESTING_RESOURCE_REQUIREMENTS.getCpuRequest()) .cpuLimit(ConnectionHelpers.TESTING_RESOURCE_REQUIREMENTS.getCpuLimit()) @@ -349,7 +360,6 @@ public WebBackendConnectionRead testWebBackendGetConnection(final boolean withCa when(operationsHandler.listOperationsForConnection(connectionIdRequestBody)).thenReturn(operationReadList); return wbHandler.webBackendGetConnection(webBackendConnectionIdRequestBody); - } @Test @@ -467,10 +477,12 @@ public void testForConnectionCreateCompleteness() { .collect(Collectors.toSet()); final String message = - "If this test is failing, it means you added a field to ConnectionCreate!\nCongratulations, but you're not done yet..\n" - + "\tYou should update WebBackendConnectionsHandler::toConnectionCreate\n" - + "\tand ensure that the field is tested in WebBackendConnectionsHandlerTest::testToConnectionCreate\n" - + "Then you can add the field name here to make this test pass. Cheers!"; + """ + If this test is failing, it means you added a field to ConnectionCreate! + Congratulations, but you're not done yet.. + \tYou should update WebBackendConnectionsHandler::toConnectionCreate + \tand ensure that the field is tested in WebBackendConnectionsHandlerTest::testToConnectionCreate + Then you can add the field name here to make this test pass. 
Cheers!"""; assertEquals(handledMethods, methods, message); } @@ -486,10 +498,12 @@ public void testForConnectionUpdateCompleteness() { .collect(Collectors.toSet()); final String message = - "If this test is failing, it means you added a field to ConnectionUpdate!\nCongratulations, but you're not done yet..\n" - + "\tYou should update WebBackendConnectionsHandler::toConnectionUpdate\n" - + "\tand ensure that the field is tested in WebBackendConnectionsHandlerTest::testToConnectionUpdate\n" - + "Then you can add the field name here to make this test pass. Cheers!"; + """ + If this test is failing, it means you added a field to ConnectionUpdate! + Congratulations, but you're not done yet.. + \tYou should update WebBackendConnectionsHandler::toConnectionUpdate + \tand ensure that the field is tested in WebBackendConnectionsHandlerTest::testToConnectionUpdate + Then you can add the field name here to make this test pass. Cheers!"""; assertEquals(handledMethods, methods, message); } @@ -600,6 +614,13 @@ void testUpdateConnectionWithUpdatedSchema() throws JsonValidationException, Con when(connectionsHandler.updateConnection(any())).thenReturn(connectionRead); when(connectionsHandler.getConnection(expected.getConnectionId())).thenReturn(connectionRead); + final List connectionStreams = List.of(ConnectionHelpers.STREAM_DESCRIPTOR); + when(configRepository.getAllStreamsForConnection(expected.getConnectionId())).thenReturn(connectionStreams); + + final ManualOperationResult successfulResult = ManualOperationResult.builder().jobId(Optional.empty()).failingReason(Optional.empty()).build(); + when(eventRunner.synchronousResetConnection(any(), any())).thenReturn(successfulResult); + when(eventRunner.startNewManualSync(any())).thenReturn(successfulResult); + final WebBackendConnectionRead result = wbHandler.webBackendUpdateConnection(updateBody); assertEquals(expectedWithNewSchema.getSyncCatalog(), result.getSyncCatalog()); @@ -609,7 +630,7 @@ void 
testUpdateConnectionWithUpdatedSchema() throws JsonValidationException, Con verify(schedulerHandler, times(0)).syncConnection(connectionId); verify(connectionsHandler, times(1)).updateConnection(any()); final InOrder orderVerifier = inOrder(eventRunner); - orderVerifier.verify(eventRunner, times(1)).synchronousResetConnection(connectionId.getConnectionId()); + orderVerifier.verify(eventRunner, times(1)).synchronousResetConnection(connectionId.getConnectionId(), connectionStreams); orderVerifier.verify(eventRunner, times(1)).startNewManualSync(connectionId.getConnectionId()); } @@ -773,4 +794,52 @@ public void testUpdateSchemaWithDiscoveryMergeNewStream() { assertEquals(expected, actual); } + @Test + public void testUpdateSchemaWithNamespacedStreams() { + final AirbyteCatalog original = ConnectionHelpers.generateBasicApiCatalog(); + final AirbyteStreamAndConfiguration stream1Config = original.getStreams().get(0); + final AirbyteStream stream1 = stream1Config.getStream(); + final AirbyteStream stream2 = new AirbyteStream() + .name(stream1.getName()) + .namespace("second_namespace") + .jsonSchema(stream1.getJsonSchema()) + .defaultCursorField(stream1.getDefaultCursorField()) + .supportedSyncModes(stream1.getSupportedSyncModes()) + .sourceDefinedCursor(stream1.getSourceDefinedCursor()) + .sourceDefinedPrimaryKey(stream1.getSourceDefinedPrimaryKey()); + final AirbyteStreamAndConfiguration stream2Config = new AirbyteStreamAndConfiguration() + .config(stream1Config.getConfig()) + .stream(stream2); + original.getStreams().add(stream2Config); + + final AirbyteCatalog discovered = ConnectionHelpers.generateBasicApiCatalog(); + discovered.getStreams().get(0).getStream() + .name("stream1") + .jsonSchema(CatalogHelpers.fieldsToJsonSchema(Field.of("field1", JsonSchemaType.STRING))) + .supportedSyncModes(List.of(SyncMode.FULL_REFRESH)); + discovered.getStreams().get(0).getConfig() + .syncMode(SyncMode.FULL_REFRESH) + .cursorField(Collections.emptyList()) + 
.destinationSyncMode(DestinationSyncMode.OVERWRITE) + .primaryKey(Collections.emptyList()) + .aliasName("stream1"); + + final AirbyteCatalog expected = ConnectionHelpers.generateBasicApiCatalog(); + expected.getStreams().get(0).getStream() + .name("stream1") + .jsonSchema(CatalogHelpers.fieldsToJsonSchema(Field.of("field1", JsonSchemaType.STRING))) + .supportedSyncModes(List.of(SyncMode.FULL_REFRESH)); + expected.getStreams().get(0).getConfig() + .syncMode(SyncMode.FULL_REFRESH) + .cursorField(Collections.emptyList()) + .destinationSyncMode(DestinationSyncMode.OVERWRITE) + .primaryKey(Collections.emptyList()) + .aliasName("stream1") + .setSelected(false); + + final AirbyteCatalog actual = WebBackendConnectionsHandler.updateSchemaWithDiscovery(original, discovered); + + assertEquals(expected, actual); + } + } diff --git a/airbyte-server/src/test/java/io/airbyte/server/helpers/ConnectionHelpers.java b/airbyte-server/src/test/java/io/airbyte/server/helpers/ConnectionHelpers.java index 2612c6ba21c0..0e5aa62b3e15 100644 --- a/airbyte-server/src/test/java/io/airbyte/server/helpers/ConnectionHelpers.java +++ b/airbyte-server/src/test/java/io/airbyte/server/helpers/ConnectionHelpers.java @@ -27,6 +27,7 @@ import io.airbyte.protocol.models.DestinationSyncMode; import io.airbyte.protocol.models.Field; import io.airbyte.protocol.models.JsonSchemaType; +import io.airbyte.protocol.models.StreamDescriptor; import io.airbyte.server.handlers.helpers.CatalogConverter; import java.util.ArrayList; import java.util.Collections; @@ -35,11 +36,14 @@ public class ConnectionHelpers { - private static final String STREAM_NAME = "users-data"; + private static final String STREAM_NAME_BASE = "users-data"; + private static final String STREAM_NAME = STREAM_NAME_BASE + "0"; private static final String FIELD_NAME = "id"; private static final String BASIC_SCHEDULE_TIME_UNIT = "days"; private static final long BASIC_SCHEDULE_UNITS = 1L; + public static final StreamDescriptor STREAM_DESCRIPTOR = 
new StreamDescriptor().withName(STREAM_NAME); + // only intended for unit tests, so intentionally set very high to ensure they aren't being used // elsewhere public static final io.airbyte.config.ResourceRequirements TESTING_RESOURCE_REQUIREMENTS = new io.airbyte.config.ResourceRequirements() @@ -99,7 +103,8 @@ public static Schedule generateBasicSchedule() { public static ConnectionRead generateExpectedConnectionRead(final UUID connectionId, final UUID sourceId, final UUID destinationId, - final List operationIds) { + final List operationIds, + final UUID sourceCatalogId) { return new ConnectionRead() .connectionId(connectionId) @@ -117,7 +122,8 @@ public static ConnectionRead generateExpectedConnectionRead(final UUID connectio .cpuRequest(TESTING_RESOURCE_REQUIREMENTS.getCpuRequest()) .cpuLimit(TESTING_RESOURCE_REQUIREMENTS.getCpuLimit()) .memoryRequest(TESTING_RESOURCE_REQUIREMENTS.getMemoryRequest()) - .memoryLimit(TESTING_RESOURCE_REQUIREMENTS.getMemoryLimit())); + .memoryLimit(TESTING_RESOURCE_REQUIREMENTS.getMemoryLimit())) + .sourceCatalogId(sourceCatalogId); } public static ConnectionRead generateExpectedConnectionRead(final StandardSync standardSync) { @@ -125,7 +131,8 @@ public static ConnectionRead generateExpectedConnectionRead(final StandardSync s standardSync.getConnectionId(), standardSync.getSourceId(), standardSync.getDestinationId(), - standardSync.getOperationIds()); + standardSync.getOperationIds(), + standardSync.getSourceCatalogId()); if (standardSync.getSchedule() == null) { connectionRead.schedule(null); @@ -147,7 +154,8 @@ public static ConnectionRead connectionReadFromStandardSync(final StandardSync s .operationIds(standardSync.getOperationIds()) .name(standardSync.getName()) .namespaceFormat(standardSync.getNamespaceFormat()) - .prefix(standardSync.getPrefix()); + .prefix(standardSync.getPrefix()) + .sourceCatalogId(standardSync.getSourceCatalogId()); if (standardSync.getNamespaceDefinition() != null) { connectionRead @@ -196,7 +204,7 @@ 
private static io.airbyte.protocol.models.AirbyteStream generateBasicAirbyteStre public static AirbyteCatalog generateBasicApiCatalog() { return new AirbyteCatalog().streams(Lists.newArrayList(new AirbyteStreamAndConfiguration() - .stream(generateBasicApiStream()) + .stream(generateBasicApiStream(null)) .config(generateBasicApiStreamConfig()))); } @@ -204,7 +212,7 @@ public static AirbyteCatalog generateMultipleStreamsApiCatalog(final int streams final List streamAndConfigurations = new ArrayList<>(); for (int i = 0; i < streamsCount; i++) { streamAndConfigurations.add(new AirbyteStreamAndConfiguration() - .stream(generateBasicApiStream()) + .stream(generateBasicApiStream(String.valueOf(i))) .config(generateBasicApiStreamConfig())); } return new AirbyteCatalog().streams(streamAndConfigurations); @@ -221,8 +229,12 @@ private static AirbyteStreamConfiguration generateBasicApiStreamConfig() { } private static AirbyteStream generateBasicApiStream() { + return generateBasicApiStream(null); + } + + private static AirbyteStream generateBasicApiStream(final String nameSuffix) { return new AirbyteStream() - .name(STREAM_NAME) + .name(nameSuffix == null ? STREAM_NAME : STREAM_NAME_BASE + nameSuffix) .jsonSchema(generateBasicJsonSchema()) .defaultCursorField(Lists.newArrayList(FIELD_NAME)) .sourceDefinedCursor(false) diff --git a/airbyte-server/src/test/java/io/airbyte/server/version_mismatch/VersionMismatchServerTest.java b/airbyte-server/src/test/java/io/airbyte/server/version_mismatch/VersionMismatchServerTest.java deleted file mode 100644 index 635f1bbbf162..000000000000 --- a/airbyte-server/src/test/java/io/airbyte/server/version_mismatch/VersionMismatchServerTest.java +++ /dev/null @@ -1,67 +0,0 @@ -/* - * Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.server.version_mismatch; - -import static org.junit.jupiter.api.Assertions.assertEquals; - -import com.google.common.net.HttpHeaders; -import io.airbyte.commons.version.AirbyteVersion; -import java.net.HttpURLConnection; -import java.net.ServerSocket; -import java.net.URI; -import java.net.URL; -import org.eclipse.jetty.http.HttpStatus; -import org.eclipse.jetty.server.Server; -import org.junit.jupiter.api.AfterAll; -import org.junit.jupiter.api.BeforeAll; -import org.junit.jupiter.params.ParameterizedTest; -import org.junit.jupiter.params.provider.ValueSource; - -public class VersionMismatchServerTest { - - private static final AirbyteVersion VERSION1 = new AirbyteVersion("0.1.0-alpha"); - private static final AirbyteVersion VERSION2 = new AirbyteVersion("0.2.0-alpha"); - - private static URI rootUri; - private static Server server; - - @BeforeAll - public static void startServer() throws Exception { - // get any available local port - final ServerSocket socket = new ServerSocket(0); - final int port = socket.getLocalPort(); - socket.close(); - - server = new VersionMismatchServer(VERSION1, VERSION2, port).getServer(); - rootUri = new URI("http://localhost:" + port + "/"); - - server.start(); - } - - @AfterAll - public static void stopServer() throws Exception { - server.stop(); - } - - @ParameterizedTest - @ValueSource(strings = { - "/", - "/api/v1/health", - "/random_path" - }) - public void testIt(final String relativePath) throws Exception { - final URL url = rootUri.resolve(relativePath).toURL(); - final HttpURLConnection http = (HttpURLConnection) url.openConnection(); - - http.connect(); - - assertEquals(HttpStatus.INTERNAL_SERVER_ERROR_500, http.getResponseCode()); - - assertEquals(http.getHeaderField(HttpHeaders.ACCESS_CONTROL_ALLOW_ORIGIN), "*"); - assertEquals(http.getHeaderField(HttpHeaders.ACCESS_CONTROL_ALLOW_HEADERS), "Origin, Content-Type, Accept, Content-Encoding"); - 
assertEquals(http.getHeaderField(HttpHeaders.ACCESS_CONTROL_ALLOW_METHODS), "GET, POST, PUT, DELETE, OPTIONS, HEAD"); - } - -} diff --git a/airbyte-test-utils/build.gradle b/airbyte-test-utils/build.gradle index d671bd39d8f2..8518cb7a5ade 100644 --- a/airbyte-test-utils/build.gradle +++ b/airbyte-test-utils/build.gradle @@ -1,13 +1,26 @@ plugins { - id 'java' + id 'java-library' } dependencies { - implementation project(':airbyte-db:db-lib') + api project(':airbyte-db:db-lib') + implementation project(':airbyte-api') + implementation project(':airbyte-workers') - implementation libs.testcontainers.jdbc - implementation libs.testcontainers.postgresql - implementation libs.testcontainers.cockroachdb + implementation 'io.fabric8:kubernetes-client:5.12.2' + implementation 'io.temporal:temporal-sdk:1.8.1' - implementation 'org.junit.jupiter:junit-jupiter-api:5.7.2' + + api libs.junit.jupiter.api + + // Mark as compile only to avoid leaking transitively to connectors + compileOnly libs.platform.testcontainers.jdbc + compileOnly libs.platform.testcontainers.postgresql + compileOnly libs.platform.testcontainers.cockroachdb + + testImplementation libs.platform.testcontainers.jdbc + testImplementation libs.platform.testcontainers.postgresql + testImplementation libs.platform.testcontainers.cockroachdb } + +Task publishArtifactsTask = getPublishArtifactsTask("$rootProject.ext.version", project) diff --git a/airbyte-tests/src/main/java/io/airbyte/test/airbyte_test_container/AirbyteTestContainer.java b/airbyte-test-utils/src/main/java/io/airbyte/test/airbyte_test_container/AirbyteTestContainer.java similarity index 100% rename from airbyte-tests/src/main/java/io/airbyte/test/airbyte_test_container/AirbyteTestContainer.java rename to airbyte-test-utils/src/main/java/io/airbyte/test/airbyte_test_container/AirbyteTestContainer.java diff --git a/airbyte-test-utils/src/main/java/io/airbyte/test/utils/AirbyteAcceptanceTestHarness.java 
b/airbyte-test-utils/src/main/java/io/airbyte/test/utils/AirbyteAcceptanceTestHarness.java new file mode 100644 index 000000000000..a01439de5465 --- /dev/null +++ b/airbyte-test-utils/src/main/java/io/airbyte/test/utils/AirbyteAcceptanceTestHarness.java @@ -0,0 +1,730 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.test.utils; + +import static java.lang.Thread.sleep; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; + +import com.fasterxml.jackson.databind.JsonNode; +import com.google.common.collect.Lists; +import com.google.common.collect.Sets; +import com.google.common.io.Resources; +import io.airbyte.api.client.AirbyteApiClient; +import io.airbyte.api.client.generated.JobsApi; +import io.airbyte.api.client.invoker.generated.ApiException; +import io.airbyte.api.client.model.generated.AirbyteCatalog; +import io.airbyte.api.client.model.generated.AttemptInfoRead; +import io.airbyte.api.client.model.generated.ConnectionCreate; +import io.airbyte.api.client.model.generated.ConnectionIdRequestBody; +import io.airbyte.api.client.model.generated.ConnectionRead; +import io.airbyte.api.client.model.generated.ConnectionSchedule; +import io.airbyte.api.client.model.generated.ConnectionState; +import io.airbyte.api.client.model.generated.ConnectionStatus; +import io.airbyte.api.client.model.generated.ConnectionUpdate; +import io.airbyte.api.client.model.generated.DestinationCreate; +import io.airbyte.api.client.model.generated.DestinationDefinitionCreate; +import io.airbyte.api.client.model.generated.DestinationDefinitionRead; +import io.airbyte.api.client.model.generated.DestinationIdRequestBody; +import io.airbyte.api.client.model.generated.DestinationRead; +import io.airbyte.api.client.model.generated.JobIdRequestBody; +import io.airbyte.api.client.model.generated.JobRead; +import io.airbyte.api.client.model.generated.JobStatus; +import 
io.airbyte.api.client.model.generated.NamespaceDefinitionType; +import io.airbyte.api.client.model.generated.OperationCreate; +import io.airbyte.api.client.model.generated.OperationIdRequestBody; +import io.airbyte.api.client.model.generated.OperationRead; +import io.airbyte.api.client.model.generated.OperatorConfiguration; +import io.airbyte.api.client.model.generated.OperatorNormalization; +import io.airbyte.api.client.model.generated.OperatorType; +import io.airbyte.api.client.model.generated.SourceCreate; +import io.airbyte.api.client.model.generated.SourceDefinitionCreate; +import io.airbyte.api.client.model.generated.SourceDefinitionRead; +import io.airbyte.api.client.model.generated.SourceDiscoverSchemaRequestBody; +import io.airbyte.api.client.model.generated.SourceIdRequestBody; +import io.airbyte.api.client.model.generated.SourceRead; +import io.airbyte.commons.json.Jsons; +import io.airbyte.commons.resources.MoreResources; +import io.airbyte.commons.util.MoreProperties; +import io.airbyte.db.Database; +import io.airbyte.test.airbyte_test_container.AirbyteTestContainer; +import io.airbyte.workers.temporal.TemporalUtils; +import io.airbyte.workers.temporal.scheduling.ConnectionManagerWorkflow; +import io.airbyte.workers.temporal.scheduling.state.WorkflowState; +import io.fabric8.kubernetes.client.DefaultKubernetesClient; +import io.fabric8.kubernetes.client.KubernetesClient; +import io.temporal.client.WorkflowClient; +import io.temporal.serviceclient.WorkflowServiceStubs; +import java.io.File; +import java.io.IOException; +import java.net.Inet4Address; +import java.net.URI; +import java.net.URISyntaxException; +import java.net.UnknownHostException; +import java.nio.charset.StandardCharsets; +import java.nio.file.Path; +import java.sql.SQLException; +import java.time.Duration; +import java.time.Instant; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; 
+import java.util.UUID; +import java.util.stream.Collectors; +import org.jooq.JSONB; +import org.jooq.Record; +import org.jooq.Result; +import org.jooq.SQLDialect; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.testcontainers.containers.PostgreSQLContainer; +import org.testcontainers.utility.MountableFile; + +/** + * This class contains containers used for acceptance tests. Some of those containers/states are + * only used when the test are run without GKE. Specific environmental variables govern what types + * of containers are run. + *

+ * This class is put in a separate module to be easily pulled in as a dependency for Airbyte Cloud + * Acceptance Tests. + *

+ * Containers and states include: + *

  • source postgres SQL
  • + *
  • destination postgres SQL
  • + *
  • {@link AirbyteTestContainer}
  • + *
  • kubernetes client
  • + *
  • lists of UUIDS representing IDs of sources, destinations, connections, and operations
  • + */ +public class AirbyteAcceptanceTestHarness { + + private static final Logger LOGGER = LoggerFactory.getLogger(AirbyteAcceptanceTestHarness.class); + + private static final String DOCKER_COMPOSE_FILE_NAME = "docker-compose.yaml"; + // assume env file is one directory level up from airbyte-tests. + private final static File ENV_FILE = Path.of(System.getProperty("user.dir")).getParent().resolve(".env").toFile(); + + private static final String SOURCE_E2E_TEST_CONNECTOR_VERSION = "0.1.1"; + private static final String DESTINATION_E2E_TEST_CONNECTOR_VERSION = "0.1.1"; + + private static final String OUTPUT_NAMESPACE_PREFIX = "output_namespace_"; + private static final String OUTPUT_NAMESPACE = OUTPUT_NAMESPACE_PREFIX + "${SOURCE_NAMESPACE}"; + private static final String OUTPUT_STREAM_PREFIX = "output_table_"; + private static final String TABLE_NAME = "id_and_name"; + public static final String STREAM_NAME = TABLE_NAME; + public static final String COLUMN_ID = "id"; + public static final String COLUMN_NAME = "name"; + private static final String COLUMN_NAME_DATA = "_airbyte_data"; + private static final String SOURCE_USERNAME = "sourceusername"; + public static final String SOURCE_PASSWORD = "hunter2"; + + private static boolean isKube; + private static boolean isMinikube; + private static boolean isGke; + private static boolean isMac; + private static boolean useExternalDeployment; + + /** + * When the acceptance tests are run against a local instance of docker-compose or KUBE then these + * test containers are used. When we run these tests in GKE, we spawn a source and destination + * postgres database ane use them for testing. 
+ */ + private PostgreSQLContainer sourcePsql; + private PostgreSQLContainer destinationPsql; + private AirbyteTestContainer airbyteTestContainer; + private AirbyteApiClient apiClient; + private final UUID defaultWorkspaceId; + + private KubernetesClient kubernetesClient = null; + + private List sourceIds; + private List connectionIds; + private List destinationIds; + private List operationIds; + + public PostgreSQLContainer getSourcePsql() { + return sourcePsql; + } + + public KubernetesClient getKubernetesClient() { + return kubernetesClient; + } + + public void removeConnection(final UUID connection) { + connectionIds.remove(connection); + } + + public void setApiClient(final AirbyteApiClient apiClient) { + this.apiClient = apiClient; + } + + @SuppressWarnings("UnstableApiUsage") + public AirbyteAcceptanceTestHarness(final AirbyteApiClient apiClient, final UUID defaultWorkspaceId) + throws URISyntaxException, IOException, InterruptedException, ApiException { + // reads env vars to assign static variables + assignEnvVars(); + this.apiClient = apiClient; + this.defaultWorkspaceId = defaultWorkspaceId; + + if (isGke && !isKube) { + throw new RuntimeException("KUBE Flag should also be enabled if GKE flag is enabled"); + } + if (!isGke) { + sourcePsql = new PostgreSQLContainer("postgres:13-alpine") + .withUsername(SOURCE_USERNAME) + .withPassword(SOURCE_PASSWORD); + sourcePsql.start(); + + destinationPsql = new PostgreSQLContainer("postgres:13-alpine"); + destinationPsql.start(); + } + + if (isKube) { + kubernetesClient = new DefaultKubernetesClient(); + } + + // by default use airbyte deployment governed by a test container. 
+ if (!useExternalDeployment) { + LOGGER.info("Using deployment of airbyte managed by test containers."); + airbyteTestContainer = new AirbyteTestContainer.Builder(new File(Resources.getResource(DOCKER_COMPOSE_FILE_NAME).toURI())) + .setEnv(MoreProperties.envFileToProperties(ENV_FILE)) + // override env VERSION to use dev to test current build of airbyte. + .setEnvVariable("VERSION", "dev") + // override to use test mounts. + .setEnvVariable("DATA_DOCKER_MOUNT", "airbyte_data_migration_test") + .setEnvVariable("DB_DOCKER_MOUNT", "airbyte_db_migration_test") + .setEnvVariable("WORKSPACE_DOCKER_MOUNT", "airbyte_workspace_migration_test") + .setEnvVariable("LOCAL_ROOT", "/tmp/airbyte_local_migration_test") + .setEnvVariable("LOCAL_DOCKER_MOUNT", "/tmp/airbyte_local_migration_test") + .build(); + airbyteTestContainer.startBlocking(); + } else { + LOGGER.info("Using external deployment of airbyte."); + } + } + + public void stopDbAndContainers() { + if (!isGke) { + sourcePsql.stop(); + destinationPsql.stop(); + } + + if (airbyteTestContainer != null) { + airbyteTestContainer.stop(); + } + } + + public void setup() throws SQLException, URISyntaxException, IOException { + sourceIds = Lists.newArrayList(); + connectionIds = Lists.newArrayList(); + destinationIds = Lists.newArrayList(); + operationIds = Lists.newArrayList(); + + if (isGke) { + // seed database. 
+ final Database database = getSourceDatabase(); + final Path path = Path.of(MoreResources.readResourceAsFile("postgres_init.sql").toURI()); + final StringBuilder query = new StringBuilder(); + for (final String line : java.nio.file.Files.readAllLines(path, StandardCharsets.UTF_8)) { + if (line != null && !line.isEmpty()) { + query.append(line); + } + } + database.query(context -> context.execute(query.toString())); + } else { + PostgreSQLContainerHelper.runSqlScript(MountableFile.forClasspathResource("postgres_init.sql"), sourcePsql); + + destinationPsql = new PostgreSQLContainer("postgres:13-alpine"); + destinationPsql.start(); + } + } + + public void cleanup() { + try { + clearSourceDbData(); + clearDestinationDbData(); + + for (final UUID operationId : operationIds) { + deleteOperation(operationId); + } + + for (final UUID connectionId : connectionIds) { + disableConnection(connectionId); + } + + for (final UUID sourceId : sourceIds) { + deleteSource(sourceId); + } + + for (final UUID destinationId : destinationIds) { + deleteDestination(destinationId); + } + } catch (final Exception e) { + LOGGER.error("Error tearing down test fixtures:", e); + } + } + + private void assignEnvVars() { + isKube = System.getenv().containsKey("KUBE"); + isMinikube = System.getenv().containsKey("IS_MINIKUBE"); + isGke = System.getenv().containsKey("IS_GKE"); + isMac = System.getProperty("os.name").startsWith("Mac"); + useExternalDeployment = + System.getenv("USE_EXTERNAL_DEPLOYMENT") != null && System.getenv("USE_EXTERNAL_DEPLOYMENT").equalsIgnoreCase("true"); + } + + private WorkflowClient getWorkflowClient() { + final WorkflowServiceStubs temporalService = TemporalUtils.createTemporalService( + TemporalUtils.getAirbyteTemporalOptions("localhost:7233"), + TemporalUtils.DEFAULT_NAMESPACE); + return WorkflowClient.newInstance(temporalService); + } + + public WorkflowState getWorkflowState(final UUID connectionId) { + final WorkflowClient workflowCLient = getWorkflowClient(); + + // 
check if temporal workflow is reachable + final ConnectionManagerWorkflow connectionManagerWorkflow = + workflowCLient.newWorkflowStub(ConnectionManagerWorkflow.class, "connection_manager_" + connectionId); + + return connectionManagerWorkflow.getState(); + } + + public void terminateTemporalWorkflow(final UUID connectionId) { + final WorkflowClient workflowCLient = getWorkflowClient(); + + // check if temporal workflow is reachable + getWorkflowState(connectionId); + + // Terminate workflow + LOGGER.info("Terminating temporal workflow..."); + workflowCLient.newUntypedWorkflowStub("connection_manager_" + connectionId).terminate(""); + + // remove connection to avoid exception during tear down + connectionIds.remove(connectionId); + } + + public AirbyteCatalog discoverSourceSchema(final UUID sourceId) throws ApiException { + return apiClient.getSourceApi().discoverSchemaForSource(new SourceDiscoverSchemaRequestBody().sourceId(sourceId)).getCatalog(); + } + + public void assertSourceAndDestinationDbInSync(final boolean withScdTable) throws Exception { + final Database source = getSourceDatabase(); + final Set sourceTables = listAllTables(source); + final Set sourceTablesWithRawTablesAdded = addAirbyteGeneratedTables(withScdTable, sourceTables); + final Database destination = getDestinationDatabase(); + final Set destinationTables = listAllTables(destination); + assertEquals(sourceTablesWithRawTablesAdded, destinationTables, + String.format("streams did not match.\n source stream names: %s\n destination stream names: %s\n", sourceTables, destinationTables)); + + for (final SchemaTableNamePair pair : sourceTables) { + final List sourceRecords = retrieveSourceRecords(source, pair.getFullyQualifiedTableName()); + assertRawDestinationContains(sourceRecords, pair); + } + } + + public Database getSourceDatabase() { + if (isKube && isGke) { + return GKEPostgresConfig.getSourceDatabase(); + } + return getDatabase(sourcePsql); + } + + private Database getDestinationDatabase() 
{ + if (isKube && isGke) { + return GKEPostgresConfig.getDestinationDatabase(); + } + return getDatabase(destinationPsql); + } + + public Database getDatabase(final PostgreSQLContainer db) { + return new Database(DatabaseConnectionHelper.createDslContext(db, SQLDialect.POSTGRES)); + } + + public Set listAllTables(final Database database) throws SQLException { + return database.query( + context -> { + final Result fetch = + context.fetch( + "SELECT tablename, schemaname FROM pg_catalog.pg_tables WHERE schemaname != 'pg_catalog' AND schemaname != 'information_schema'"); + return fetch.stream() + .map(record -> { + final var schemaName = (String) record.get("schemaname"); + final var tableName = (String) record.get("tablename"); + return new SchemaTableNamePair(schemaName, tableName); + }) + .collect(Collectors.toSet()); + }); + } + + private Set addAirbyteGeneratedTables(final boolean withScdTable, final Set sourceTables) { + return sourceTables.stream().flatMap(x -> { + final String cleanedNameStream = x.tableName.replace(".", "_"); + final List explodedStreamNames = new ArrayList<>(List.of( + new SchemaTableNamePair(OUTPUT_NAMESPACE_PREFIX + x.schemaName, + String.format("_airbyte_raw_%s%s", OUTPUT_STREAM_PREFIX, cleanedNameStream)), + new SchemaTableNamePair(OUTPUT_NAMESPACE_PREFIX + x.schemaName, String.format("%s%s", OUTPUT_STREAM_PREFIX, cleanedNameStream)))); + if (withScdTable) { + explodedStreamNames + .add(new SchemaTableNamePair("_airbyte_" + OUTPUT_NAMESPACE_PREFIX + x.schemaName, + String.format("%s%s_stg", OUTPUT_STREAM_PREFIX, cleanedNameStream))); + explodedStreamNames + .add(new SchemaTableNamePair(OUTPUT_NAMESPACE_PREFIX + x.schemaName, String.format("%s%s_scd", OUTPUT_STREAM_PREFIX, cleanedNameStream))); + } + return explodedStreamNames.stream(); + }).collect(Collectors.toSet()); + } + + public void assertRawDestinationContains(final List sourceRecords, final SchemaTableNamePair pair) throws Exception { + final Set destinationRecords = new 
HashSet<>(retrieveRawDestinationRecords(pair)); + + assertEquals(sourceRecords.size(), destinationRecords.size(), + String.format("destination contains: %s record. source contains: %s, \nsource records %s \ndestination records: %s", + destinationRecords.size(), sourceRecords.size(), sourceRecords, destinationRecords)); + + for (final JsonNode sourceStreamRecord : sourceRecords) { + assertTrue(destinationRecords.contains(sourceStreamRecord), + String.format("destination does not contain record:\n %s \n destination contains:\n %s\n", + sourceStreamRecord, destinationRecords)); + } + } + + public void assertNormalizedDestinationContains(final List sourceRecords) throws Exception { + final Database destination = getDestinationDatabase(); + final String finalDestinationTable = String.format("%spublic.%s%s", OUTPUT_NAMESPACE_PREFIX, OUTPUT_STREAM_PREFIX, STREAM_NAME.replace(".", "_")); + final List destinationRecords = retrieveSourceRecords(destination, finalDestinationTable); + + assertEquals(sourceRecords.size(), destinationRecords.size(), + String.format("destination contains: %s record. 
source contains: %s", sourceRecords.size(), destinationRecords.size())); + + for (final JsonNode sourceStreamRecord : sourceRecords) { + assertTrue( + destinationRecords.stream() + .anyMatch(r -> r.get(COLUMN_NAME).asText().equals(sourceStreamRecord.get(COLUMN_NAME).asText()) + && r.get(COLUMN_ID).asInt() == sourceStreamRecord.get(COLUMN_ID).asInt()), + String.format("destination does not contain record:\n %s \n destination contains:\n %s\n", sourceStreamRecord, destinationRecords)); + } + } + + public ConnectionRead createConnection(final String name, + final UUID sourceId, + final UUID destinationId, + final List operationIds, + final AirbyteCatalog catalog, + final ConnectionSchedule schedule) + throws ApiException { + final ConnectionRead connection = apiClient.getConnectionApi().createConnection( + new ConnectionCreate() + .status(ConnectionStatus.ACTIVE) + .sourceId(sourceId) + .destinationId(destinationId) + .syncCatalog(catalog) + .schedule(schedule) + .operationIds(operationIds) + .name(name) + .namespaceDefinition(NamespaceDefinitionType.CUSTOMFORMAT) + .namespaceFormat(OUTPUT_NAMESPACE) + .prefix(OUTPUT_STREAM_PREFIX)); + connectionIds.add(connection.getConnectionId()); + return connection; + } + + public ConnectionRead updateConnectionSchedule(final UUID connectionId, final ConnectionSchedule newSchedule) throws ApiException { + final ConnectionRead connectionRead = apiClient.getConnectionApi().getConnection(new ConnectionIdRequestBody().connectionId(connectionId)); + + return apiClient.getConnectionApi().updateConnection( + new ConnectionUpdate() + .namespaceDefinition(connectionRead.getNamespaceDefinition()) + .namespaceFormat(connectionRead.getNamespaceFormat()) + .prefix(connectionRead.getPrefix()) + .connectionId(connectionId) + .operationIds(connectionRead.getOperationIds()) + .status(connectionRead.getStatus()) + .syncCatalog(connectionRead.getSyncCatalog()) + .name(connectionRead.getName()) + 
.resourceRequirements(connectionRead.getResourceRequirements()) + .schedule(newSchedule) // only field being updated + ); + } + + public DestinationRead createDestination() throws ApiException { + return createDestination( + "AccTestDestination-" + UUID.randomUUID(), + defaultWorkspaceId, + getDestinationDefId(), + getDestinationDbConfig()); + } + + public DestinationRead createDestination(final String name, final UUID workspaceId, final UUID destinationDefId, final JsonNode destinationConfig) + throws ApiException { + final DestinationRead destination = + apiClient.getDestinationApi().createDestination(new DestinationCreate() + .name(name) + .connectionConfiguration(Jsons.jsonNode(destinationConfig)) + .workspaceId(workspaceId) + .destinationDefinitionId(destinationDefId)); + destinationIds.add(destination.getDestinationId()); + return destination; + } + + public OperationRead createOperation() throws ApiException { + final OperatorConfiguration normalizationConfig = new OperatorConfiguration() + .operatorType(OperatorType.NORMALIZATION).normalization(new OperatorNormalization().option( + OperatorNormalization.OptionEnum.BASIC)); + + final OperationCreate operationCreate = new OperationCreate() + .workspaceId(defaultWorkspaceId) + .name("AccTestDestination-" + UUID.randomUUID()).operatorConfiguration(normalizationConfig); + + final OperationRead operation = apiClient.getOperationApi().createOperation(operationCreate); + operationIds.add(operation.getOperationId()); + return operation; + } + + public UUID getDestinationDefId() throws ApiException { + return apiClient.getDestinationDefinitionApi().listDestinationDefinitions().getDestinationDefinitions() + .stream() + .filter(dr -> dr.getName().toLowerCase().contains("postgres")) + .findFirst() + .orElseThrow() + .getDestinationDefinitionId(); + } + + public List retrieveSourceRecords(final Database database, final String table) throws SQLException { + return database.query(context -> 
context.fetch(String.format("SELECT * FROM %s;", table))) + .stream() + .map(Record::intoMap) + .map(Jsons::jsonNode) + .collect(Collectors.toList()); + } + + private List retrieveDestinationRecords(final Database database, final String table) throws SQLException { + return database.query(context -> context.fetch(String.format("SELECT * FROM %s;", table))) + .stream() + .map(Record::intoMap) + .map(r -> r.get(COLUMN_NAME_DATA)) + .map(f -> (JSONB) f) + .map(JSONB::data) + .map(Jsons::deserialize) + .map(Jsons::jsonNode) + .collect(Collectors.toList()); + } + + private List retrieveRawDestinationRecords(final SchemaTableNamePair pair) throws Exception { + final Database destination = getDestinationDatabase(); + final Set namePairs = listAllTables(destination); + + final String rawStreamName = String.format("_airbyte_raw_%s%s", OUTPUT_STREAM_PREFIX, pair.tableName.replace(".", "_")); + final SchemaTableNamePair rawTablePair = new SchemaTableNamePair(OUTPUT_NAMESPACE_PREFIX + pair.schemaName, rawStreamName); + assertTrue(namePairs.contains(rawTablePair), "can't find a non-normalized version (raw) of " + rawTablePair.getFullyQualifiedTableName()); + + return retrieveDestinationRecords(destination, rawTablePair.getFullyQualifiedTableName()); + } + + public JsonNode getSourceDbConfig() { + return getDbConfig(sourcePsql, false, false, Type.SOURCE); + } + + public JsonNode getDestinationDbConfig() { + return getDbConfig(destinationPsql, false, true, Type.DESTINATION); + } + + public JsonNode getDestinationDbConfigWithHiddenPassword() { + return getDbConfig(destinationPsql, true, true, Type.DESTINATION); + } + + public JsonNode getDbConfig(final PostgreSQLContainer psql, final boolean hiddenPassword, final boolean withSchema, final Type connectorType) { + try { + final Map dbConfig = (isKube && isGke) ? 
GKEPostgresConfig.dbConfig(connectorType, hiddenPassword, withSchema) + : localConfig(psql, hiddenPassword, withSchema); + return Jsons.jsonNode(dbConfig); + } catch (final Exception e) { + throw new RuntimeException(e); + } + } + + private Map localConfig(final PostgreSQLContainer psql, final boolean hiddenPassword, final boolean withSchema) + throws UnknownHostException { + final Map dbConfig = new HashMap<>(); + // don't use psql.getHost() directly since the ip we need differs depending on environment + if (isKube) { + if (isMinikube) { + // used with minikube driver=none instance + dbConfig.put("host", Inet4Address.getLocalHost().getHostAddress()); + } else { + // used on a single node with docker driver + dbConfig.put("host", "host.docker.internal"); + } + } else if (isMac) { + dbConfig.put("host", "host.docker.internal"); + } else { + dbConfig.put("host", "localhost"); + } + + if (hiddenPassword) { + dbConfig.put("password", "**********"); + } else { + dbConfig.put("password", psql.getPassword()); + } + + dbConfig.put("port", psql.getFirstMappedPort()); + dbConfig.put("database", psql.getDatabaseName()); + dbConfig.put("username", psql.getUsername()); + dbConfig.put("ssl", false); + + if (withSchema) { + dbConfig.put("schema", "public"); + } + return dbConfig; + } + + public SourceDefinitionRead createE2eSourceDefinition() throws ApiException { + return apiClient.getSourceDefinitionApi().createSourceDefinition(new SourceDefinitionCreate() + .name("E2E Test Source") + .dockerRepository("airbyte/source-e2e-test") + .dockerImageTag(SOURCE_E2E_TEST_CONNECTOR_VERSION) + .documentationUrl(URI.create("https://example.com"))); + } + + public DestinationDefinitionRead createE2eDestinationDefinition() throws ApiException { + return apiClient.getDestinationDefinitionApi().createDestinationDefinition(new DestinationDefinitionCreate() + .name("E2E Test Destination") + .dockerRepository("airbyte/destination-e2e-test") + 
.dockerImageTag(DESTINATION_E2E_TEST_CONNECTOR_VERSION) + .documentationUrl(URI.create("https://example.com"))); + } + + public SourceRead createPostgresSource() throws ApiException { + return createSource( + "acceptanceTestDb-" + UUID.randomUUID(), + defaultWorkspaceId, + getPostgresSourceDefinitionId(), + getSourceDbConfig()); + } + + public SourceRead createSource(final String name, final UUID workspaceId, final UUID sourceDefId, final JsonNode sourceConfig) + throws ApiException { + final SourceRead source = apiClient.getSourceApi().createSource(new SourceCreate() + .name(name) + .sourceDefinitionId(sourceDefId) + .workspaceId(workspaceId) + .connectionConfiguration(sourceConfig)); + sourceIds.add(source.getSourceId()); + return source; + } + + public UUID getPostgresSourceDefinitionId() throws ApiException { + return apiClient.getSourceDefinitionApi().listSourceDefinitions().getSourceDefinitions() + .stream() + .filter(sourceRead -> sourceRead.getName().equalsIgnoreCase("postgres")) + .findFirst() + .orElseThrow() + .getSourceDefinitionId(); + } + + private void clearSourceDbData() throws SQLException { + final Database database = getSourceDatabase(); + final Set pairs = listAllTables(database); + for (final SchemaTableNamePair pair : pairs) { + database.query(context -> context.execute(String.format("DROP TABLE %s.%s", pair.schemaName, pair.tableName))); + } + } + + private void clearDestinationDbData() throws SQLException { + final Database database = getDestinationDatabase(); + final Set pairs = listAllTables(database); + for (final SchemaTableNamePair pair : pairs) { + database.query(context -> context.execute(String.format("DROP TABLE %s.%s CASCADE", pair.schemaName, pair.tableName))); + } + } + + private void disableConnection(final UUID connectionId) throws ApiException { + final ConnectionRead connection = apiClient.getConnectionApi().getConnection(new ConnectionIdRequestBody().connectionId(connectionId)); + final ConnectionUpdate connectionUpdate = + 
new ConnectionUpdate() + .prefix(connection.getPrefix()) + .connectionId(connectionId) + .operationIds(connection.getOperationIds()) + .status(ConnectionStatus.DEPRECATED) + .schedule(connection.getSchedule()) + .syncCatalog(connection.getSyncCatalog()); + apiClient.getConnectionApi().updateConnection(connectionUpdate); + } + + private void deleteSource(final UUID sourceId) throws ApiException { + apiClient.getSourceApi().deleteSource(new SourceIdRequestBody().sourceId(sourceId)); + } + + private void deleteDestination(final UUID destinationId) throws ApiException { + apiClient.getDestinationApi().deleteDestination(new DestinationIdRequestBody().destinationId(destinationId)); + } + + private void deleteOperation(final UUID destinationId) throws ApiException { + apiClient.getOperationApi().deleteOperation(new OperationIdRequestBody().operationId(destinationId)); + } + + public static void waitForSuccessfulJob(final JobsApi jobsApi, final JobRead originalJob) throws InterruptedException, ApiException { + final JobRead job = waitWhileJobHasStatus(jobsApi, originalJob, Sets.newHashSet(JobStatus.PENDING, JobStatus.RUNNING)); + + if (!JobStatus.SUCCEEDED.equals(job.getStatus())) { + // If a job failed during testing, show us why. 
+ final JobIdRequestBody id = new JobIdRequestBody(); + id.setId(originalJob.getId()); + for (final AttemptInfoRead attemptInfo : jobsApi.getJobInfo(id).getAttempts()) { + LOGGER.warn("Unsuccessful job attempt " + attemptInfo.getAttempt().getId() + + " with status " + job.getStatus() + " produced log output as follows: " + attemptInfo.getLogs().getLogLines()); + } + } + assertEquals(JobStatus.SUCCEEDED, job.getStatus()); + } + + public static JobRead waitWhileJobHasStatus(final JobsApi jobsApi, final JobRead originalJob, final Set jobStatuses) + throws InterruptedException, ApiException { + return waitWhileJobHasStatus(jobsApi, originalJob, jobStatuses, Duration.ofMinutes(6)); + } + + @SuppressWarnings("BusyWait") + public static JobRead waitWhileJobHasStatus(final JobsApi jobsApi, + final JobRead originalJob, + final Set jobStatuses, + final Duration maxWaitTime) + throws InterruptedException, ApiException { + JobRead job = originalJob; + + final Instant waitStart = Instant.now(); + while (jobStatuses.contains(job.getStatus())) { + if (Duration.between(waitStart, Instant.now()).compareTo(maxWaitTime) > 0) { + LOGGER.info("Max wait time of {} has been reached. Stopping wait.", maxWaitTime); + break; + } + sleep(1000); + + job = jobsApi.getJobInfo(new JobIdRequestBody().id(job.getId())).getJob(); + LOGGER.info("waiting: job id: {} config type: {} status: {}", job.getId(), job.getConfigType(), job.getStatus()); + } + return job; + } + + @SuppressWarnings("BusyWait") + public static ConnectionState waitForConnectionState(final AirbyteApiClient apiClient, final UUID connectionId) + throws ApiException, InterruptedException { + ConnectionState connectionState = apiClient.getConnectionApi().getState(new ConnectionIdRequestBody().connectionId(connectionId)); + int count = 0; + while (count < 60 && (connectionState.getState() == null || connectionState.getState().isNull())) { + LOGGER.info("fetching connection state. 
attempt: {}", count++); + connectionState = apiClient.getConnectionApi().getState(new ConnectionIdRequestBody().connectionId(connectionId)); + sleep(1000); + } + return connectionState; + } + + public enum Type { + SOURCE, + DESTINATION + } + +} diff --git a/airbyte-tests/src/acceptanceTests/java/io/airbyte/test/acceptance/GKEPostgresConfig.java b/airbyte-test-utils/src/main/java/io/airbyte/test/utils/GKEPostgresConfig.java similarity index 95% rename from airbyte-tests/src/acceptanceTests/java/io/airbyte/test/acceptance/GKEPostgresConfig.java rename to airbyte-test-utils/src/main/java/io/airbyte/test/utils/GKEPostgresConfig.java index 629e0403e273..581cce05bafc 100644 --- a/airbyte-tests/src/acceptanceTests/java/io/airbyte/test/acceptance/GKEPostgresConfig.java +++ b/airbyte-test-utils/src/main/java/io/airbyte/test/utils/GKEPostgresConfig.java @@ -2,12 +2,12 @@ * Copyright (c) 2022 Airbyte, Inc., all rights reserved. */ -package io.airbyte.test.acceptance; +package io.airbyte.test.utils; import io.airbyte.db.Database; import io.airbyte.db.factory.DSLContextFactory; import io.airbyte.db.factory.DatabaseDriver; -import io.airbyte.test.acceptance.AdvancedAcceptanceTests.Type; +import io.airbyte.test.utils.AirbyteAcceptanceTestHarness.Type; import java.util.HashMap; import java.util.Map; import org.jooq.SQLDialect; diff --git a/airbyte-tests/src/acceptanceTests/java/io/airbyte/test/acceptance/SchemaTableNamePair.java b/airbyte-test-utils/src/main/java/io/airbyte/test/utils/SchemaTableNamePair.java similarity index 96% rename from airbyte-tests/src/acceptanceTests/java/io/airbyte/test/acceptance/SchemaTableNamePair.java rename to airbyte-test-utils/src/main/java/io/airbyte/test/utils/SchemaTableNamePair.java index df8c6e833c1e..c635f854c3aa 100644 --- a/airbyte-tests/src/acceptanceTests/java/io/airbyte/test/acceptance/SchemaTableNamePair.java +++ b/airbyte-test-utils/src/main/java/io/airbyte/test/utils/SchemaTableNamePair.java @@ -2,7 +2,7 @@ * Copyright (c) 2022 
Airbyte, Inc., all rights reserved. */ -package io.airbyte.test.acceptance; +package io.airbyte.test.utils; import java.util.Objects; diff --git a/airbyte-tests/build.gradle b/airbyte-tests/build.gradle index a684611d9f28..5931cac20e38 100644 --- a/airbyte-tests/build.gradle +++ b/airbyte-tests/build.gradle @@ -39,7 +39,7 @@ dependencies { implementation project(':airbyte-container-orchestrator') implementation 'io.fabric8:kubernetes-client:5.12.2' - implementation libs.testcontainers + implementation libs.platform.testcontainers acceptanceTestsImplementation project(':airbyte-api') acceptanceTestsImplementation project(':airbyte-commons') @@ -54,14 +54,16 @@ dependencies { acceptanceTestsImplementation 'io.github.cdimascio:java-dotenv:3.0.0' acceptanceTestsImplementation 'io.temporal:temporal-sdk:1.8.1' acceptanceTestsImplementation 'org.apache.commons:commons-csv:1.4' - acceptanceTestsImplementation libs.testcontainers.postgresql + acceptanceTestsImplementation libs.platform.testcontainers.postgresql acceptanceTestsImplementation libs.postgresql automaticMigrationAcceptanceTestImplementation project(':airbyte-api') automaticMigrationAcceptanceTestImplementation project(':airbyte-commons') automaticMigrationAcceptanceTestImplementation project(':airbyte-tests') + automaticMigrationAcceptanceTestImplementation project(':airbyte-test-utils') - automaticMigrationAcceptanceTestImplementation libs.testcontainers + + automaticMigrationAcceptanceTestImplementation libs.platform.testcontainers } // test should run using the current version of the docker compose configuration. 
diff --git a/airbyte-tests/src/acceptanceTests/java/io/airbyte/test/acceptance/AdvancedAcceptanceTests.java b/airbyte-tests/src/acceptanceTests/java/io/airbyte/test/acceptance/AdvancedAcceptanceTests.java index b6b5cd158148..813b0cc03921 100644 --- a/airbyte-tests/src/acceptanceTests/java/io/airbyte/test/acceptance/AdvancedAcceptanceTests.java +++ b/airbyte-tests/src/acceptanceTests/java/io/airbyte/test/acceptance/AdvancedAcceptanceTests.java @@ -4,36 +4,29 @@ package io.airbyte.test.acceptance; -import static java.lang.Thread.sleep; +import static io.airbyte.test.utils.AirbyteAcceptanceTestHarness.COLUMN_ID; +import static io.airbyte.test.utils.AirbyteAcceptanceTestHarness.SOURCE_PASSWORD; +import static io.airbyte.test.utils.AirbyteAcceptanceTestHarness.waitForConnectionState; +import static io.airbyte.test.utils.AirbyteAcceptanceTestHarness.waitForSuccessfulJob; +import static io.airbyte.test.utils.AirbyteAcceptanceTestHarness.waitWhileJobHasStatus; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertFalse; import static org.junit.jupiter.api.Assertions.assertNotNull; import static org.junit.jupiter.api.Assertions.assertTrue; -import com.fasterxml.jackson.databind.JsonNode; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; import com.google.common.collect.Sets; -import com.google.common.io.Resources; import io.airbyte.api.client.AirbyteApiClient; -import io.airbyte.api.client.generated.JobsApi; import io.airbyte.api.client.invoker.generated.ApiClient; import io.airbyte.api.client.invoker.generated.ApiException; import io.airbyte.api.client.model.generated.AirbyteCatalog; import io.airbyte.api.client.model.generated.AirbyteStream; import io.airbyte.api.client.model.generated.AttemptInfoRead; -import io.airbyte.api.client.model.generated.ConnectionCreate; import io.airbyte.api.client.model.generated.ConnectionIdRequestBody; -import 
io.airbyte.api.client.model.generated.ConnectionRead; -import io.airbyte.api.client.model.generated.ConnectionSchedule; import io.airbyte.api.client.model.generated.ConnectionState; -import io.airbyte.api.client.model.generated.ConnectionStatus; -import io.airbyte.api.client.model.generated.ConnectionUpdate; -import io.airbyte.api.client.model.generated.DestinationCreate; -import io.airbyte.api.client.model.generated.DestinationDefinitionCreate; import io.airbyte.api.client.model.generated.DestinationDefinitionIdRequestBody; import io.airbyte.api.client.model.generated.DestinationDefinitionRead; -import io.airbyte.api.client.model.generated.DestinationIdRequestBody; import io.airbyte.api.client.model.generated.DestinationRead; import io.airbyte.api.client.model.generated.DestinationSyncMode; import io.airbyte.api.client.model.generated.JobIdRequestBody; @@ -42,61 +35,26 @@ import io.airbyte.api.client.model.generated.JobStatus; import io.airbyte.api.client.model.generated.LogType; import io.airbyte.api.client.model.generated.LogsRequestBody; -import io.airbyte.api.client.model.generated.NamespaceDefinitionType; -import io.airbyte.api.client.model.generated.OperationCreate; -import io.airbyte.api.client.model.generated.OperationIdRequestBody; -import io.airbyte.api.client.model.generated.OperationRead; -import io.airbyte.api.client.model.generated.OperatorConfiguration; -import io.airbyte.api.client.model.generated.OperatorNormalization; -import io.airbyte.api.client.model.generated.OperatorNormalization.OptionEnum; -import io.airbyte.api.client.model.generated.OperatorType; -import io.airbyte.api.client.model.generated.SourceCreate; -import io.airbyte.api.client.model.generated.SourceDefinitionCreate; import io.airbyte.api.client.model.generated.SourceDefinitionIdRequestBody; import io.airbyte.api.client.model.generated.SourceDefinitionRead; -import io.airbyte.api.client.model.generated.SourceDiscoverSchemaRequestBody; -import 
io.airbyte.api.client.model.generated.SourceIdRequestBody; import io.airbyte.api.client.model.generated.SourceRead; import io.airbyte.api.client.model.generated.SyncMode; import io.airbyte.commons.json.Jsons; import io.airbyte.commons.lang.MoreBooleans; -import io.airbyte.commons.resources.MoreResources; -import io.airbyte.commons.util.MoreProperties; import io.airbyte.container_orchestrator.ContainerOrchestratorApp; -import io.airbyte.db.Database; -import io.airbyte.test.airbyte_test_container.AirbyteTestContainer; -import io.airbyte.test.utils.DatabaseConnectionHelper; -import io.airbyte.test.utils.PostgreSQLContainerHelper; -import io.fabric8.kubernetes.client.DefaultKubernetesClient; +import io.airbyte.test.utils.AirbyteAcceptanceTestHarness; import io.fabric8.kubernetes.client.KubernetesClient; -import java.io.File; import java.io.IOException; -import java.net.Inet4Address; -import java.net.URI; import java.net.URISyntaxException; -import java.net.UnknownHostException; import java.nio.charset.Charset; -import java.nio.charset.StandardCharsets; -import java.nio.file.Path; import java.sql.SQLException; -import java.time.Duration; -import java.time.Instant; -import java.util.ArrayList; import java.util.Collections; -import java.util.HashMap; -import java.util.HashSet; import java.util.List; -import java.util.Map; import java.util.Set; import java.util.UUID; import java.util.concurrent.Executors; import java.util.concurrent.Future; import java.util.concurrent.TimeUnit; -import java.util.stream.Collectors; -import org.jooq.JSONB; -import org.jooq.Record; -import org.jooq.Result; -import org.jooq.SQLDialect; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.BeforeAll; @@ -111,8 +69,6 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.slf4j.MDC; -import org.testcontainers.containers.PostgreSQLContainer; -import org.testcontainers.utility.MountableFile; /** * The class test for advanced 
platform functionality that can be affected by the networking @@ -137,92 +93,19 @@ public class AdvancedAcceptanceTests { private static final Logger LOGGER = LoggerFactory.getLogger(AdvancedAcceptanceTests.class); - private static final String DOCKER_COMPOSE_FILE_NAME = "docker-compose.yaml"; - // assume env file is one directory level up from airbyte-tests. - private final static File ENV_FILE = Path.of(System.getProperty("user.dir")).getParent().resolve(".env").toFile(); - - private static final String SOURCE_E2E_TEST_CONNECTOR_VERSION = "0.1.1"; - private static final String DESTINATION_E2E_TEST_CONNECTOR_VERSION = "0.1.1"; - - private static final Charset UTF8 = StandardCharsets.UTF_8; - private static final boolean IS_KUBE = System.getenv().containsKey("KUBE"); - private static final boolean IS_MINIKUBE = System.getenv().containsKey("IS_MINIKUBE"); - private static final boolean IS_GKE = System.getenv().containsKey("IS_GKE"); - private static final boolean IS_MAC = System.getProperty("os.name").startsWith("Mac"); - private static final boolean USE_EXTERNAL_DEPLOYMENT = - System.getenv("USE_EXTERNAL_DEPLOYMENT") != null && System.getenv("USE_EXTERNAL_DEPLOYMENT").equalsIgnoreCase("true"); - - private static final String OUTPUT_NAMESPACE_PREFIX = "output_namespace_"; - private static final String OUTPUT_NAMESPACE = OUTPUT_NAMESPACE_PREFIX + "${SOURCE_NAMESPACE}"; - private static final String OUTPUT_STREAM_PREFIX = "output_table_"; - private static final String TABLE_NAME = "id_and_name"; - private static final String STREAM_NAME = TABLE_NAME; - private static final String COLUMN_ID = "id"; - private static final String COLUMN_NAME = "name"; - private static final String COLUMN_NAME_DATA = "_airbyte_data"; - private static final String SOURCE_USERNAME = "sourceusername"; - private static final String SOURCE_PASSWORD = "hunter2"; - - /** - * When the acceptance tests are run against a local instance of docker-compose or KUBE then these - * test containers are 
used. When we run these tests in GKE, we spawn a source and destination - * postgres database ane use them for testing. - */ - private static PostgreSQLContainer sourcePsql; - private static PostgreSQLContainer destinationPsql; - private static AirbyteTestContainer airbyteTestContainer; + private static AirbyteAcceptanceTestHarness testHarness; private static AirbyteApiClient apiClient; private static UUID workspaceId; - - private List sourceIds; - private List connectionIds; - private List destinationIds; - private List operationIds; - - private static KubernetesClient kubernetesClient = null; + private static KubernetesClient kubernetesClient; @SuppressWarnings("UnstableApiUsage") @BeforeAll - public static void init() throws URISyntaxException, IOException, InterruptedException, ApiException, SQLException { - if (IS_GKE && !IS_KUBE) { - throw new RuntimeException("KUBE Flag should also be enabled if GKE flag is enabled"); - } - if (!IS_GKE) { - sourcePsql = new PostgreSQLContainer("postgres:13-alpine") - .withUsername(SOURCE_USERNAME) - .withPassword(SOURCE_PASSWORD); - sourcePsql.start(); - } - - if (IS_KUBE) { - kubernetesClient = new DefaultKubernetesClient(); - } - - // by default use airbyte deployment governed by a test container. - if (!USE_EXTERNAL_DEPLOYMENT) { - LOGGER.info("Using deployment of airbyte managed by test containers."); - airbyteTestContainer = new AirbyteTestContainer.Builder(new File(Resources.getResource(DOCKER_COMPOSE_FILE_NAME).toURI())) - .setEnv(MoreProperties.envFileToProperties(ENV_FILE)) - // override env VERSION to use dev to test current build of airbyte. - .setEnvVariable("VERSION", "dev") - // override to use test mounts. 
- .setEnvVariable("DATA_DOCKER_MOUNT", "airbyte_data_migration_test") - .setEnvVariable("DB_DOCKER_MOUNT", "airbyte_db_migration_test") - .setEnvVariable("WORKSPACE_DOCKER_MOUNT", "airbyte_workspace_migration_test") - .setEnvVariable("LOCAL_ROOT", "/tmp/airbyte_local_migration_test") - .setEnvVariable("LOCAL_DOCKER_MOUNT", "/tmp/airbyte_local_migration_test") - .build(); - airbyteTestContainer.startBlocking(); - } else { - LOGGER.info("Using external deployment of airbyte."); - } - + public static void init() throws URISyntaxException, IOException, InterruptedException, ApiException { apiClient = new AirbyteApiClient( new ApiClient().setScheme("http") .setHost("localhost") .setPort(8001) .setBasePath("/api")); - // work in whatever default workspace is present. workspaceId = apiClient.getWorkspaceApi().listWorkspaces().getWorkspaces().get(0).getWorkspaceId(); LOGGER.info("workspaceId = " + workspaceId); @@ -237,99 +120,50 @@ public static void init() throws URISyntaxException, IOException, InterruptedExc LOGGER.info("pg source definition: {}", sourceDef.getDockerImageTag()); LOGGER.info("pg destination definition: {}", destinationDef.getDockerImageTag()); - if (!IS_GKE) { - destinationPsql = new PostgreSQLContainer("postgres:13-alpine"); - destinationPsql.start(); - } - + testHarness = new AirbyteAcceptanceTestHarness(apiClient, workspaceId); + kubernetesClient = testHarness.getKubernetesClient(); } @AfterAll public static void end() { - if (!IS_GKE) { - sourcePsql.stop(); - destinationPsql.stop(); - } - - if (airbyteTestContainer != null) { - airbyteTestContainer.stop(); - } + testHarness.stopDbAndContainers(); } @BeforeEach public void setup() throws URISyntaxException, IOException, SQLException { - sourceIds = Lists.newArrayList(); - connectionIds = Lists.newArrayList(); - destinationIds = Lists.newArrayList(); - operationIds = Lists.newArrayList(); - - if (IS_GKE) { - // seed database. 
- final Database database = getSourceDatabase(); - final Path path = Path.of(MoreResources.readResourceAsFile("postgres_init.sql").toURI()); - final StringBuilder query = new StringBuilder(); - for (final String line : java.nio.file.Files.readAllLines(path, UTF8)) { - if (line != null && !line.isEmpty()) { - query.append(line); - } - } - database.query(context -> context.execute(query.toString())); - } else { - PostgreSQLContainerHelper.runSqlScript(MountableFile.forClasspathResource("postgres_init.sql"), sourcePsql); - } + testHarness.setup(); } @AfterEach public void tearDown() { - try { - clearSourceDbData(); - clearDestinationDbData(); - - for (final UUID operationId : operationIds) { - deleteOperation(operationId); - } - - for (final UUID connectionId : connectionIds) { - disableConnection(connectionId); - } - - for (final UUID sourceId : sourceIds) { - deleteSource(sourceId); - } - - for (final UUID destinationId : destinationIds) { - deleteDestination(destinationId); - } - } catch (Exception e) { - LOGGER.error("Error tearing down test fixtures:", e); - } + testHarness.cleanup(); } @RetryingTest(3) @Order(1) public void testManualSync() throws Exception { final String connectionName = "test-connection"; - final UUID sourceId = createPostgresSource().getSourceId(); - final UUID destinationId = createDestination().getDestinationId(); - final UUID operationId = createOperation().getOperationId(); - final AirbyteCatalog catalog = discoverSourceSchema(sourceId); + final UUID sourceId = testHarness.createPostgresSource().getSourceId(); + final UUID destinationId = testHarness.createDestination().getDestinationId(); + final UUID operationId = testHarness.createOperation().getOperationId(); + final AirbyteCatalog catalog = testHarness.discoverSourceSchema(sourceId); final SyncMode syncMode = SyncMode.FULL_REFRESH; final DestinationSyncMode destinationSyncMode = DestinationSyncMode.OVERWRITE; catalog.getStreams().forEach(s -> 
s.getConfig().syncMode(syncMode).destinationSyncMode(destinationSyncMode)); final UUID connectionId = - createConnection(connectionName, sourceId, destinationId, List.of(operationId), catalog, null).getConnectionId(); + testHarness.createConnection(connectionName, sourceId, destinationId, List.of(operationId), catalog, null).getConnectionId(); final JobInfoRead connectionSyncRead = apiClient.getConnectionApi().syncConnection(new ConnectionIdRequestBody().connectionId(connectionId)); waitForSuccessfulJob(apiClient.getJobsApi(), connectionSyncRead.getJob()); - assertSourceAndDestinationDbInSync(false); + testHarness.assertSourceAndDestinationDbInSync(false); } @RetryingTest(3) @Order(2) public void testCheckpointing() throws Exception { - final SourceDefinitionRead sourceDefinition = createE2eSourceDefinition(); - final DestinationDefinitionRead destinationDefinition = createE2eDestinationDefinition(); + final SourceDefinitionRead sourceDefinition = testHarness.createE2eSourceDefinition(); + final DestinationDefinitionRead destinationDefinition = testHarness.createE2eDestinationDefinition(); - final SourceRead source = createSource( + final SourceRead source = testHarness.createSource( "E2E Test Source -" + UUID.randomUUID(), workspaceId, sourceDefinition.getSourceDefinitionId(), @@ -338,7 +172,7 @@ public void testCheckpointing() throws Exception { .put("throw_after_n_records", 100) .build())); - final DestinationRead destination = createDestination( + final DestinationRead destination = testHarness.createDestination( "E2E Test Destination -" + UUID.randomUUID(), workspaceId, destinationDefinition.getDestinationDefinitionId(), @@ -347,7 +181,7 @@ public void testCheckpointing() throws Exception { final String connectionName = "test-connection"; final UUID sourceId = source.getSourceId(); final UUID destinationId = destination.getDestinationId(); - final AirbyteCatalog catalog = discoverSourceSchema(sourceId); + final AirbyteCatalog catalog = 
testHarness.discoverSourceSchema(sourceId); final AirbyteStream stream = catalog.getStreams().get(0).getStream(); assertEquals( @@ -362,7 +196,8 @@ public void testCheckpointing() throws Exception { .cursorField(List.of(COLUMN_ID)) .destinationSyncMode(destinationSyncMode)); final UUID connectionId = - createConnection(connectionName, sourceId, destinationId, Collections.emptyList(), catalog, null).getConnectionId(); + testHarness.createConnection(connectionName, sourceId, destinationId, Collections.emptyList(), catalog, null) + .getConnectionId(); final JobInfoRead connectionSyncRead1 = apiClient.getConnectionApi() .syncConnection(new ConnectionIdRequestBody().connectionId(connectionId)); @@ -414,10 +249,10 @@ public void testRedactionOfSensitiveRequestBodies() throws Exception { @RetryingTest(3) @Order(4) public void testBackpressure() throws Exception { - final SourceDefinitionRead sourceDefinition = createE2eSourceDefinition(); - final DestinationDefinitionRead destinationDefinition = createE2eDestinationDefinition(); + final SourceDefinitionRead sourceDefinition = testHarness.createE2eSourceDefinition(); + final DestinationDefinitionRead destinationDefinition = testHarness.createE2eDestinationDefinition(); - final SourceRead source = createSource( + final SourceRead source = testHarness.createSource( "E2E Test Source -" + UUID.randomUUID(), workspaceId, sourceDefinition.getSourceDefinitionId(), @@ -426,7 +261,7 @@ public void testBackpressure() throws Exception { .put("max_records", 5000) .build())); - final DestinationRead destination = createDestination( + final DestinationRead destination = testHarness.createDestination( "E2E Test Destination -" + UUID.randomUUID(), workspaceId, destinationDefinition.getDestinationDefinitionId(), @@ -438,10 +273,10 @@ public void testBackpressure() throws Exception { final String connectionName = "test-connection"; final UUID sourceId = source.getSourceId(); final UUID destinationId = destination.getDestinationId(); - final 
AirbyteCatalog catalog = discoverSourceSchema(sourceId); + final AirbyteCatalog catalog = testHarness.discoverSourceSchema(sourceId); final UUID connectionId = - createConnection(connectionName, sourceId, destinationId, Collections.emptyList(), catalog, null) + testHarness.createConnection(connectionName, sourceId, destinationId, Collections.emptyList(), catalog, null) .getConnectionId(); final JobInfoRead connectionSyncRead1 = apiClient.getConnectionApi() .syncConnection(new ConnectionIdRequestBody().connectionId(connectionId)); @@ -477,9 +312,9 @@ public void testBackpressure() throws Exception { matches = "true") public void testDowntimeDuringSync() throws Exception { final String connectionName = "test-connection"; - final UUID sourceId = createPostgresSource().getSourceId(); - final UUID destinationId = createDestination().getDestinationId(); - final AirbyteCatalog catalog = discoverSourceSchema(sourceId); + final UUID sourceId = testHarness.createPostgresSource().getSourceId(); + final UUID destinationId = testHarness.createDestination().getDestinationId(); + final AirbyteCatalog catalog = testHarness.discoverSourceSchema(sourceId); final SyncMode syncMode = SyncMode.FULL_REFRESH; final DestinationSyncMode destinationSyncMode = DestinationSyncMode.OVERWRITE; catalog.getStreams().forEach(s -> s.getConfig().syncMode(syncMode).destinationSyncMode(destinationSyncMode)); @@ -488,7 +323,7 @@ public void testDowntimeDuringSync() throws Exception { LOGGER.info("Checking " + input); final UUID connectionId = - createConnection(connectionName, sourceId, destinationId, List.of(), catalog, null).getConnectionId(); + testHarness.createConnection(connectionName, sourceId, destinationId, List.of(), catalog, null).getConnectionId(); JobInfoRead connectionSyncRead = null; @@ -547,15 +382,15 @@ public void testDowntimeDuringSync() throws Exception { matches = "true") public void testCancelSyncWithInterruption() throws Exception { final String connectionName = 
"test-connection"; - final UUID sourceId = createPostgresSource().getSourceId(); - final UUID destinationId = createDestination().getDestinationId(); - final UUID operationId = createOperation().getOperationId(); - final AirbyteCatalog catalog = discoverSourceSchema(sourceId); + final UUID sourceId = testHarness.createPostgresSource().getSourceId(); + final UUID destinationId = testHarness.createDestination().getDestinationId(); + final UUID operationId = testHarness.createOperation().getOperationId(); + final AirbyteCatalog catalog = testHarness.discoverSourceSchema(sourceId); final SyncMode syncMode = SyncMode.FULL_REFRESH; final DestinationSyncMode destinationSyncMode = DestinationSyncMode.OVERWRITE; catalog.getStreams().forEach(s -> s.getConfig().syncMode(syncMode).destinationSyncMode(destinationSyncMode)); final UUID connectionId = - createConnection(connectionName, sourceId, destinationId, List.of(operationId), catalog, null).getConnectionId(); + testHarness.createConnection(connectionName, sourceId, destinationId, List.of(operationId), catalog, null).getConnectionId(); final JobInfoRead connectionSyncRead = apiClient.getConnectionApi().syncConnection(new ConnectionIdRequestBody().connectionId(connectionId)); waitWhileJobHasStatus(apiClient.getJobsApi(), connectionSyncRead.getJob(), Set.of(JobStatus.RUNNING)); @@ -578,17 +413,17 @@ public void testCancelSyncWithInterruption() throws Exception { matches = "true") public void testCuttingOffPodBeforeFilesTransfer() throws Exception { final String connectionName = "test-connection"; - final UUID sourceId = createPostgresSource().getSourceId(); - final UUID destinationId = createDestination().getDestinationId(); - final UUID operationId = createOperation().getOperationId(); - final AirbyteCatalog catalog = discoverSourceSchema(sourceId); + final UUID sourceId = testHarness.createPostgresSource().getSourceId(); + final UUID destinationId = testHarness.createDestination().getDestinationId(); + final UUID operationId 
= testHarness.createOperation().getOperationId(); + final AirbyteCatalog catalog = testHarness.discoverSourceSchema(sourceId); final SyncMode syncMode = SyncMode.FULL_REFRESH; final DestinationSyncMode destinationSyncMode = DestinationSyncMode.OVERWRITE; catalog.getStreams().forEach(s -> s.getConfig().syncMode(syncMode).destinationSyncMode(destinationSyncMode)); LOGGER.info("Creating connection..."); final UUID connectionId = - createConnection(connectionName, sourceId, destinationId, List.of(operationId), catalog, null).getConnectionId(); + testHarness.createConnection(connectionName, sourceId, destinationId, List.of(operationId), catalog, null).getConnectionId(); LOGGER.info("Waiting for connection to be available in Temporal..."); @@ -622,17 +457,17 @@ public void testCuttingOffPodBeforeFilesTransfer() throws Exception { matches = "true") public void testCancelSyncWhenCancelledWhenWorkerIsNotRunning() throws Exception { final String connectionName = "test-connection"; - final UUID sourceId = createPostgresSource().getSourceId(); - final UUID destinationId = createDestination().getDestinationId(); - final UUID operationId = createOperation().getOperationId(); - final AirbyteCatalog catalog = discoverSourceSchema(sourceId); + final UUID sourceId = testHarness.createPostgresSource().getSourceId(); + final UUID destinationId = testHarness.createDestination().getDestinationId(); + final UUID operationId = testHarness.createOperation().getOperationId(); + final AirbyteCatalog catalog = testHarness.discoverSourceSchema(sourceId); final SyncMode syncMode = SyncMode.FULL_REFRESH; final DestinationSyncMode destinationSyncMode = DestinationSyncMode.OVERWRITE; catalog.getStreams().forEach(s -> s.getConfig().syncMode(syncMode).destinationSyncMode(destinationSyncMode)); LOGGER.info("Creating connection..."); final UUID connectionId = - createConnection(connectionName, sourceId, destinationId, List.of(operationId), catalog, null).getConnectionId(); + 
testHarness.createConnection(connectionName, sourceId, destinationId, List.of(operationId), catalog, null).getConnectionId(); LOGGER.info("Waiting for connection to be available in Temporal..."); @@ -675,389 +510,4 @@ public void testCancelSyncWhenCancelledWhenWorkerIsNotRunning() throws Exception assertEquals(JobStatus.CANCELLED, resp.get().getJob().getStatus()); } - private AirbyteCatalog discoverSourceSchema(final UUID sourceId) throws ApiException { - return apiClient.getSourceApi().discoverSchemaForSource(new SourceDiscoverSchemaRequestBody().sourceId(sourceId)).getCatalog(); - } - - private void assertSourceAndDestinationDbInSync(final boolean withScdTable) throws Exception { - final Database source = getSourceDatabase(); - - final Set sourceTables = listAllTables(source); - final Set sourceTablesWithRawTablesAdded = addAirbyteGeneratedTables(withScdTable, sourceTables); - final Database destination = getDestinationDatabase(); - final Set destinationTables = listAllTables(destination); - assertEquals(sourceTablesWithRawTablesAdded, destinationTables, - String.format("streams did not match.\n source stream names: %s\n destination stream names: %s\n", sourceTables, destinationTables)); - - for (final SchemaTableNamePair pair : sourceTables) { - final List sourceRecords = retrieveSourceRecords(source, pair.getFullyQualifiedTableName()); - assertRawDestinationContains(sourceRecords, pair); - } - } - - private static Database getSourceDatabase() { - if (IS_KUBE && IS_GKE) { - return GKEPostgresConfig.getSourceDatabase(); - } - return getDatabase(sourcePsql); - } - - private static Database getDatabase(final PostgreSQLContainer db) { - return new Database(DatabaseConnectionHelper.createDslContext(db, SQLDialect.POSTGRES)); - } - - private Database getDestinationDatabase() { - if (IS_KUBE && IS_GKE) { - return GKEPostgresConfig.getDestinationDatabase(); - } - return getDatabase(destinationPsql); - } - - private Set listAllTables(final Database database) throws 
SQLException { - return database.query( - context -> { - final Result fetch = - context.fetch( - "SELECT tablename, schemaname FROM pg_catalog.pg_tables WHERE schemaname != 'pg_catalog' AND schemaname != 'information_schema'"); - return fetch.stream() - .map(record -> { - final var schemaName = (String) record.get("schemaname"); - final var tableName = (String) record.get("tablename"); - return new SchemaTableNamePair(schemaName, tableName); - }) - .collect(Collectors.toSet()); - }); - } - - private Set addAirbyteGeneratedTables(final boolean withScdTable, final Set sourceTables) { - return sourceTables.stream().flatMap(x -> { - final String cleanedNameStream = x.tableName.replace(".", "_"); - final List explodedStreamNames = new ArrayList<>(List.of( - new SchemaTableNamePair(OUTPUT_NAMESPACE_PREFIX + x.schemaName, - String.format("_airbyte_raw_%s%s", OUTPUT_STREAM_PREFIX, cleanedNameStream)), - new SchemaTableNamePair(OUTPUT_NAMESPACE_PREFIX + x.schemaName, String.format("%s%s", OUTPUT_STREAM_PREFIX, cleanedNameStream)))); - if (withScdTable) { - explodedStreamNames - .add(new SchemaTableNamePair("_airbyte_" + OUTPUT_NAMESPACE_PREFIX + x.schemaName, - String.format("%s%s_stg", OUTPUT_STREAM_PREFIX, cleanedNameStream))); - explodedStreamNames - .add(new SchemaTableNamePair(OUTPUT_NAMESPACE_PREFIX + x.schemaName, String.format("%s%s_scd", OUTPUT_STREAM_PREFIX, cleanedNameStream))); - } - return explodedStreamNames.stream(); - }).collect(Collectors.toSet()); - } - - private void assertRawDestinationContains(final List sourceRecords, final SchemaTableNamePair pair) throws Exception { - final Set destinationRecords = new HashSet<>(retrieveRawDestinationRecords(pair)); - - assertEquals(sourceRecords.size(), destinationRecords.size(), - String.format("destination contains: %s record. 
source contains: %s, \nsource records %s \ndestination records: %s", - destinationRecords.size(), sourceRecords.size(), sourceRecords, destinationRecords)); - - for (final JsonNode sourceStreamRecord : sourceRecords) { - assertTrue(destinationRecords.contains(sourceStreamRecord), - String.format("destination does not contain record:\n %s \n destination contains:\n %s\n", - sourceStreamRecord, destinationRecords)); - } - } - - private ConnectionRead createConnection(final String name, - final UUID sourceId, - final UUID destinationId, - final List operationIds, - final AirbyteCatalog catalog, - final ConnectionSchedule schedule) - throws ApiException { - final ConnectionRead connection = apiClient.getConnectionApi().createConnection( - new ConnectionCreate() - .status(ConnectionStatus.ACTIVE) - .sourceId(sourceId) - .destinationId(destinationId) - .syncCatalog(catalog) - .schedule(schedule) - .operationIds(operationIds) - .name(name) - .namespaceDefinition(NamespaceDefinitionType.CUSTOMFORMAT) - .namespaceFormat(OUTPUT_NAMESPACE) - .prefix(OUTPUT_STREAM_PREFIX)); - connectionIds.add(connection.getConnectionId()); - return connection; - } - - private DestinationRead createDestination() throws ApiException { - return createDestination( - "AccTestDestination-" + UUID.randomUUID(), - workspaceId, - getDestinationDefId(), - getDestinationDbConfig()); - } - - private DestinationRead createDestination(final String name, final UUID workspaceId, final UUID destinationDefId, final JsonNode destinationConfig) - throws ApiException { - final DestinationRead destination = - apiClient.getDestinationApi().createDestination(new DestinationCreate() - .name(name) - .connectionConfiguration(Jsons.jsonNode(destinationConfig)) - .workspaceId(workspaceId) - .destinationDefinitionId(destinationDefId)); - destinationIds.add(destination.getDestinationId()); - return destination; - } - - private OperationRead createOperation() throws ApiException { - final OperatorConfiguration 
normalizationConfig = new OperatorConfiguration() - .operatorType(OperatorType.NORMALIZATION).normalization(new OperatorNormalization().option( - OptionEnum.BASIC)); - - final OperationCreate operationCreate = new OperationCreate() - .workspaceId(workspaceId) - .name("AccTestDestination-" + UUID.randomUUID()).operatorConfiguration(normalizationConfig); - - final OperationRead operation = apiClient.getOperationApi().createOperation(operationCreate); - operationIds.add(operation.getOperationId()); - return operation; - } - - private UUID getDestinationDefId() throws ApiException { - return apiClient.getDestinationDefinitionApi().listDestinationDefinitions().getDestinationDefinitions() - .stream() - .filter(dr -> dr.getName().toLowerCase().contains("postgres")) - .findFirst() - .orElseThrow() - .getDestinationDefinitionId(); - } - - private List retrieveSourceRecords(final Database database, final String table) throws SQLException { - return database.query(context -> context.fetch(String.format("SELECT * FROM %s;", table))) - .stream() - .map(Record::intoMap) - .map(Jsons::jsonNode) - .collect(Collectors.toList()); - } - - private List retrieveDestinationRecords(final Database database, final String table) throws SQLException { - return database.query(context -> context.fetch(String.format("SELECT * FROM %s;", table))) - .stream() - .map(Record::intoMap) - .map(r -> r.get(COLUMN_NAME_DATA)) - .map(f -> (JSONB) f) - .map(JSONB::data) - .map(Jsons::deserialize) - .map(Jsons::jsonNode) - .collect(Collectors.toList()); - } - - private List retrieveRawDestinationRecords(final SchemaTableNamePair pair) throws Exception { - final Database destination = getDestinationDatabase(); - final Set namePairs = listAllTables(destination); - - final String rawStreamName = String.format("_airbyte_raw_%s%s", OUTPUT_STREAM_PREFIX, pair.tableName.replace(".", "_")); - final SchemaTableNamePair rawTablePair = new SchemaTableNamePair(OUTPUT_NAMESPACE_PREFIX + pair.schemaName, rawStreamName); 
- assertTrue(namePairs.contains(rawTablePair), "can't find a non-normalized version (raw) of " + rawTablePair.getFullyQualifiedTableName()); - - return retrieveDestinationRecords(destination, rawTablePair.getFullyQualifiedTableName()); - } - - private JsonNode getSourceDbConfig() { - return getDbConfig(sourcePsql, false, false, Type.SOURCE); - } - - private JsonNode getDestinationDbConfig() { - return getDbConfig(destinationPsql, false, true, Type.DESTINATION); - } - - private JsonNode getDestinationDbConfigWithHiddenPassword() { - return getDbConfig(destinationPsql, true, true, Type.DESTINATION); - } - - private JsonNode getDbConfig(final PostgreSQLContainer psql, final boolean hiddenPassword, final boolean withSchema, final Type connectorType) { - try { - final Map dbConfig = (IS_KUBE && IS_GKE) ? GKEPostgresConfig.dbConfig(connectorType, hiddenPassword, withSchema) - : localConfig(psql, hiddenPassword, withSchema); - return Jsons.jsonNode(dbConfig); - } catch (final Exception e) { - throw new RuntimeException(e); - } - } - - private Map localConfig(final PostgreSQLContainer psql, final boolean hiddenPassword, final boolean withSchema) - throws UnknownHostException { - final Map dbConfig = new HashMap<>(); - // don't use psql.getHost() directly since the ip we need differs depending on environment - if (IS_KUBE) { - if (IS_MINIKUBE) { - // used with minikube driver=none instance - dbConfig.put("host", Inet4Address.getLocalHost().getHostAddress()); - } else { - // used on a single node with docker driver - dbConfig.put("host", "host.docker.internal"); - } - } else if (IS_MAC) { - dbConfig.put("host", "host.docker.internal"); - } else { - dbConfig.put("host", "localhost"); - } - - if (hiddenPassword) { - dbConfig.put("password", "**********"); - } else { - dbConfig.put("password", psql.getPassword()); - } - - dbConfig.put("port", psql.getFirstMappedPort()); - dbConfig.put("database", psql.getDatabaseName()); - dbConfig.put("username", psql.getUsername()); - 
dbConfig.put("ssl", false); - - if (withSchema) { - dbConfig.put("schema", "public"); - } - return dbConfig; - } - - private SourceDefinitionRead createE2eSourceDefinition() throws ApiException { - return apiClient.getSourceDefinitionApi().createSourceDefinition(new SourceDefinitionCreate() - .name("E2E Test Source") - .dockerRepository("airbyte/source-e2e-test") - .dockerImageTag(SOURCE_E2E_TEST_CONNECTOR_VERSION) - .documentationUrl(URI.create("https://example.com"))); - } - - private DestinationDefinitionRead createE2eDestinationDefinition() throws ApiException { - return apiClient.getDestinationDefinitionApi().createDestinationDefinition(new DestinationDefinitionCreate() - .name("E2E Test Destination") - .dockerRepository("airbyte/destination-e2e-test") - .dockerImageTag(DESTINATION_E2E_TEST_CONNECTOR_VERSION) - .documentationUrl(URI.create("https://example.com"))); - } - - private SourceRead createPostgresSource() throws ApiException { - return createSource( - "acceptanceTestDb-" + UUID.randomUUID(), - workspaceId, - getPostgresSourceDefinitionId(), - getSourceDbConfig()); - } - - private SourceRead createSource(final String name, final UUID workspaceId, final UUID sourceDefId, final JsonNode sourceConfig) - throws ApiException { - final SourceRead source = apiClient.getSourceApi().createSource(new SourceCreate() - .name(name) - .sourceDefinitionId(sourceDefId) - .workspaceId(workspaceId) - .connectionConfiguration(sourceConfig)); - sourceIds.add(source.getSourceId()); - return source; - } - - private UUID getPostgresSourceDefinitionId() throws ApiException { - return apiClient.getSourceDefinitionApi().listSourceDefinitions().getSourceDefinitions() - .stream() - .filter(sourceRead -> sourceRead.getName().equalsIgnoreCase("postgres")) - .findFirst() - .orElseThrow() - .getSourceDefinitionId(); - } - - private void clearSourceDbData() throws SQLException { - final Database database = getSourceDatabase(); - final Set pairs = listAllTables(database); - for (final 
SchemaTableNamePair pair : pairs) { - database.query(context -> context.execute(String.format("DROP TABLE %s.%s", pair.schemaName, pair.tableName))); - } - } - - private void clearDestinationDbData() throws SQLException { - final Database database = getDestinationDatabase(); - final Set pairs = listAllTables(database); - for (final SchemaTableNamePair pair : pairs) { - database.query(context -> context.execute(String.format("DROP TABLE %s.%s CASCADE", pair.schemaName, pair.tableName))); - } - } - - private void deleteSource(final UUID sourceId) throws ApiException { - apiClient.getSourceApi().deleteSource(new SourceIdRequestBody().sourceId(sourceId)); - } - - private void disableConnection(final UUID connectionId) throws ApiException { - final ConnectionRead connection = apiClient.getConnectionApi().getConnection(new ConnectionIdRequestBody().connectionId(connectionId)); - final ConnectionUpdate connectionUpdate = - new ConnectionUpdate() - .prefix(connection.getPrefix()) - .connectionId(connectionId) - .operationIds(connection.getOperationIds()) - .status(ConnectionStatus.DEPRECATED) - .schedule(connection.getSchedule()) - .syncCatalog(connection.getSyncCatalog()); - apiClient.getConnectionApi().updateConnection(connectionUpdate); - } - - private void deleteDestination(final UUID destinationId) throws ApiException { - apiClient.getDestinationApi().deleteDestination(new DestinationIdRequestBody().destinationId(destinationId)); - } - - private void deleteOperation(final UUID destinationId) throws ApiException { - apiClient.getOperationApi().deleteOperation(new OperationIdRequestBody().operationId(destinationId)); - } - - private static void waitForSuccessfulJob(final JobsApi jobsApi, final JobRead originalJob) throws InterruptedException, ApiException { - final JobRead job = waitWhileJobHasStatus(jobsApi, originalJob, Sets.newHashSet(JobStatus.PENDING, JobStatus.RUNNING)); - - if (!JobStatus.SUCCEEDED.equals(job.getStatus())) { - // If a job failed during testing, 
show us why. - final JobIdRequestBody id = new JobIdRequestBody(); - id.setId(originalJob.getId()); - for (final AttemptInfoRead attemptInfo : jobsApi.getJobInfo(id).getAttempts()) { - LOGGER.warn("Unsuccessful job attempt " + attemptInfo.getAttempt().getId() - + " with status " + job.getStatus() + " produced log output as follows: " + attemptInfo.getLogs().getLogLines()); - } - } - assertEquals(JobStatus.SUCCEEDED, job.getStatus()); - } - - private static JobRead waitWhileJobHasStatus(final JobsApi jobsApi, final JobRead originalJob, final Set jobStatuses) - throws InterruptedException, ApiException { - return waitWhileJobHasStatus(jobsApi, originalJob, jobStatuses, Duration.ofMinutes(6)); - } - - @SuppressWarnings("BusyWait") - private static JobRead waitWhileJobHasStatus(final JobsApi jobsApi, - final JobRead originalJob, - final Set jobStatuses, - final Duration maxWaitTime) - throws InterruptedException, ApiException { - JobRead job = originalJob; - - final Instant waitStart = Instant.now(); - while (jobStatuses.contains(job.getStatus())) { - if (Duration.between(waitStart, Instant.now()).compareTo(maxWaitTime) > 0) { - LOGGER.info("Max wait time of {} has been reached. Stopping wait.", maxWaitTime); - break; - } - sleep(1000); - - job = jobsApi.getJobInfo(new JobIdRequestBody().id(job.getId())).getJob(); - LOGGER.info("waiting: job id: {} config type: {} status: {}", job.getId(), job.getConfigType(), job.getStatus()); - } - return job; - } - - @SuppressWarnings("BusyWait") - private static ConnectionState waitForConnectionState(final AirbyteApiClient apiClient, final UUID connectionId) - throws ApiException, InterruptedException { - ConnectionState connectionState = apiClient.getConnectionApi().getState(new ConnectionIdRequestBody().connectionId(connectionId)); - int count = 0; - while (count < 60 && (connectionState.getState() == null || connectionState.getState().isNull())) { - LOGGER.info("fetching connection state. 
attempt: {}", count++); - connectionState = apiClient.getConnectionApi().getState(new ConnectionIdRequestBody().connectionId(connectionId)); - sleep(1000); - } - return connectionState; - } - - public enum Type { - SOURCE, - DESTINATION - } - } diff --git a/airbyte-tests/src/acceptanceTests/java/io/airbyte/test/acceptance/BasicAcceptanceTests.java b/airbyte-tests/src/acceptanceTests/java/io/airbyte/test/acceptance/BasicAcceptanceTests.java index 7b98ca7a4a8a..f6289d41b98e 100644 --- a/airbyte-tests/src/acceptanceTests/java/io/airbyte/test/acceptance/BasicAcceptanceTests.java +++ b/airbyte-tests/src/acceptanceTests/java/io/airbyte/test/acceptance/BasicAcceptanceTests.java @@ -5,6 +5,11 @@ package io.airbyte.test.acceptance; import static io.airbyte.api.client.model.generated.ConnectionSchedule.TimeUnitEnum.MINUTES; +import static io.airbyte.test.utils.AirbyteAcceptanceTestHarness.COLUMN_ID; +import static io.airbyte.test.utils.AirbyteAcceptanceTestHarness.COLUMN_NAME; +import static io.airbyte.test.utils.AirbyteAcceptanceTestHarness.STREAM_NAME; +import static io.airbyte.test.utils.AirbyteAcceptanceTestHarness.waitForSuccessfulJob; +import static io.airbyte.test.utils.AirbyteAcceptanceTestHarness.waitWhileJobHasStatus; import static java.lang.Thread.sleep; import static org.junit.jupiter.api.Assertions.*; @@ -13,37 +18,21 @@ import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; import com.google.common.collect.Sets; -import com.google.common.io.Resources; import io.airbyte.api.client.AirbyteApiClient; -import io.airbyte.api.client.generated.JobsApi; import io.airbyte.api.client.invoker.generated.ApiClient; import io.airbyte.api.client.invoker.generated.ApiException; import io.airbyte.api.client.model.generated.*; import io.airbyte.commons.json.Jsons; -import io.airbyte.commons.util.MoreProperties; import io.airbyte.db.Database; -import io.airbyte.test.airbyte_test_container.AirbyteTestContainer; -import 
io.airbyte.test.utils.DatabaseConnectionHelper; +import io.airbyte.test.utils.AirbyteAcceptanceTestHarness; import io.airbyte.test.utils.PostgreSQLContainerHelper; -import io.airbyte.workers.temporal.TemporalUtils; -import io.airbyte.workers.temporal.scheduling.ConnectionManagerWorkflow; +import io.airbyte.test.utils.SchemaTableNamePair; import io.airbyte.workers.temporal.scheduling.state.WorkflowState; -import io.temporal.client.WorkflowClient; -import io.temporal.serviceclient.WorkflowServiceStubs; -import java.io.File; import java.io.IOException; -import java.net.URI; import java.net.URISyntaxException; -import java.nio.file.Path; import java.sql.SQLException; import java.time.Duration; -import java.time.Instant; import java.util.*; -import java.util.stream.Collectors; -import org.jooq.JSONB; -import org.jooq.Record; -import org.jooq.Result; -import org.jooq.SQLDialect; import org.junit.jupiter.api.*; import org.junit.jupiter.api.condition.DisabledIfEnvironmentVariable; import org.slf4j.Logger; @@ -66,78 +55,20 @@ matches = "true") public class BasicAcceptanceTests { - private static final Logger LOGGER = LoggerFactory.getLogger(AdvancedAcceptanceTests.class); - - private static final String DOCKER_COMPOSE_FILE_NAME = "docker-compose.yaml"; - // assume env file is one directory level up from airbyte-tests. 
- private final static File ENV_FILE = Path.of(System.getProperty("user.dir")).getParent().resolve(".env").toFile(); - - private static final String SOURCE_E2E_TEST_CONNECTOR_VERSION = "0.1.1"; - private static final String DESTINATION_E2E_TEST_CONNECTOR_VERSION = "0.1.1"; - - private static final boolean IS_MAC = System.getProperty("os.name").startsWith("Mac"); - private static final boolean USE_EXTERNAL_DEPLOYMENT = - System.getenv("USE_EXTERNAL_DEPLOYMENT") != null && System.getenv("USE_EXTERNAL_DEPLOYMENT").equalsIgnoreCase("true"); - - private static final String OUTPUT_NAMESPACE_PREFIX = "output_namespace_"; - private static final String OUTPUT_NAMESPACE = OUTPUT_NAMESPACE_PREFIX + "${SOURCE_NAMESPACE}"; - private static final String OUTPUT_STREAM_PREFIX = "output_table_"; - private static final String TABLE_NAME = "id_and_name"; - private static final String STREAM_NAME = TABLE_NAME; - private static final String COLUMN_ID = "id"; - private static final String COLUMN_NAME = "name"; - private static final String COLUMN_NAME_DATA = "_airbyte_data"; - private static final String SOURCE_USERNAME = "sourceusername"; - private static final String SOURCE_PASSWORD = "hunter2"; - - /** - * When the acceptance tests are run against a local instance of docker-compose these test - * containers are used. 
- */ - private static PostgreSQLContainer sourcePsql; - private static PostgreSQLContainer destinationPsql; - private static AirbyteTestContainer airbyteTestContainer; + private static final Logger LOGGER = LoggerFactory.getLogger(BasicAcceptanceTests.class); + + private static AirbyteAcceptanceTestHarness testHarness; private static AirbyteApiClient apiClient; private static UUID workspaceId; + private static PostgreSQLContainer sourcePsql; - private List sourceIds; - private List connectionIds; - private List destinationIds; - private List operationIds; - - @SuppressWarnings("UnstableApiUsage") @BeforeAll public static void init() throws URISyntaxException, IOException, InterruptedException, ApiException { - sourcePsql = new PostgreSQLContainer("postgres:13-alpine") - .withUsername(SOURCE_USERNAME) - .withPassword(SOURCE_PASSWORD); - sourcePsql.start(); - - // by default use airbyte deployment governed by a test container. - if (!USE_EXTERNAL_DEPLOYMENT) { - LOGGER.info("Using deployment of airbyte managed by test containers."); - airbyteTestContainer = new AirbyteTestContainer.Builder(new File(Resources.getResource(DOCKER_COMPOSE_FILE_NAME).toURI())) - .setEnv(MoreProperties.envFileToProperties(ENV_FILE)) - // override env VERSION to use dev to test current build of airbyte. - .setEnvVariable("VERSION", "dev") - // override to use test mounts. 
- .setEnvVariable("DATA_DOCKER_MOUNT", "airbyte_data_migration_test") - .setEnvVariable("DB_DOCKER_MOUNT", "airbyte_db_migration_test") - .setEnvVariable("WORKSPACE_DOCKER_MOUNT", "airbyte_workspace_migration_test") - .setEnvVariable("LOCAL_ROOT", "/tmp/airbyte_local_migration_test") - .setEnvVariable("LOCAL_DOCKER_MOUNT", "/tmp/airbyte_local_migration_test") - .build(); - airbyteTestContainer.startBlocking(); - } else { - LOGGER.info("Using external deployment of airbyte."); - } - apiClient = new AirbyteApiClient( new ApiClient().setScheme("http") .setHost("localhost") .setPort(8001) .setBasePath("/api")); - // work in whatever default workspace is present. workspaceId = apiClient.getWorkspaceApi().listWorkspaces().getWorkspaces().get(0).getWorkspaceId(); LOGGER.info("workspaceId = " + workspaceId); @@ -152,62 +83,29 @@ public static void init() throws URISyntaxException, IOException, InterruptedExc LOGGER.info("pg source definition: {}", sourceDef.getDockerImageTag()); LOGGER.info("pg destination definition: {}", destinationDef.getDockerImageTag()); - destinationPsql = new PostgreSQLContainer("postgres:13-alpine"); - destinationPsql.start(); + testHarness = new AirbyteAcceptanceTestHarness(apiClient, workspaceId); + sourcePsql = testHarness.getSourcePsql(); } @AfterAll public static void end() { - sourcePsql.stop(); - destinationPsql.stop(); - - if (airbyteTestContainer != null) { - airbyteTestContainer.stop(); - } + testHarness.stopDbAndContainers(); } @BeforeEach - public void setup() { - PostgreSQLContainerHelper.runSqlScript(MountableFile.forClasspathResource("postgres_init.sql"), sourcePsql); - - destinationPsql = new PostgreSQLContainer("postgres:13-alpine"); - destinationPsql.start(); - - sourceIds = Lists.newArrayList(); - connectionIds = Lists.newArrayList(); - destinationIds = Lists.newArrayList(); - operationIds = Lists.newArrayList(); + public void setup() throws SQLException, URISyntaxException, IOException { + testHarness.setup(); } @AfterEach 
public void tearDown() { - try { - clearSourceDbData(); - - for (final UUID operationId : operationIds) { - deleteOperation(operationId); - } - - for (final UUID connectionId : connectionIds) { - disableConnection(connectionId); - } - - for (final UUID sourceId : sourceIds) { - deleteSource(sourceId); - } - - for (final UUID destinationId : destinationIds) { - deleteDestination(destinationId); - } - } catch (Exception e) { - LOGGER.error("Error tearing down test fixtures:", e); - } + testHarness.cleanup(); } @Test @Order(-2) public void testGetDestinationSpec() throws ApiException { - final UUID destinationDefinitionId = getDestinationDefId(); + final UUID destinationDefinitionId = testHarness.getDestinationDefId(); final DestinationDefinitionSpecificationRead spec = apiClient.getDestinationDefinitionSpecificationApi() .getDestinationDefinitionSpecification( new DestinationDefinitionIdWithWorkspaceId().destinationDefinitionId(destinationDefinitionId).workspaceId(UUID.randomUUID())); @@ -227,7 +125,7 @@ public void testFailedGet404() { @Test @Order(0) public void testGetSourceSpec() throws ApiException { - final UUID sourceDefId = getPostgresSourceDefinitionId(); + final UUID sourceDefId = testHarness.getPostgresSourceDefinitionId(); final SourceDefinitionSpecificationRead spec = apiClient.getSourceDefinitionSpecificationApi() .getSourceDefinitionSpecification(new SourceDefinitionIdWithWorkspaceId().sourceDefinitionId(sourceDefId).workspaceId(UUID.randomUUID())); assertEquals(sourceDefId, spec.getSourceDefinitionId()); @@ -237,11 +135,11 @@ public void testGetSourceSpec() throws ApiException { @Test @Order(1) public void testCreateDestination() throws ApiException { - final UUID destinationDefId = getDestinationDefId(); - final JsonNode destinationConfig = getDestinationDbConfig(); + final UUID destinationDefId = testHarness.getDestinationDefId(); + final JsonNode destinationConfig = testHarness.getDestinationDbConfig(); final String name = "AccTestDestinationDb-" + 
UUID.randomUUID(); - final DestinationRead createdDestination = createDestination( + final DestinationRead createdDestination = testHarness.createDestination( name, workspaceId, destinationDefId, @@ -250,13 +148,13 @@ public void testCreateDestination() throws ApiException { assertEquals(name, createdDestination.getName()); assertEquals(destinationDefId, createdDestination.getDestinationDefinitionId()); assertEquals(workspaceId, createdDestination.getWorkspaceId()); - assertEquals(getDestinationDbConfigWithHiddenPassword(), createdDestination.getConnectionConfiguration()); + assertEquals(testHarness.getDestinationDbConfigWithHiddenPassword(), createdDestination.getConnectionConfiguration()); } @Test @Order(2) public void testDestinationCheckConnection() throws ApiException { - final UUID destinationId = createDestination().getDestinationId(); + final UUID destinationId = testHarness.createDestination().getDestinationId(); final CheckConnectionRead.StatusEnum checkOperationStatus = apiClient.getDestinationApi() .checkConnectionToDestination(new DestinationIdRequestBody().destinationId(destinationId)) @@ -269,10 +167,10 @@ public void testDestinationCheckConnection() throws ApiException { @Order(3) public void testCreateSource() throws ApiException { final String dbName = "acc-test-db"; - final UUID postgresSourceDefinitionId = getPostgresSourceDefinitionId(); - final JsonNode sourceDbConfig = getSourceDbConfig(); + final UUID postgresSourceDefinitionId = testHarness.getPostgresSourceDefinitionId(); + final JsonNode sourceDbConfig = testHarness.getSourceDbConfig(); - final SourceRead response = createSource( + final SourceRead response = testHarness.createSource( dbName, workspaceId, postgresSourceDefinitionId, @@ -290,7 +188,7 @@ public void testCreateSource() throws ApiException { @Test @Order(4) public void testSourceCheckConnection() throws ApiException { - final UUID sourceId = createPostgresSource().getSourceId(); + final UUID sourceId = 
testHarness.createPostgresSource().getSourceId(); final CheckConnectionRead checkConnectionRead = apiClient.getSourceApi().checkConnectionToSource(new SourceIdRequestBody().sourceId(sourceId)); @@ -303,9 +201,9 @@ public void testSourceCheckConnection() throws ApiException { @Test @Order(5) public void testDiscoverSourceSchema() throws ApiException { - final UUID sourceId = createPostgresSource().getSourceId(); + final UUID sourceId = testHarness.createPostgresSource().getSourceId(); - final AirbyteCatalog actual = discoverSourceSchema(sourceId); + final AirbyteCatalog actual = testHarness.discoverSourceSchema(sourceId); final Map> fields = ImmutableMap.of( COLUMN_ID, ImmutableMap.of("type", DataType.NUMBER), @@ -340,16 +238,17 @@ public void testDiscoverSourceSchema() throws ApiException { @Test @Order(6) public void testCreateConnection() throws ApiException { - final UUID sourceId = createPostgresSource().getSourceId(); - final AirbyteCatalog catalog = discoverSourceSchema(sourceId); - final UUID destinationId = createDestination().getDestinationId(); - final UUID operationId = createOperation().getOperationId(); + final UUID sourceId = testHarness.createPostgresSource().getSourceId(); + final AirbyteCatalog catalog = testHarness.discoverSourceSchema(sourceId); + final UUID destinationId = testHarness.createDestination().getDestinationId(); + final UUID operationId = testHarness.createOperation().getOperationId(); final String name = "test-connection-" + UUID.randomUUID(); final ConnectionSchedule schedule = new ConnectionSchedule().timeUnit(MINUTES).units(100L); final SyncMode syncMode = SyncMode.FULL_REFRESH; final DestinationSyncMode destinationSyncMode = DestinationSyncMode.OVERWRITE; catalog.getStreams().forEach(s -> s.getConfig().syncMode(syncMode).destinationSyncMode(destinationSyncMode)); - final ConnectionRead createdConnection = createConnection(name, sourceId, destinationId, List.of(operationId), catalog, schedule); + final ConnectionRead 
createdConnection = + testHarness.createConnection(name, sourceId, destinationId, List.of(operationId), catalog, schedule); assertEquals(sourceId, createdConnection.getSourceId()); assertEquals(destinationId, createdConnection.getDestinationId()); @@ -363,9 +262,9 @@ public void testCreateConnection() throws ApiException { @Test @Order(7) public void testCancelSync() throws Exception { - final SourceDefinitionRead sourceDefinition = createE2eSourceDefinition(); + final SourceDefinitionRead sourceDefinition = testHarness.createE2eSourceDefinition(); - final SourceRead source = createSource( + final SourceRead source = testHarness.createSource( "E2E Test Source -" + UUID.randomUUID(), workspaceId, sourceDefinition.getSourceDefinitionId(), @@ -377,14 +276,14 @@ public void testCancelSync() throws Exception { final String connectionName = "test-connection"; final UUID sourceId = source.getSourceId(); - final UUID destinationId = createDestination().getDestinationId(); - final UUID operationId = createOperation().getOperationId(); - final AirbyteCatalog catalog = discoverSourceSchema(sourceId); + final UUID destinationId = testHarness.createDestination().getDestinationId(); + final UUID operationId = testHarness.createOperation().getOperationId(); + final AirbyteCatalog catalog = testHarness.discoverSourceSchema(sourceId); final SyncMode syncMode = SyncMode.FULL_REFRESH; final DestinationSyncMode destinationSyncMode = DestinationSyncMode.OVERWRITE; catalog.getStreams().forEach(s -> s.getConfig().syncMode(syncMode).destinationSyncMode(destinationSyncMode)); final UUID connectionId = - createConnection(connectionName, sourceId, destinationId, List.of(operationId), catalog, null).getConnectionId(); + testHarness.createConnection(connectionName, sourceId, destinationId, List.of(operationId), catalog, null).getConnectionId(); final JobInfoRead connectionSyncRead = apiClient.getConnectionApi().syncConnection(new ConnectionIdRequestBody().connectionId(connectionId)); // wait 
to get out of PENDING @@ -399,17 +298,18 @@ public void testCancelSync() throws Exception { @Order(8) public void testScheduledSync() throws Exception { final String connectionName = "test-connection"; - final UUID sourceId = createPostgresSource().getSourceId(); - final UUID destinationId = createDestination().getDestinationId(); - final UUID operationId = createOperation().getOperationId(); - final AirbyteCatalog catalog = discoverSourceSchema(sourceId); + final UUID sourceId = testHarness.createPostgresSource().getSourceId(); + final UUID destinationId = testHarness.createDestination().getDestinationId(); + final UUID operationId = testHarness.createOperation().getOperationId(); + final AirbyteCatalog catalog = testHarness.discoverSourceSchema(sourceId); final ConnectionSchedule connectionSchedule = new ConnectionSchedule().units(1L).timeUnit(MINUTES); final SyncMode syncMode = SyncMode.FULL_REFRESH; final DestinationSyncMode destinationSyncMode = DestinationSyncMode.OVERWRITE; catalog.getStreams().forEach(s -> s.getConfig().syncMode(syncMode).destinationSyncMode(destinationSyncMode)); - var conn = createConnection(connectionName, sourceId, destinationId, List.of(operationId), catalog, connectionSchedule); + final var conn = + testHarness.createConnection(connectionName, sourceId, destinationId, List.of(operationId), catalog, connectionSchedule); // When a new connection is created, Airbyte might sync it immediately (before the sync interval). // Then it will wait the sync interval. 
@@ -417,14 +317,14 @@ public void testScheduledSync() throws Exception { // syncs in progress List jobs = new ArrayList<>(); while (jobs.size() < 2) { - var listSyncJobsRequest = new io.airbyte.api.client.model.generated.JobListRequestBody().configTypes(List.of(JobConfigType.SYNC)) + final var listSyncJobsRequest = new io.airbyte.api.client.model.generated.JobListRequestBody().configTypes(List.of(JobConfigType.SYNC)) .configId(conn.getConnectionId().toString()); - var resp = apiClient.getJobsApi().listJobsFor(listSyncJobsRequest); + final var resp = apiClient.getJobsApi().listJobsFor(listSyncJobsRequest); jobs = resp.getJobs(); sleep(Duration.ofSeconds(30).toMillis()); } - assertSourceAndDestinationDbInSync(false); + testHarness.assertSourceAndDestinationDbInSync(false); } @Test @@ -434,19 +334,19 @@ public void testMultipleSchemasAndTablesSync() throws Exception { PostgreSQLContainerHelper.runSqlScript(MountableFile.forClasspathResource("postgres_second_schema_multiple_tables.sql"), sourcePsql); final String connectionName = "test-connection"; - final UUID sourceId = createPostgresSource().getSourceId(); - final UUID destinationId = createDestination().getDestinationId(); - final UUID operationId = createOperation().getOperationId(); - final AirbyteCatalog catalog = discoverSourceSchema(sourceId); + final UUID sourceId = testHarness.createPostgresSource().getSourceId(); + final UUID destinationId = testHarness.createDestination().getDestinationId(); + final UUID operationId = testHarness.createOperation().getOperationId(); + final AirbyteCatalog catalog = testHarness.discoverSourceSchema(sourceId); final SyncMode syncMode = SyncMode.FULL_REFRESH; final DestinationSyncMode destinationSyncMode = DestinationSyncMode.OVERWRITE; catalog.getStreams().forEach(s -> s.getConfig().syncMode(syncMode).destinationSyncMode(destinationSyncMode)); final UUID connectionId = - createConnection(connectionName, sourceId, destinationId, List.of(operationId), catalog, 
null).getConnectionId(); + testHarness.createConnection(connectionName, sourceId, destinationId, List.of(operationId), catalog, null).getConnectionId(); final JobInfoRead connectionSyncRead = apiClient.getConnectionApi().syncConnection(new ConnectionIdRequestBody().connectionId(connectionId)); waitForSuccessfulJob(apiClient.getJobsApi(), connectionSyncRead.getJob()); - assertSourceAndDestinationDbInSync(false); + testHarness.assertSourceAndDestinationDbInSync(false); } @Test @@ -456,30 +356,30 @@ public void testMultipleSchemasSameTablesSync() throws Exception { PostgreSQLContainerHelper.runSqlScript(MountableFile.forClasspathResource("postgres_separate_schema_same_table.sql"), sourcePsql); final String connectionName = "test-connection"; - final UUID sourceId = createPostgresSource().getSourceId(); - final UUID destinationId = createDestination().getDestinationId(); - final UUID operationId = createOperation().getOperationId(); - final AirbyteCatalog catalog = discoverSourceSchema(sourceId); + final UUID sourceId = testHarness.createPostgresSource().getSourceId(); + final UUID destinationId = testHarness.createDestination().getDestinationId(); + final UUID operationId = testHarness.createOperation().getOperationId(); + final AirbyteCatalog catalog = testHarness.discoverSourceSchema(sourceId); final SyncMode syncMode = SyncMode.FULL_REFRESH; final DestinationSyncMode destinationSyncMode = DestinationSyncMode.OVERWRITE; catalog.getStreams().forEach(s -> s.getConfig().syncMode(syncMode).destinationSyncMode(destinationSyncMode)); final UUID connectionId = - createConnection(connectionName, sourceId, destinationId, List.of(operationId), catalog, null).getConnectionId(); + testHarness.createConnection(connectionName, sourceId, destinationId, List.of(operationId), catalog, null).getConnectionId(); final JobInfoRead connectionSyncRead = apiClient.getConnectionApi().syncConnection(new ConnectionIdRequestBody().connectionId(connectionId)); 
waitForSuccessfulJob(apiClient.getJobsApi(), connectionSyncRead.getJob()); - assertSourceAndDestinationDbInSync(false); + testHarness.assertSourceAndDestinationDbInSync(false); } @Test @Order(11) public void testIncrementalDedupeSync() throws Exception { final String connectionName = "test-connection"; - final UUID sourceId = createPostgresSource().getSourceId(); - final UUID destinationId = createDestination().getDestinationId(); - final UUID operationId = createOperation().getOperationId(); - final AirbyteCatalog catalog = discoverSourceSchema(sourceId); + final UUID sourceId = testHarness.createPostgresSource().getSourceId(); + final UUID destinationId = testHarness.createDestination().getDestinationId(); + final UUID operationId = testHarness.createOperation().getOperationId(); + final AirbyteCatalog catalog = testHarness.discoverSourceSchema(sourceId); final SyncMode syncMode = SyncMode.INCREMENTAL; final DestinationSyncMode destinationSyncMode = DestinationSyncMode.APPEND_DEDUP; catalog.getStreams().forEach(s -> s.getConfig() @@ -488,32 +388,32 @@ public void testIncrementalDedupeSync() throws Exception { .destinationSyncMode(destinationSyncMode) .primaryKey(List.of(List.of(COLUMN_NAME)))); final UUID connectionId = - createConnection(connectionName, sourceId, destinationId, List.of(operationId), catalog, null).getConnectionId(); + testHarness.createConnection(connectionName, sourceId, destinationId, List.of(operationId), catalog, null).getConnectionId(); // sync from start final JobInfoRead connectionSyncRead1 = apiClient.getConnectionApi() .syncConnection(new ConnectionIdRequestBody().connectionId(connectionId)); waitForSuccessfulJob(apiClient.getJobsApi(), connectionSyncRead1.getJob()); - assertSourceAndDestinationDbInSync(true); + testHarness.assertSourceAndDestinationDbInSync(true); // add new records and run again. 
- final Database source = getSourceDatabase(); - final List expectedRawRecords = retrieveSourceRecords(source, STREAM_NAME); + final Database source = testHarness.getSourceDatabase(); + final List expectedRawRecords = testHarness.retrieveSourceRecords(source, STREAM_NAME); expectedRawRecords.add(Jsons.jsonNode(ImmutableMap.builder().put(COLUMN_ID, 6).put(COLUMN_NAME, "sherif").build())); expectedRawRecords.add(Jsons.jsonNode(ImmutableMap.builder().put(COLUMN_ID, 7).put(COLUMN_NAME, "chris").build())); source.query(ctx -> ctx.execute("UPDATE id_and_name SET id=6 WHERE name='sherif'")); source.query(ctx -> ctx.execute("INSERT INTO id_and_name(id, name) VALUES(7, 'chris')")); // retrieve latest snapshot of source records after modifications; the deduplicated table in // destination should mirror this latest state of records - final List expectedNormalizedRecords = retrieveSourceRecords(source, STREAM_NAME); + final List expectedNormalizedRecords = testHarness.retrieveSourceRecords(source, STREAM_NAME); final JobInfoRead connectionSyncRead2 = apiClient.getConnectionApi() .syncConnection(new ConnectionIdRequestBody().connectionId(connectionId)); waitForSuccessfulJob(apiClient.getJobsApi(), connectionSyncRead2.getJob()); - assertRawDestinationContains(expectedRawRecords, new SchemaTableNamePair("public", STREAM_NAME)); - assertNormalizedDestinationContains(expectedNormalizedRecords); + testHarness.assertRawDestinationContains(expectedRawRecords, new SchemaTableNamePair("public", STREAM_NAME)); + testHarness.assertNormalizedDestinationContains(expectedNormalizedRecords); } @Test @@ -521,10 +421,10 @@ public void testIncrementalDedupeSync() throws Exception { public void testIncrementalSync() throws Exception { LOGGER.info("Starting testIncrementalSync()"); final String connectionName = "test-connection"; - final UUID sourceId = createPostgresSource().getSourceId(); - final UUID destinationId = createDestination().getDestinationId(); - final UUID operationId = 
createOperation().getOperationId(); - final AirbyteCatalog catalog = discoverSourceSchema(sourceId); + final UUID sourceId = testHarness.createPostgresSource().getSourceId(); + final UUID destinationId = testHarness.createDestination().getDestinationId(); + final UUID operationId = testHarness.createOperation().getOperationId(); + final AirbyteCatalog catalog = testHarness.discoverSourceSchema(sourceId); final AirbyteStream stream = catalog.getStreams().get(0).getStream(); assertEquals(Lists.newArrayList(SyncMode.FULL_REFRESH, SyncMode.INCREMENTAL), stream.getSupportedSyncModes()); @@ -540,7 +440,7 @@ public void testIncrementalSync() throws Exception { .cursorField(List.of(COLUMN_ID)) .destinationSyncMode(destinationSyncMode)); final UUID connectionId = - createConnection(connectionName, sourceId, destinationId, List.of(operationId), catalog, null).getConnectionId(); + testHarness.createConnection(connectionName, sourceId, destinationId, List.of(operationId), catalog, null).getConnectionId(); LOGGER.info("Beginning testIncrementalSync() sync 1"); final JobInfoRead connectionSyncRead1 = apiClient.getConnectionApi() @@ -548,12 +448,12 @@ public void testIncrementalSync() throws Exception { waitForSuccessfulJob(apiClient.getJobsApi(), connectionSyncRead1.getJob()); LOGGER.info("state after sync 1: {}", apiClient.getConnectionApi().getState(new ConnectionIdRequestBody().connectionId(connectionId))); - assertSourceAndDestinationDbInSync(false); + testHarness.assertSourceAndDestinationDbInSync(false); // add new records and run again. - final Database source = getSourceDatabase(); + final Database source = testHarness.getSourceDatabase(); // get contents of source before mutating records. 
- final List expectedRecords = retrieveSourceRecords(source, STREAM_NAME); + final List expectedRecords = testHarness.retrieveSourceRecords(source, STREAM_NAME); expectedRecords.add(Jsons.jsonNode(ImmutableMap.builder().put(COLUMN_ID, 6).put(COLUMN_NAME, "geralt").build())); // add a new record source.query(ctx -> ctx.execute("INSERT INTO id_and_name(id, name) VALUES(6, 'geralt')")); @@ -568,7 +468,7 @@ public void testIncrementalSync() throws Exception { waitForSuccessfulJob(apiClient.getJobsApi(), connectionSyncRead2.getJob()); LOGGER.info("state after sync 2: {}", apiClient.getConnectionApi().getState(new ConnectionIdRequestBody().connectionId(connectionId))); - assertRawDestinationContains(expectedRecords, new SchemaTableNamePair("public", STREAM_NAME)); + testHarness.assertRawDestinationContains(expectedRecords, new SchemaTableNamePair("public", STREAM_NAME)); // reset back to no data. @@ -579,7 +479,7 @@ public void testIncrementalSync() throws Exception { LOGGER.info("state after reset: {}", apiClient.getConnectionApi().getState(new ConnectionIdRequestBody().connectionId(connectionId))); - assertRawDestinationContains(Collections.emptyList(), new SchemaTableNamePair("public", + testHarness.assertRawDestinationContains(Collections.emptyList(), new SchemaTableNamePair("public", STREAM_NAME)); // sync one more time. verify it is the equivalent of a full refresh. 
@@ -589,7 +489,7 @@ public void testIncrementalSync() throws Exception { waitForSuccessfulJob(apiClient.getJobsApi(), connectionSyncRead3.getJob()); LOGGER.info("state after sync 3: {}", apiClient.getConnectionApi().getState(new ConnectionIdRequestBody().connectionId(connectionId))); - assertSourceAndDestinationDbInSync(false); + testHarness.assertSourceAndDestinationDbInSync(false); } @@ -597,10 +497,10 @@ public void testIncrementalSync() throws Exception { @Order(13) public void testDeleteConnection() throws Exception { final String connectionName = "test-connection"; - final UUID sourceId = createPostgresSource().getSourceId(); - final UUID destinationId = createDestination().getDestinationId(); - final UUID operationId = createOperation().getOperationId(); - final AirbyteCatalog catalog = discoverSourceSchema(sourceId); + final UUID sourceId = testHarness.createPostgresSource().getSourceId(); + final UUID destinationId = testHarness.createDestination().getDestinationId(); + final UUID operationId = testHarness.createOperation().getOperationId(); + final AirbyteCatalog catalog = testHarness.discoverSourceSchema(sourceId); final SyncMode syncMode = SyncMode.INCREMENTAL; final DestinationSyncMode destinationSyncMode = DestinationSyncMode.APPEND_DEDUP; catalog.getStreams().forEach(s -> s.getConfig() @@ -610,7 +510,7 @@ public void testDeleteConnection() throws Exception { .primaryKey(List.of(List.of(COLUMN_NAME)))); UUID connectionId = - createConnection(connectionName, sourceId, destinationId, List.of(operationId), catalog, null).getConnectionId(); + testHarness.createConnection(connectionName, sourceId, destinationId, List.of(operationId), catalog, null).getConnectionId(); final JobInfoRead connectionSyncRead = apiClient.getConnectionApi().syncConnection(new ConnectionIdRequestBody().connectionId(connectionId)); waitWhileJobHasStatus(apiClient.getJobsApi(), connectionSyncRead.getJob(), Set.of(JobStatus.RUNNING)); @@ -620,7 +520,8 @@ public void 
testDeleteConnection() throws Exception { apiClient.getConnectionApi().deleteConnection(new ConnectionIdRequestBody().connectionId(connectionId)); // remove connection to avoid exception during tear down - connectionIds.remove(connectionId); + // connectionIds.remove(connectionId); // todo remove + testHarness.removeConnection(connectionId); LOGGER.info("Waiting for connection to be deleted..."); Thread.sleep(500); @@ -635,9 +536,10 @@ public void testDeleteConnection() throws Exception { // test deletion of connection when temporal workflow is in a bad state LOGGER.info("Testing connection deletion when temporal is in a terminal state"); - connectionId = createConnection(connectionName, sourceId, destinationId, List.of(operationId), catalog, null).getConnectionId(); + connectionId = + testHarness.createConnection(connectionName, sourceId, destinationId, List.of(operationId), catalog, null).getConnectionId(); - terminateTemporalWorkflow(connectionId); + testHarness.terminateTemporalWorkflow(connectionId); // we should still be able to delete the connection when the temporal workflow is in this state apiClient.getConnectionApi().deleteConnection(new ConnectionIdRequestBody().connectionId(connectionId)); @@ -660,10 +562,10 @@ public void testUpdateConnectionWhenWorkflowUnreachable() throws Exception { // test just ensures that the underlying workflow // is running after the update method is called. 
final String connectionName = "test-connection"; - final UUID sourceId = createPostgresSource().getSourceId(); - final UUID destinationId = createDestination().getDestinationId(); - final UUID operationId = createOperation().getOperationId(); - final AirbyteCatalog catalog = discoverSourceSchema(sourceId); + final UUID sourceId = testHarness.createPostgresSource().getSourceId(); + final UUID destinationId = testHarness.createDestination().getDestinationId(); + final UUID operationId = testHarness.createOperation().getOperationId(); + final AirbyteCatalog catalog = testHarness.discoverSourceSchema(sourceId); final SyncMode syncMode = SyncMode.INCREMENTAL; final DestinationSyncMode destinationSyncMode = DestinationSyncMode.APPEND_DEDUP; catalog.getStreams().forEach(s -> s.getConfig() @@ -673,17 +575,19 @@ public void testUpdateConnectionWhenWorkflowUnreachable() throws Exception { .primaryKey(List.of(List.of(COLUMN_NAME)))); LOGGER.info("Testing connection update when temporal is in a terminal state"); - final UUID connectionId = createConnection(connectionName, sourceId, destinationId, List.of(operationId), catalog, null).getConnectionId(); + final UUID connectionId = + testHarness.createConnection(connectionName, sourceId, destinationId, List.of(operationId), catalog, null).getConnectionId(); - terminateTemporalWorkflow(connectionId); + testHarness.terminateTemporalWorkflow(connectionId); // we should still be able to update the connection when the temporal workflow is in this state - updateConnectionSchedule(connectionId, new ConnectionSchedule().timeUnit(ConnectionSchedule.TimeUnitEnum.HOURS).units(1L)); + testHarness.updateConnectionSchedule(connectionId, + new ConnectionSchedule().timeUnit(ConnectionSchedule.TimeUnitEnum.HOURS).units(1L)); LOGGER.info("Waiting for workflow to be recreated..."); Thread.sleep(500); - final WorkflowState workflowState = getWorkflowState(connectionId); + final WorkflowState workflowState = 
testHarness.getWorkflowState(connectionId); assertTrue(workflowState.isRunning()); } @@ -693,8 +597,8 @@ public void testManualSyncRepairsWorkflowWhenWorkflowUnreachable() throws Except // This test only covers the specific behavior of updating a connection that does not have an // underlying temporal workflow. final String connectionName = "test-connection"; - final SourceDefinitionRead sourceDefinition = createE2eSourceDefinition(); - final SourceRead source = createSource( + final SourceDefinitionRead sourceDefinition = testHarness.createE2eSourceDefinition(); + final SourceRead source = testHarness.createSource( "E2E Test Source -" + UUID.randomUUID(), workspaceId, sourceDefinition.getSourceDefinitionId(), @@ -704,9 +608,9 @@ public void testManualSyncRepairsWorkflowWhenWorkflowUnreachable() throws Except .put("message_interval", 100) .build())); final UUID sourceId = source.getSourceId(); - final UUID destinationId = createDestination().getDestinationId(); - final UUID operationId = createOperation().getOperationId(); - final AirbyteCatalog catalog = discoverSourceSchema(sourceId); + final UUID destinationId = testHarness.createDestination().getDestinationId(); + final UUID operationId = testHarness.createOperation().getOperationId(); + final AirbyteCatalog catalog = testHarness.discoverSourceSchema(sourceId); final SyncMode syncMode = SyncMode.INCREMENTAL; final DestinationSyncMode destinationSyncMode = DestinationSyncMode.APPEND_DEDUP; catalog.getStreams().forEach(s -> s.getConfig() @@ -716,12 +620,13 @@ public void testManualSyncRepairsWorkflowWhenWorkflowUnreachable() throws Except .primaryKey(List.of(List.of(COLUMN_NAME)))); LOGGER.info("Testing manual sync when temporal is in a terminal state"); - final UUID connectionId = createConnection(connectionName, sourceId, destinationId, List.of(operationId), catalog, null).getConnectionId(); + final UUID connectionId = + testHarness.createConnection(connectionName, sourceId, destinationId, List.of(operationId), 
catalog, null).getConnectionId(); LOGGER.info("Starting first manual sync"); final JobInfoRead firstJobInfo = apiClient.getConnectionApi().syncConnection(new ConnectionIdRequestBody().connectionId(connectionId)); LOGGER.info("Terminating workflow during first sync"); - terminateTemporalWorkflow(connectionId); + testHarness.terminateTemporalWorkflow(connectionId); LOGGER.info("Submitted another manual sync"); apiClient.getConnectionApi().syncConnection(new ConnectionIdRequestBody().connectionId(connectionId)); @@ -729,7 +634,7 @@ public void testManualSyncRepairsWorkflowWhenWorkflowUnreachable() throws Except LOGGER.info("Waiting for workflow to be recreated..."); Thread.sleep(500); - final WorkflowState workflowState = getWorkflowState(connectionId); + final WorkflowState workflowState = testHarness.getWorkflowState(connectionId); assertTrue(workflowState.isRunning()); assertTrue(workflowState.isSkipScheduling()); @@ -744,10 +649,10 @@ public void testResetConnectionRepairsWorkflowWhenWorkflowUnreachable() throws E // This test only covers the specific behavior of updating a connection that does not have an // underlying temporal workflow. 
final String connectionName = "test-connection"; - final UUID sourceId = createPostgresSource().getSourceId(); - final UUID destinationId = createDestination().getDestinationId(); - final UUID operationId = createOperation().getOperationId(); - final AirbyteCatalog catalog = discoverSourceSchema(sourceId); + final UUID sourceId = testHarness.createPostgresSource().getSourceId(); + final UUID destinationId = testHarness.createDestination().getDestinationId(); + final UUID operationId = testHarness.createOperation().getOperationId(); + final AirbyteCatalog catalog = testHarness.discoverSourceSchema(sourceId); final SyncMode syncMode = SyncMode.INCREMENTAL; final DestinationSyncMode destinationSyncMode = DestinationSyncMode.APPEND_DEDUP; catalog.getStreams().forEach(s -> s.getConfig() @@ -757,18 +662,54 @@ public void testResetConnectionRepairsWorkflowWhenWorkflowUnreachable() throws E .primaryKey(List.of(List.of(COLUMN_NAME)))); LOGGER.info("Testing reset connection when temporal is in a terminal state"); - final UUID connectionId = createConnection(connectionName, sourceId, destinationId, List.of(operationId), catalog, null).getConnectionId(); + final UUID connectionId = + testHarness.createConnection(connectionName, sourceId, destinationId, List.of(operationId), catalog, null).getConnectionId(); - terminateTemporalWorkflow(connectionId); + testHarness.terminateTemporalWorkflow(connectionId); - apiClient.getConnectionApi().resetConnection(new ConnectionIdRequestBody().connectionId(connectionId)); + final JobInfoRead jobInfoRead = apiClient.getConnectionApi().resetConnection(new ConnectionIdRequestBody().connectionId(connectionId)); + assertEquals(JobConfigType.RESET_CONNECTION, jobInfoRead.getJob().getConfigType()); + } - LOGGER.info("Waiting for workflow to be recreated..."); - Thread.sleep(500); + @Test + @Order(17) + public void testResetCancelsRunningSync() throws Exception { + final SourceDefinitionRead sourceDefinition = 
testHarness.createE2eSourceDefinition(); - final WorkflowState workflowState = getWorkflowState(connectionId); - assertTrue(workflowState.isRunning()); - assertTrue(workflowState.isResetConnection()); + final SourceRead source = testHarness.createSource( + "E2E Test Source -" + UUID.randomUUID(), + workspaceId, + sourceDefinition.getSourceDefinitionId(), + Jsons.jsonNode(ImmutableMap.builder() + .put("type", "INFINITE_FEED") + .put("message_interval", 1000) + .put("max_records", Duration.ofMinutes(5).toSeconds()) + .build())); + + final String connectionName = "test-connection"; + final UUID sourceId = source.getSourceId(); + final UUID destinationId = testHarness.createDestination().getDestinationId(); + final UUID operationId = testHarness.createOperation().getOperationId(); + final AirbyteCatalog catalog = testHarness.discoverSourceSchema(sourceId); + final SyncMode syncMode = SyncMode.FULL_REFRESH; + final DestinationSyncMode destinationSyncMode = DestinationSyncMode.OVERWRITE; + catalog.getStreams().forEach(s -> s.getConfig().syncMode(syncMode).destinationSyncMode(destinationSyncMode)); + final UUID connectionId = + testHarness.createConnection(connectionName, sourceId, destinationId, List.of(operationId), catalog, null).getConnectionId(); + final JobInfoRead connectionSyncRead = apiClient.getConnectionApi().syncConnection(new ConnectionIdRequestBody().connectionId(connectionId)); + + // wait to get out of PENDING + final JobRead jobRead = waitWhileJobHasStatus(apiClient.getJobsApi(), connectionSyncRead.getJob(), Set.of(JobStatus.PENDING)); + assertEquals(JobStatus.RUNNING, jobRead.getStatus()); + + // send reset request while sync is still running + final JobInfoRead jobInfoRead = apiClient.getConnectionApi().resetConnection(new ConnectionIdRequestBody().connectionId(connectionId)); + assertEquals(JobConfigType.RESET_CONNECTION, jobInfoRead.getJob().getConfigType()); + + // verify that sync job was cancelled + final JobRead connectionSyncReadAfterReset = + 
apiClient.getJobsApi().getJobInfo(new JobIdRequestBody().id(connectionSyncRead.getJob().getId())).getJob(); + assertEquals(JobStatus.CANCELLED, connectionSyncReadAfterReset.getStatus()); } // This test is disabled because it takes a couple minutes to run, as it is testing timeouts. @@ -776,10 +717,10 @@ public void testResetConnectionRepairsWorkflowWhenWorkflowUnreachable() throws E // See relevant issue: https://github.com/airbytehq/airbyte/issues/8397 @Disabled public void testFailureTimeout() throws Exception { - final SourceDefinitionRead sourceDefinition = createE2eSourceDefinition(); - final DestinationDefinitionRead destinationDefinition = createE2eDestinationDefinition(); + final SourceDefinitionRead sourceDefinition = testHarness.createE2eSourceDefinition(); + final DestinationDefinitionRead destinationDefinition = testHarness.createE2eDestinationDefinition(); - final SourceRead source = createSource( + final SourceRead source = testHarness.createSource( "E2E Test Source -" + UUID.randomUUID(), workspaceId, sourceDefinition.getSourceDefinitionId(), @@ -791,7 +732,7 @@ public void testFailureTimeout() throws Exception { // Destination fails after processing 5 messages, so the job should fail after the graceful close // timeout of 1 minute - final DestinationRead destination = createDestination( + final DestinationRead destination = testHarness.createDestination( "E2E Test Destination -" + UUID.randomUUID(), workspaceId, destinationDefinition.getDestinationDefinitionId(), @@ -803,17 +744,18 @@ public void testFailureTimeout() throws Exception { final String connectionName = "test-connection"; final UUID sourceId = source.getSourceId(); final UUID destinationId = destination.getDestinationId(); - final AirbyteCatalog catalog = discoverSourceSchema(sourceId); + final AirbyteCatalog catalog = testHarness.discoverSourceSchema(sourceId); final UUID connectionId = - createConnection(connectionName, sourceId, destinationId, Collections.emptyList(), catalog, null) + 
testHarness.createConnection(connectionName, sourceId, destinationId, Collections.emptyList(), catalog, null) .getConnectionId(); final JobInfoRead connectionSyncRead1 = apiClient.getConnectionApi() .syncConnection(new ConnectionIdRequestBody().connectionId(connectionId)); // wait to get out of pending. - final JobRead runningJob = waitWhileJobHasStatus(apiClient.getJobsApi(), connectionSyncRead1.getJob(), Sets.newHashSet(JobStatus.PENDING)); + final JobRead runningJob = + waitWhileJobHasStatus(apiClient.getJobsApi(), connectionSyncRead1.getJob(), Sets.newHashSet(JobStatus.PENDING)); // wait for job for max of 3 minutes, by which time the job attempt should have failed waitWhileJobHasStatus(apiClient.getJobsApi(), runningJob, Sets.newHashSet(JobStatus.RUNNING), Duration.ofMinutes(3)); @@ -830,416 +772,4 @@ public void testFailureTimeout() throws Exception { } } - private WorkflowClient getWorkflowClient() { - final WorkflowServiceStubs temporalService = TemporalUtils.createTemporalService("localhost:7233"); - return WorkflowClient.newInstance(temporalService); - } - - private WorkflowState getWorkflowState(final UUID connectionId) { - final WorkflowClient workflowCLient = getWorkflowClient(); - - // check if temporal workflow is reachable - final ConnectionManagerWorkflow connectionManagerWorkflow = - workflowCLient.newWorkflowStub(ConnectionManagerWorkflow.class, "connection_manager_" + connectionId); - - return connectionManagerWorkflow.getState(); - } - - private void terminateTemporalWorkflow(final UUID connectionId) { - final WorkflowClient workflowCLient = getWorkflowClient(); - - // check if temporal workflow is reachable - getWorkflowState(connectionId); - - // Terminate workflow - LOGGER.info("Terminating temporal workflow..."); - workflowCLient.newUntypedWorkflowStub("connection_manager_" + connectionId).terminate(""); - - // remove connection to avoid exception during tear down - connectionIds.remove(connectionId); - } - - private AirbyteCatalog 
discoverSourceSchema(final UUID sourceId) throws ApiException { - return apiClient.getSourceApi().discoverSchemaForSource(new SourceDiscoverSchemaRequestBody().sourceId(sourceId)).getCatalog(); - } - - private void assertSourceAndDestinationDbInSync(final boolean withScdTable) throws Exception { - final Database source = getSourceDatabase(); - - final Set sourceTables = listAllTables(source); - final Set sourceTablesWithRawTablesAdded = addAirbyteGeneratedTables(withScdTable, sourceTables); - final Database destination = getDestinationDatabase(); - final Set destinationTables = listAllTables(destination); - assertEquals(sourceTablesWithRawTablesAdded, destinationTables, - String.format("streams did not match.\n source stream names: %s\n destination stream names: %s\n", sourceTables, destinationTables)); - - for (final SchemaTableNamePair pair : sourceTables) { - final List sourceRecords = retrieveSourceRecords(source, pair.getFullyQualifiedTableName()); - assertRawDestinationContains(sourceRecords, pair); - } - } - - private Database getSourceDatabase() { - return getDatabase(sourcePsql); - } - - private Database getDestinationDatabase() { - return getDatabase(destinationPsql); - } - - private Database getDatabase(final PostgreSQLContainer db) { - return new Database(DatabaseConnectionHelper.createDslContext(db, SQLDialect.POSTGRES)); - } - - private Set listAllTables(final Database database) throws SQLException { - return database.query( - context -> { - final Result fetch = - context.fetch( - "SELECT tablename, schemaname FROM pg_catalog.pg_tables WHERE schemaname != 'pg_catalog' AND schemaname != 'information_schema'"); - return fetch.stream() - .map(record -> { - final var schemaName = (String) record.get("schemaname"); - final var tableName = (String) record.get("tablename"); - return new SchemaTableNamePair(schemaName, tableName); - }) - .collect(Collectors.toSet()); - }); - } - - private Set addAirbyteGeneratedTables(final boolean withScdTable, final Set 
sourceTables) { - return sourceTables.stream().flatMap(x -> { - final String cleanedNameStream = x.tableName.replace(".", "_"); - final List explodedStreamNames = new ArrayList<>(List.of( - new SchemaTableNamePair(OUTPUT_NAMESPACE_PREFIX + x.schemaName, - String.format("_airbyte_raw_%s%s", OUTPUT_STREAM_PREFIX, cleanedNameStream)), - new SchemaTableNamePair(OUTPUT_NAMESPACE_PREFIX + x.schemaName, String.format("%s%s", OUTPUT_STREAM_PREFIX, cleanedNameStream)))); - if (withScdTable) { - explodedStreamNames - .add(new SchemaTableNamePair("_airbyte_" + OUTPUT_NAMESPACE_PREFIX + x.schemaName, - String.format("%s%s_stg", OUTPUT_STREAM_PREFIX, cleanedNameStream))); - explodedStreamNames - .add(new SchemaTableNamePair(OUTPUT_NAMESPACE_PREFIX + x.schemaName, String.format("%s%s_scd", OUTPUT_STREAM_PREFIX, cleanedNameStream))); - } - return explodedStreamNames.stream(); - }).collect(Collectors.toSet()); - } - - private void assertRawDestinationContains(final List sourceRecords, final SchemaTableNamePair pair) throws Exception { - final Set destinationRecords = new HashSet<>(retrieveRawDestinationRecords(pair)); - - assertEquals(sourceRecords.size(), destinationRecords.size(), - String.format("destination contains: %s record. 
source contains: %s, \nsource records %s \ndestination records: %s", - destinationRecords.size(), sourceRecords.size(), sourceRecords, destinationRecords)); - - for (final JsonNode sourceStreamRecord : sourceRecords) { - assertTrue(destinationRecords.contains(sourceStreamRecord), - String.format("destination does not contain record:\n %s \n destination contains:\n %s\n", - sourceStreamRecord, destinationRecords)); - } - } - - private void assertNormalizedDestinationContains(final List sourceRecords) throws Exception { - final Database destination = getDestinationDatabase(); - final String finalDestinationTable = String.format("%spublic.%s%s", OUTPUT_NAMESPACE_PREFIX, OUTPUT_STREAM_PREFIX, STREAM_NAME.replace(".", "_")); - final List destinationRecords = retrieveSourceRecords(destination, finalDestinationTable); - - assertEquals(sourceRecords.size(), destinationRecords.size(), - String.format("destination contains: %s record. source contains: %s", sourceRecords.size(), destinationRecords.size())); - - for (final JsonNode sourceStreamRecord : sourceRecords) { - assertTrue( - destinationRecords.stream() - .anyMatch(r -> r.get(COLUMN_NAME).asText().equals(sourceStreamRecord.get(COLUMN_NAME).asText()) - && r.get(COLUMN_ID).asInt() == sourceStreamRecord.get(COLUMN_ID).asInt()), - String.format("destination does not contain record:\n %s \n destination contains:\n %s\n", sourceStreamRecord, destinationRecords)); - } - } - - private ConnectionRead createConnection(final String name, - final UUID sourceId, - final UUID destinationId, - final List operationIds, - final AirbyteCatalog catalog, - final ConnectionSchedule schedule) - throws ApiException { - final ConnectionRead connection = apiClient.getConnectionApi().createConnection( - new ConnectionCreate() - .status(ConnectionStatus.ACTIVE) - .sourceId(sourceId) - .destinationId(destinationId) - .syncCatalog(catalog) - .schedule(schedule) - .operationIds(operationIds) - .name(name) - 
.namespaceDefinition(NamespaceDefinitionType.CUSTOMFORMAT) - .namespaceFormat(OUTPUT_NAMESPACE) - .prefix(OUTPUT_STREAM_PREFIX)); - connectionIds.add(connection.getConnectionId()); - return connection; - } - - private ConnectionRead updateConnectionSchedule(final UUID connectionId, final ConnectionSchedule newSchedule) throws ApiException { - final ConnectionRead connectionRead = apiClient.getConnectionApi().getConnection(new ConnectionIdRequestBody().connectionId(connectionId)); - - return apiClient.getConnectionApi().updateConnection( - new ConnectionUpdate() - .namespaceDefinition(connectionRead.getNamespaceDefinition()) - .namespaceFormat(connectionRead.getNamespaceFormat()) - .prefix(connectionRead.getPrefix()) - .connectionId(connectionId) - .operationIds(connectionRead.getOperationIds()) - .status(connectionRead.getStatus()) - .syncCatalog(connectionRead.getSyncCatalog()) - .name(connectionRead.getName()) - .resourceRequirements(connectionRead.getResourceRequirements()) - .schedule(newSchedule) // only field being updated - ); - } - - private DestinationRead createDestination() throws ApiException { - return createDestination( - "AccTestDestination-" + UUID.randomUUID(), - workspaceId, - getDestinationDefId(), - getDestinationDbConfig()); - } - - private DestinationRead createDestination(final String name, final UUID workspaceId, final UUID destinationDefId, final JsonNode destinationConfig) - throws ApiException { - final DestinationRead destination = - apiClient.getDestinationApi().createDestination(new DestinationCreate() - .name(name) - .connectionConfiguration(Jsons.jsonNode(destinationConfig)) - .workspaceId(workspaceId) - .destinationDefinitionId(destinationDefId)); - destinationIds.add(destination.getDestinationId()); - return destination; - } - - private OperationRead createOperation() throws ApiException { - final OperatorConfiguration normalizationConfig = new OperatorConfiguration() - .operatorType(OperatorType.NORMALIZATION).normalization(new 
OperatorNormalization().option( - OperatorNormalization.OptionEnum.BASIC)); - - final OperationCreate operationCreate = new OperationCreate() - .workspaceId(workspaceId) - .name("AccTestDestination-" + UUID.randomUUID()).operatorConfiguration(normalizationConfig); - - final OperationRead operation = apiClient.getOperationApi().createOperation(operationCreate); - operationIds.add(operation.getOperationId()); - return operation; - } - - private UUID getDestinationDefId() throws ApiException { - return apiClient.getDestinationDefinitionApi().listDestinationDefinitions().getDestinationDefinitions() - .stream() - .filter(dr -> dr.getName().toLowerCase().contains("postgres")) - .findFirst() - .orElseThrow() - .getDestinationDefinitionId(); - } - - private List retrieveSourceRecords(final Database database, final String table) throws SQLException { - return database.query(context -> context.fetch(String.format("SELECT * FROM %s;", table))) - .stream() - .map(Record::intoMap) - .map(Jsons::jsonNode) - .collect(Collectors.toList()); - } - - private List retrieveDestinationRecords(final Database database, final String table) throws SQLException { - return database.query(context -> context.fetch(String.format("SELECT * FROM %s;", table))) - .stream() - .map(Record::intoMap) - .map(r -> r.get(COLUMN_NAME_DATA)) - .map(f -> (JSONB) f) - .map(JSONB::data) - .map(Jsons::deserialize) - .map(Jsons::jsonNode) - .collect(Collectors.toList()); - } - - private List retrieveRawDestinationRecords(final SchemaTableNamePair pair) throws Exception { - final Database destination = getDestinationDatabase(); - final Set namePairs = listAllTables(destination); - - final String rawStreamName = String.format("_airbyte_raw_%s%s", OUTPUT_STREAM_PREFIX, pair.tableName.replace(".", "_")); - final SchemaTableNamePair rawTablePair = new SchemaTableNamePair(OUTPUT_NAMESPACE_PREFIX + pair.schemaName, rawStreamName); - assertTrue(namePairs.contains(rawTablePair), "can't find a non-normalized version (raw) 
of " + rawTablePair.getFullyQualifiedTableName()); - - return retrieveDestinationRecords(destination, rawTablePair.getFullyQualifiedTableName()); - } - - private JsonNode getSourceDbConfig() { - return getDbConfig(sourcePsql, false, false); - } - - private JsonNode getDestinationDbConfig() { - return getDbConfig(destinationPsql, false, true); - } - - private JsonNode getDestinationDbConfigWithHiddenPassword() { - return getDbConfig(destinationPsql, true, true); - } - - private JsonNode getDbConfig(final PostgreSQLContainer psql, final boolean hiddenPassword, final boolean withSchema) { - try { - final Map dbConfig = localConfig(psql, hiddenPassword, withSchema); - return Jsons.jsonNode(dbConfig); - } catch (final Exception e) { - throw new RuntimeException(e); - } - } - - private Map localConfig(final PostgreSQLContainer psql, final boolean hiddenPassword, final boolean withSchema) { - final Map dbConfig = new HashMap<>(); - // don't use psql.getHost() directly since the ip we need differs depending on environment - if (IS_MAC) { - dbConfig.put("host", "host.docker.internal"); - } else { - dbConfig.put("host", "localhost"); - } - - if (hiddenPassword) { - dbConfig.put("password", "**********"); - } else { - dbConfig.put("password", psql.getPassword()); - } - - dbConfig.put("port", psql.getFirstMappedPort()); - dbConfig.put("database", psql.getDatabaseName()); - dbConfig.put("username", psql.getUsername()); - dbConfig.put("ssl", false); - - if (withSchema) { - dbConfig.put("schema", "public"); - } - return dbConfig; - } - - private SourceDefinitionRead createE2eSourceDefinition() throws ApiException { - return apiClient.getSourceDefinitionApi().createSourceDefinition(new SourceDefinitionCreate() - .name("E2E Test Source") - .dockerRepository("airbyte/source-e2e-test") - .dockerImageTag(SOURCE_E2E_TEST_CONNECTOR_VERSION) - .documentationUrl(URI.create("https://example.com"))); - } - - private DestinationDefinitionRead createE2eDestinationDefinition() throws 
ApiException { - return apiClient.getDestinationDefinitionApi().createDestinationDefinition(new DestinationDefinitionCreate() - .name("E2E Test Destination") - .dockerRepository("airbyte/destination-e2e-test") - .dockerImageTag(DESTINATION_E2E_TEST_CONNECTOR_VERSION) - .documentationUrl(URI.create("https://example.com"))); - } - - private SourceRead createPostgresSource() throws ApiException { - return createSource( - "acceptanceTestDb-" + UUID.randomUUID(), - workspaceId, - getPostgresSourceDefinitionId(), - getSourceDbConfig()); - } - - private SourceRead createSource(final String name, final UUID workspaceId, final UUID sourceDefId, final JsonNode sourceConfig) - throws ApiException { - final SourceRead source = apiClient.getSourceApi().createSource(new SourceCreate() - .name(name) - .sourceDefinitionId(sourceDefId) - .workspaceId(workspaceId) - .connectionConfiguration(sourceConfig)); - sourceIds.add(source.getSourceId()); - return source; - } - - private UUID getPostgresSourceDefinitionId() throws ApiException { - return apiClient.getSourceDefinitionApi().listSourceDefinitions().getSourceDefinitions() - .stream() - .filter(sourceRead -> sourceRead.getName().equalsIgnoreCase("postgres")) - .findFirst() - .orElseThrow() - .getSourceDefinitionId(); - } - - private void clearSourceDbData() throws SQLException { - final Database database = getSourceDatabase(); - final Set pairs = listAllTables(database); - for (final SchemaTableNamePair pair : pairs) { - database.query(context -> context.execute(String.format("DROP TABLE %s.%s", pair.schemaName, pair.tableName))); - } - } - - private void deleteSource(final UUID sourceId) throws ApiException { - apiClient.getSourceApi().deleteSource(new SourceIdRequestBody().sourceId(sourceId)); - } - - private void disableConnection(final UUID connectionId) throws ApiException { - final ConnectionRead connection = apiClient.getConnectionApi().getConnection(new ConnectionIdRequestBody().connectionId(connectionId)); - final 
ConnectionUpdate connectionUpdate = - new ConnectionUpdate() - .prefix(connection.getPrefix()) - .connectionId(connectionId) - .operationIds(connection.getOperationIds()) - .status(ConnectionStatus.DEPRECATED) - .schedule(connection.getSchedule()) - .syncCatalog(connection.getSyncCatalog()); - apiClient.getConnectionApi().updateConnection(connectionUpdate); - } - - private void deleteDestination(final UUID destinationId) throws ApiException { - apiClient.getDestinationApi().deleteDestination(new DestinationIdRequestBody().destinationId(destinationId)); - } - - private void deleteOperation(final UUID destinationId) throws ApiException { - apiClient.getOperationApi().deleteOperation(new OperationIdRequestBody().operationId(destinationId)); - } - - private static void waitForSuccessfulJob(final JobsApi jobsApi, final JobRead originalJob) throws InterruptedException, ApiException { - final JobRead job = waitWhileJobHasStatus(jobsApi, originalJob, Sets.newHashSet(JobStatus.PENDING, JobStatus.RUNNING)); - - if (!JobStatus.SUCCEEDED.equals(job.getStatus())) { - // If a job failed during testing, show us why. 
- final JobIdRequestBody id = new JobIdRequestBody(); - id.setId(originalJob.getId()); - for (final AttemptInfoRead attemptInfo : jobsApi.getJobInfo(id).getAttempts()) { - LOGGER.warn("Unsuccessful job attempt " + attemptInfo.getAttempt().getId() - + " with status " + job.getStatus() + " produced log output as follows: " + attemptInfo.getLogs().getLogLines()); - } - } - assertEquals(JobStatus.SUCCEEDED, job.getStatus()); - } - - private static JobRead waitWhileJobHasStatus(final JobsApi jobsApi, final JobRead originalJob, final Set jobStatuses) - throws InterruptedException, ApiException { - return waitWhileJobHasStatus(jobsApi, originalJob, jobStatuses, Duration.ofMinutes(6)); - } - - @SuppressWarnings("BusyWait") - private static JobRead waitWhileJobHasStatus(final JobsApi jobsApi, - final JobRead originalJob, - final Set jobStatuses, - final Duration maxWaitTime) - throws InterruptedException, ApiException { - JobRead job = originalJob; - - final Instant waitStart = Instant.now(); - while (jobStatuses.contains(job.getStatus())) { - if (Duration.between(waitStart, Instant.now()).compareTo(maxWaitTime) > 0) { - LOGGER.info("Max wait time of {} has been reached. 
Stopping wait.", maxWaitTime); - break; - } - sleep(1000); - - job = jobsApi.getJobInfo(new JobIdRequestBody().id(job.getId())).getJob(); - LOGGER.info("waiting: job id: {} config type: {} status: {}", job.getId(), job.getConfigType(), job.getStatus()); - } - return job; - } - - public enum Type { - SOURCE, - DESTINATION - } - } diff --git a/airbyte-webapp-e2e-tests/build.gradle b/airbyte-webapp-e2e-tests/build.gradle index fc10a04ef1ad..78c39e8cc675 100644 --- a/airbyte-webapp-e2e-tests/build.gradle +++ b/airbyte-webapp-e2e-tests/build.gradle @@ -1,16 +1,15 @@ plugins { id "base" - id "com.github.node-gradle.node" version "2.2.4" + id "com.github.node-gradle.node" version "3.3.0" } -def nodeVersion = System.getenv('NODE_VERSION') ?: '16.13.0' +def nodeVersion = System.getenv('NODE_VERSION') ?: '16.15.1' node { download = true version = nodeVersion } - task e2etest(type: NpmTask) { dependsOn npmInstall // If the cypressWebappKey property has been set from the outside (see tools/bin/e2e_test.sh) diff --git a/airbyte-webapp-e2e-tests/cypress/commands/connection.ts b/airbyte-webapp-e2e-tests/cypress/commands/connection.ts index 9570ddcfe10a..e642cfb6eb95 100644 --- a/airbyte-webapp-e2e-tests/cypress/commands/connection.ts +++ b/airbyte-webapp-e2e-tests/cypress/commands/connection.ts @@ -17,7 +17,7 @@ export const createTestConnection = (sourceName: string, destinationName: string cy.get("div[data-testid='connectionName']").type("Connection name"); cy.get("div[data-testid='schedule']").click(); - cy.get("div[data-testid='manual']").click(); + cy.get("div[data-testid='Manual']").click(); cy.get("div[data-testid='namespaceDefinition']").click(); cy.get("div[data-testid='namespaceDefinition-source']").click(); diff --git a/airbyte-webapp-e2e-tests/cypress/integration/connection.spec.ts b/airbyte-webapp-e2e-tests/cypress/integration/connection.spec.ts index 5a579ce27852..d4a938505ff8 100644 --- a/airbyte-webapp-e2e-tests/cypress/integration/connection.spec.ts +++ 
b/airbyte-webapp-e2e-tests/cypress/integration/connection.spec.ts @@ -28,7 +28,7 @@ describe("Connection main actions", () => { cy.get("div[data-id='replication-step']").click(); cy.get("div[data-testid='schedule']").click(); - cy.get("div[data-testid='Every 5 min']").click(); + cy.get("div[data-testid='Every 5 minutes']").click(); cy.get("button[type=submit]").first().click(); cy.wait("@updateConnection"); cy.get("span[data-id='success-result']").should("exist"); diff --git a/airbyte-webapp/.eslintrc b/airbyte-webapp/.eslintrc index f132a520cbe5..26fa893f823f 100644 --- a/airbyte-webapp/.eslintrc +++ b/airbyte-webapp/.eslintrc @@ -4,9 +4,10 @@ "plugin:@typescript-eslint/recommended", "plugin:jest/recommended", "prettier", - "plugin:prettier/recommended" + "plugin:prettier/recommended", + "plugin:css-modules/recommended" ], - "plugins": ["react", "@typescript-eslint", "prettier", "unused-imports"], + "plugins": ["react", "@typescript-eslint", "prettier", "unused-imports", "css-modules"], "parserOptions": { "ecmaVersion": 2020, "sourceType": "module", @@ -15,11 +16,29 @@ } }, "rules": { - "curly": "error", - "prettier/prettier": "error", - "unused-imports/no-unused-imports": "error", + "curly": "warn", + "css-modules/no-undef-class": ["warn", { "camelCase": true }], + "css-modules/no-unused-class": ["warn", { "camelCase": true }], + "dot-location": "warn", + "eqeqeq": "error", + "prettier/prettier": "warn", + "unused-imports/no-unused-imports": "warn", + "no-else-return": "warn", + "no-lonely-if": "warn", + "no-inner-declarations": "off", + "no-unused-vars": "off", + "no-useless-computed-key": "warn", + "no-useless-return": "warn", + "no-var": "warn", + "object-shorthand": ["warn", "always"], + "prefer-arrow-callback": "warn", + "prefer-const": "warn", + "prefer-destructuring": ["warn", { "AssignmentExpression": { "array": true } }], + "prefer-object-spread": "warn", + "prefer-template": "warn", + "yoda": "warn", "import/order": [ - "error", + "warn", { 
"newlines-between": "always", "groups": ["type", "builtin", "external", "internal", ["parent", "sibling"], "index"], @@ -41,15 +60,29 @@ } } ], + "@typescript-eslint/array-type": ["warn", { "default": "array-simple" }], "@typescript-eslint/ban-ts-comment": [ "warn", { - "ts-ignore": "allow-with-description", "ts-expect-error": "allow-with-description" } ], - "@typescript-eslint/consistent-type-definitions": ["error", "interface"], - "@typescript-eslint/ban-types": ["warn"] + "@typescript-eslint/ban-types": "warn", + "@typescript-eslint/consistent-indexed-object-style": ["warn", "record"], + "@typescript-eslint/consistent-type-definitions": ["warn", "interface"], + "@typescript-eslint/no-unused-vars": "warn", + "react/function-component-definition": [ + "warn", + { + "namedComponents": "arrow-function", + "unnamedComponents": "arrow-function" + } + ], + "react/jsx-boolean-value": "warn", + "react/jsx-curly-brace-presence": "warn", + "react/jsx-fragments": "warn", + "react/jsx-no-useless-fragment": ["warn", { "allowExpressions": true }], + "react/self-closing-comp": "warn" }, "parser": "@typescript-eslint/parser", "overrides": [ diff --git a/airbyte-webapp/.nvmrc b/airbyte-webapp/.nvmrc new file mode 100644 index 000000000000..112a2eaed3ee --- /dev/null +++ b/airbyte-webapp/.nvmrc @@ -0,0 +1 @@ +lts/gallium \ No newline at end of file diff --git a/airbyte-webapp/.storybook/main.ts b/airbyte-webapp/.storybook/main.js similarity index 94% rename from airbyte-webapp/.storybook/main.ts rename to airbyte-webapp/.storybook/main.js index 9d87a917494c..6c9c2e02482d 100644 --- a/airbyte-webapp/.storybook/main.ts +++ b/airbyte-webapp/.storybook/main.js @@ -9,6 +9,7 @@ module.exports = { "@storybook/preset-create-react-app", "storybook-addon-mock/register", ], + staticDirs: ["../public"], webpackFinal: (config) => { config.resolve.modules.push(process.cwd() + "/node_modules"); config.resolve.modules.push(process.cwd() + "/src"); diff --git 
a/airbyte-webapp/.storybook/withProvider.tsx b/airbyte-webapp/.storybook/withProvider.tsx index 9d7e1e7a6505..fe0b3523ebaf 100644 --- a/airbyte-webapp/.storybook/withProvider.tsx +++ b/airbyte-webapp/.storybook/withProvider.tsx @@ -11,6 +11,7 @@ import GlobalStyle from "../src/global-styles"; import messages from "../src/locales/en.json"; import { FeatureService } from "../src/hooks/services/Feature"; import { ConfigServiceProvider, defaultConfig } from "../src/config"; +import { DocumentationPanelProvider } from "../src/views/Connector/ConnectorDocumentationLayout/DocumentationPanelContext"; import { ServicesProvider } from "../src/core/servicesProvider"; import { analyticsServiceContext, @@ -47,11 +48,13 @@ export const withProviders = (getStory) => ( - - - {getStory()} - + > + + + + {getStory()} + + diff --git a/airbyte-webapp/Dockerfile b/airbyte-webapp/Dockerfile index 92941248d774..45a5ba51cf43 100644 --- a/airbyte-webapp/Dockerfile +++ b/airbyte-webapp/Dockerfile @@ -4,5 +4,4 @@ FROM ${NGINX_IMAGE} as webapp EXPOSE 80 COPY bin/build /usr/share/nginx/html -COPY bin/docs /usr/share/nginx/html/docs COPY bin/nginx/default.conf.template /etc/nginx/templates/default.conf.template diff --git a/airbyte-webapp/STYLEGUIDE.md b/airbyte-webapp/STYLEGUIDE.md new file mode 100644 index 000000000000..f632849240cc --- /dev/null +++ b/airbyte-webapp/STYLEGUIDE.md @@ -0,0 +1,48 @@ +# Frontend Style Guide + +This serves as a living document regarding conventions we have agreed upon as a frontend team. In general, the aim of these decisions and discussions is to both (a) increase the readability and consistency of our code and (b) decrease day to day decision-making so we can spend more time writing better code. + +## General Code Style and Formatting + +* Where possible, we rely on automated systems to maintain consistency in code style +* We use eslint, Prettier, and VSCode settings to automate these choices. 
The configuration files for these are checked into our repository, so no individual setup should be required beyond ensuring your VSCode settings include: + +``` +"editor.codeActionsOnSave": { + "source.fixAll.eslint": true, +} +``` + +* Donā€™t use single-character names. Using meaningful name for function parameters is a way of making the code self-documented and we always should do it. Example: + * .filter(([key, value]) => isDefined(value.default) āœ… + * .filter(([k, v]) => isDefined(v.default) āŒ + + +## Exporting + +* Export at declaration, not at the bottom. For example: + * export const myVar āœ… + * const myVar; export { myVar }; āŒ + + +## Component Props +* Use explicit, verbose naming + * ie: `interface ConnectionFormProps` not `interface iProps` + + +## Testing + +* Test files should be store alongside the files/features they are testing +* Use the prop `data-testid` instead of `data-id` + + +## Types + +* For component props, prefer type unions over enums: + * `type SomeType = ā€œsomeā€ | ā€œtypeā€;` āœ… + * `enum SomeEnum = { SOME: ā€œsomeā€, TYPE: ā€œtypeā€ };` āŒ + * Exceptions may include: + * Generated using enums from the API + * When the value on an enum is cleaner than the string + * In this case use `const enum` instead + diff --git a/airbyte-webapp/build.gradle b/airbyte-webapp/build.gradle index 8aa67c7c3c34..5ae9d93f5956 100644 --- a/airbyte-webapp/build.gradle +++ b/airbyte-webapp/build.gradle @@ -1,9 +1,19 @@ plugins { id "base" - id "com.github.node-gradle.node" version "3.1.1" + id "com.github.node-gradle.node" version "3.3.0" } -def nodeVersion = System.getenv('NODE_VERSION') ?: '16.13.0' +def nodeVersion = System.getenv('NODE_VERSION') ?: '16.15.1' + +// This array should contain a path to all configs that are common to most build tasks and +// might affect them (i.e. 
if any of those files change we want to rerun most tasks) +def commonConfigs = [ + '.env', + 'package.json', + 'package-lock.json', + 'tsconfig.json', + '.prettierrc.js' +] node { download = true @@ -11,70 +21,79 @@ node { } npm_run_build { - inputs.files fileTree('public') - inputs.files fileTree('src') - inputs.file 'package.json' - inputs.file 'package-lock.json' + inputs.files commonConfigs + inputs.file '.eslintrc' + inputs.dir 'public' + inputs.dir 'src' - // todo (cgardens) - the plugin seems to ignore this value when the copy command is run. ideally the output would be place in build/app. - outputs.dir project.buildDir + outputs.dir 'build/app' } task test(type: NpmTask) { dependsOn assemble args = ['run', 'test', '--', '--watchAll=false', '--silent'] - inputs.files fileTree('src') - inputs.file 'package.json' - inputs.file 'package-lock.json' + inputs.files commonConfigs + inputs.dir 'src' } task licenseCheck(type: NpmTask) { dependsOn npmInstall args = ['run', 'license-check'] - inputs.file 'package.json' - inputs.file 'package-lock.json' + inputs.files commonConfigs + inputs.file 'scripts/license-check.js' + + // The licenseCheck has no outputs, thus we always treat the outpus up to date + // as long as the inputs have not changed + outputs.upToDateWhen { true } } task validateLinks(type: NpmTask) { dependsOn npmInstall args = ['run', 'validate-links'] - inputs.file 'package.json' - inputs.file 'package-lock.json' + + // Since the output of this task depends on availability of URLs + // we never want to treat it as "up-to-date" and always want to run it + outputs.upToDateWhen { false } } -// Make sure to always run a license check after we installed dependencies -npmInstall.finalizedBy licenseCheck -// Validate all links after installing dependencies -npmInstall.finalizedBy validateLinks -assemble.dependsOn npm_run_build -build.finalizedBy test +task buildStorybook(type: NpmTask) { + dependsOn npmInstall + args = ['run', 'build:storybook'] -task 
copyBuild(type: Copy) { - dependsOn copyDocker + inputs.files commonConfigs + inputs.dir '.storybook' + inputs.dir 'public' + inputs.dir 'src' - from "${project.projectDir}/build" - into "build/docker/bin/build" - exclude ".docker" - exclude "docker" + outputs.dir 'build/storybook' +} + +task copyBuildOutput(type: Copy) { + dependsOn copyDocker, npm_run_build + + from "${project.projectDir}/build/app" + into 'build/docker/bin/build' } task copyDocs(type: Copy) { - dependsOn copyDocker + dependsOn copyDocker, copyBuildOutput from "${project.rootProject.projectDir}/docs/integrations" - into "build/docker/bin/docs/integrations" + into "build/docker/bin/build/docs/integrations" + // google-ads.md is blocked by Ad Blockers + rename ('google-ads.md', 'gglad.md') duplicatesStrategy DuplicatesStrategy.INCLUDE } // Copy images that are used in .md integration documentation docs -task copyAssets(type: Copy) { - dependsOn copyDocker +task copyDocAssets(type: Copy) { + dependsOn copyDocker, copyBuildOutput from "${project.rootProject.projectDir}/docs/.gitbook" - into "build/docker/bin/docs/.gitbook" + into "build/docker/bin/build/docs/.gitbook" duplicatesStrategy DuplicatesStrategy.INCLUDE } @@ -85,16 +104,15 @@ task copyNginx(type: Copy) { into "build/docker/bin/nginx" } -copyBuild.dependsOn npm_run_build -copyNginx.dependsOn npm_run_build -copyDocs.dependsOn npm_run_build -copyAssets.dependsOn npm_run_build -assemble.dependsOn copyDocs -copyDocker.dependsOn(npm_run_build) +// Those tasks should be run as part of the "check" task +check.dependsOn validateLinks, licenseCheck, test + +build.dependsOn buildStorybook Task dockerBuildTask = getDockerBuildTask("webapp", "$project.projectDir", "$rootProject.ext.version", "$rootProject.ext.image_tag") -dockerBuildTask.dependsOn(copyBuild) +dockerBuildTask.dependsOn(copyDocker) +dockerBuildTask.dependsOn(copyBuildOutput) dockerBuildTask.dependsOn(copyNginx) dockerBuildTask.dependsOn(copyDocs) -dockerBuildTask.dependsOn(copyAssets) 
+dockerBuildTask.dependsOn(copyDocAssets) assemble.dependsOn(dockerBuildTask) diff --git a/airbyte-webapp/nginx/default.conf.template b/airbyte-webapp/nginx/default.conf.template index c6f0d22d32f6..f14f47d41ce7 100644 --- a/airbyte-webapp/nginx/default.conf.template +++ b/airbyte-webapp/nginx/default.conf.template @@ -10,7 +10,7 @@ server { #charset koi8-r; #access_log /var/log/nginx/host.access.log main; - add_header Content-Security-Policy "script-src * 'unsafe-inline';"; + add_header Content-Security-Policy "script-src * 'unsafe-inline'; worker-src self blob:;"; location / { root /usr/share/nginx/html; diff --git a/airbyte-webapp/package-lock.json b/airbyte-webapp/package-lock.json index 26b8149220af..b69a9bbb1814 100644 --- a/airbyte-webapp/package-lock.json +++ b/airbyte-webapp/package-lock.json @@ -1,12 +1,12 @@ { "name": "airbyte-webapp", - "version": "0.39.17-alpha", + "version": "0.39.28-alpha", "lockfileVersion": 2, "requires": true, "packages": { "": { "name": "airbyte-webapp", - "version": "0.39.17-alpha", + "version": "0.39.28-alpha", "dependencies": { "@fortawesome/fontawesome-svg-core": "^6.1.1", "@fortawesome/free-brands-svg-icons": "^6.1.1", @@ -14,6 +14,7 @@ "@fortawesome/free-solid-svg-icons": "^6.1.1", "@fortawesome/react-fontawesome": "^0.1.18", "@fullstory/browser": "^1.5.1", + "@monaco-editor/react": "^4.4.5", "@sentry/react": "^6.19.6", "@sentry/tracing": "^6.19.6", "classnames": "^2.3.1", @@ -21,6 +22,7 @@ "firebase": "^9.8.2", "flat": "^5.0.2", "formik": "^2.2.9", + "framer-motion": "^6.3.11", "launchdarkly-js-client-sdk": "^2.22.1", "lodash": "^4.17.21", "query-string": "^6.13.1", @@ -31,7 +33,6 @@ "react-intl": "^5.24.8", "react-lazylog": "^4.5.3", "react-markdown": "^7.0.1", - "react-pose": "^4.0.10", "react-query": "^3.39.1", "react-reflex": "^4.0.9", "react-router-dom": "^6.3.0", @@ -82,6 +83,7 @@ "@typescript-eslint/parser": "^5.27.1", "eslint-config-prettier": "^8.5.0", "eslint-config-react-app": "^7.0.1", + 
"eslint-plugin-css-modules": "^2.11.0", "eslint-plugin-jest": "^26.5.3", "eslint-plugin-prettier": "^4.0.0", "eslint-plugin-unused-imports": "^2.0.0", @@ -2394,17 +2396,19 @@ "integrity": "sha512-kBJtf7PH6aWwZ6fka3zQ0p6SBYzx4fl1LoZXE2RrnYST9Xljm7WfKJrU4g/Xr3Beg72MLrp1AWNUmuYJTL7Cow==" }, "node_modules/@emotion/is-prop-valid": { - "version": "0.7.3", - "resolved": "https://registry.npmjs.org/@emotion/is-prop-valid/-/is-prop-valid-0.7.3.tgz", - "integrity": "sha512-uxJqm/sqwXw3YPA5GXX365OBcJGFtxUVkB6WyezqFHlNe9jqUWH5ur2O2M8dGBz61kn1g3ZBlzUunFQXQIClhA==", + "version": "0.8.8", + "resolved": "https://registry.npmjs.org/@emotion/is-prop-valid/-/is-prop-valid-0.8.8.tgz", + "integrity": "sha512-u5WtneEAr5IDG2Wv65yhunPSMLIpuKsbuOktRojfrEiEvRyC85LgPMZI63cr7NUqT8ZIGdSVg8ZKGxIug4lXcA==", + "optional": true, "dependencies": { - "@emotion/memoize": "0.7.1" + "@emotion/memoize": "0.7.4" } }, "node_modules/@emotion/memoize": { - "version": "0.7.1", - "resolved": "https://registry.npmjs.org/@emotion/memoize/-/memoize-0.7.1.tgz", - "integrity": "sha512-Qv4LTqO11jepd5Qmlp3M1YEjBumoTHcHFdgPTQ+sFlIL5myi/7xu/POwP7IRu6odBdmLXdtIs1D6TuW6kbwbbg==" + "version": "0.7.4", + "resolved": "https://registry.npmjs.org/@emotion/memoize/-/memoize-0.7.4.tgz", + "integrity": "sha512-Ja/Vfqe3HpuzRsG1oBtWTHk2PGZ7GR+2Vz5iYGelAw8dx32K0y7PjVuxK6z1nMpZOqAFsRUPCkK1YjJ56qJlgw==", + "optional": true }, "node_modules/@emotion/react": { "version": "11.4.0", @@ -2567,14 +2571,14 @@ "dev": true }, "node_modules/@firebase/analytics": { - "version": "0.7.9", - "resolved": "https://registry.npmjs.org/@firebase/analytics/-/analytics-0.7.9.tgz", - "integrity": "sha512-h/2L2q4/+mmV9EdvVC3XwFFbKSh8bvaYu4DMJIKnPAuGze6W5ALBLkK2GcVti6Kz1NTMJ3puxTRWE9XxRGZipQ==", - "dependencies": { - "@firebase/component": "0.5.14", - "@firebase/installations": "0.5.9", - "@firebase/logger": "0.3.2", - "@firebase/util": "1.6.0", + "version": "0.7.10", + "resolved": "https://registry.npmjs.org/@firebase/analytics/-/analytics-0.7.10.tgz", + 
"integrity": "sha512-efZ9jdzTW1/COE5gVdJVdplsltooKPH7M3XpSi/kDyegR1sC05C5NQaiBIYcaTyX2yf1OVcCfsWEcZFhhPTPGw==", + "dependencies": { + "@firebase/component": "0.5.15", + "@firebase/installations": "0.5.10", + "@firebase/logger": "0.3.3", + "@firebase/util": "1.6.1", "tslib": "^2.1.0" }, "peerDependencies": { @@ -2582,14 +2586,14 @@ } }, "node_modules/@firebase/analytics-compat": { - "version": "0.1.10", - "resolved": "https://registry.npmjs.org/@firebase/analytics-compat/-/analytics-compat-0.1.10.tgz", - "integrity": "sha512-7zfB+BBO5RbF7RSHOA4ZPyLvOEEvMOhRbfIjh5ZmizAQY2J6tZB8t+dwQ/q4hqZVGgw4ds4g0JYuRKZKYsWADg==", + "version": "0.1.11", + "resolved": "https://registry.npmjs.org/@firebase/analytics-compat/-/analytics-compat-0.1.11.tgz", + "integrity": "sha512-Jx5iXM3nlMa6utqGWNDtmdIztFhLCqMx2Iw809BbynhTSa3esF4e5RevCRk+5oDDfW11uLHckLpe6MhmINKIkA==", "dependencies": { - "@firebase/analytics": "0.7.9", + "@firebase/analytics": "0.7.10", "@firebase/analytics-types": "0.7.0", - "@firebase/component": "0.5.14", - "@firebase/util": "1.6.0", + "@firebase/component": "0.5.15", + "@firebase/util": "1.6.1", "tslib": "^2.1.0" }, "peerDependencies": { @@ -2612,25 +2616,25 @@ "integrity": "sha512-d6xOpEDfsi2CZVlPQzGeux8XMwLT9hssAsaPYExaQMuYskwb+x1x7J371tWlbBdWHroy99KnVB6qIkUbs5X3UQ==" }, "node_modules/@firebase/app": { - "version": "0.7.25", - "resolved": "https://registry.npmjs.org/@firebase/app/-/app-0.7.25.tgz", - "integrity": "sha512-OemDA3NZS1oEbAPFlWHeVI8Od26ZHAXUivUWFYIsYrw+YjS7FloltwyHB06Q8LQyPJIBPubGkEuzNTHz32EDCQ==", + "version": "0.7.26", + "resolved": "https://registry.npmjs.org/@firebase/app/-/app-0.7.26.tgz", + "integrity": "sha512-FmJ4uaUyazmOZZWJO9OviKfnw+lrwMPQbWBMutymSQT8Gx783Ddnhs5IdmfV0NeLrlGy4ZwfP6/+RJyy2wGDXw==", "dependencies": { - "@firebase/component": "0.5.14", - "@firebase/logger": "0.3.2", - "@firebase/util": "1.6.0", + "@firebase/component": "0.5.15", + "@firebase/logger": "0.3.3", + "@firebase/util": "1.6.1", "idb": "7.0.1", "tslib": "^2.1.0" } }, 
"node_modules/@firebase/app-check": { - "version": "0.5.8", - "resolved": "https://registry.npmjs.org/@firebase/app-check/-/app-check-0.5.8.tgz", - "integrity": "sha512-DgrXnrJT0S5csa5CsvmWWSWqy61T3rOE2iZ/L4Q8+xZsjU2McpUj8g/lU8NDa4qc5mGRZ/Qjozqog1H3pwPgGw==", + "version": "0.5.9", + "resolved": "https://registry.npmjs.org/@firebase/app-check/-/app-check-0.5.9.tgz", + "integrity": "sha512-IxOSpw4cL6fQD2AGLhXHxsdCjzQEYGyRwvS2vtguMxTYhRQ/EWXvej+P42cXf373vDrmAMKrnIUgC4P1yMPLSA==", "dependencies": { - "@firebase/component": "0.5.14", - "@firebase/logger": "0.3.2", - "@firebase/util": "1.6.0", + "@firebase/component": "0.5.15", + "@firebase/logger": "0.3.3", + "@firebase/util": "1.6.1", "tslib": "^2.1.0" }, "peerDependencies": { @@ -2638,15 +2642,15 @@ } }, "node_modules/@firebase/app-check-compat": { - "version": "0.2.8", - "resolved": "https://registry.npmjs.org/@firebase/app-check-compat/-/app-check-compat-0.2.8.tgz", - "integrity": "sha512-EAqFa0juE2xc52IGh2nv8E+avTLsZfbO7fkJnhPu07e5FU39pptcsRckTdHU7v1/DuWuigUVFcOD5iic9I8TQw==", + "version": "0.2.9", + "resolved": "https://registry.npmjs.org/@firebase/app-check-compat/-/app-check-compat-0.2.9.tgz", + "integrity": "sha512-DgHCcUR3vC3KrAQccs+cggTjNusF/oxPJmw1397H0jw5vWVu0oTtmIduyKB2GE0KDo0q0bHNPPR8GEVugjeFPg==", "dependencies": { - "@firebase/app-check": "0.5.8", + "@firebase/app-check": "0.5.9", "@firebase/app-check-types": "0.4.0", - "@firebase/component": "0.5.14", - "@firebase/logger": "0.3.2", - "@firebase/util": "1.6.0", + "@firebase/component": "0.5.15", + "@firebase/logger": "0.3.3", + "@firebase/util": "1.6.1", "tslib": "^2.1.0" }, "peerDependencies": { @@ -2674,14 +2678,14 @@ "integrity": "sha512-d6xOpEDfsi2CZVlPQzGeux8XMwLT9hssAsaPYExaQMuYskwb+x1x7J371tWlbBdWHroy99KnVB6qIkUbs5X3UQ==" }, "node_modules/@firebase/app-compat": { - "version": "0.1.26", - "resolved": "https://registry.npmjs.org/@firebase/app-compat/-/app-compat-0.1.26.tgz", - "integrity": 
"sha512-i5UTq1HZAHuhe7RNjgFSezbow4jVxc2oe3Gndsv+Hdut92f8L0AyssOtdU2iOylLlxbTijewAXXui4FAUzXubw==", - "dependencies": { - "@firebase/app": "0.7.25", - "@firebase/component": "0.5.14", - "@firebase/logger": "0.3.2", - "@firebase/util": "1.6.0", + "version": "0.1.27", + "resolved": "https://registry.npmjs.org/@firebase/app-compat/-/app-compat-0.1.27.tgz", + "integrity": "sha512-0A5ENP/KK0Eev94qPuxaclfOE0oA6hyCVQTdi0ox1bPm+VzGGD/jXP6Bzw+IUmy33ChjP/639bm6Myh8AG4PwA==", + "dependencies": { + "@firebase/app": "0.7.26", + "@firebase/component": "0.5.15", + "@firebase/logger": "0.3.3", + "@firebase/util": "1.6.1", "tslib": "^2.1.0" } }, @@ -2701,13 +2705,13 @@ "integrity": "sha512-d6xOpEDfsi2CZVlPQzGeux8XMwLT9hssAsaPYExaQMuYskwb+x1x7J371tWlbBdWHroy99KnVB6qIkUbs5X3UQ==" }, "node_modules/@firebase/auth": { - "version": "0.20.2", - "resolved": "https://registry.npmjs.org/@firebase/auth/-/auth-0.20.2.tgz", - "integrity": "sha512-anv2dhHXnlHSuXDuXIoCm/w/JJ+SiQ1TAKgNVYlhfq+yvx9Op8CxfTqcfBwfbIZ1gizw4PNLuk82m8KelsKl6Q==", + "version": "0.20.3", + "resolved": "https://registry.npmjs.org/@firebase/auth/-/auth-0.20.3.tgz", + "integrity": "sha512-iElaZvVxxW2WAAmmqwTkdPBdixdI2TpURACwNn0G4XpuxlNeF3hYK1nDla2Oa/r39QGtlb9FChTTBby4Uu/Flw==", "dependencies": { - "@firebase/component": "0.5.14", - "@firebase/logger": "0.3.2", - "@firebase/util": "1.6.0", + "@firebase/component": "0.5.15", + "@firebase/logger": "0.3.3", + "@firebase/util": "1.6.1", "node-fetch": "2.6.7", "selenium-webdriver": "4.1.2", "tslib": "^2.1.0" @@ -2717,14 +2721,14 @@ } }, "node_modules/@firebase/auth-compat": { - "version": "0.2.15", - "resolved": "https://registry.npmjs.org/@firebase/auth-compat/-/auth-compat-0.2.15.tgz", - "integrity": "sha512-Kl8pujKWVBJ+76h4tRsS5xI9Dvk8MVSP6eN82rnEgmCxiUsnVj5Adb/WzvS3p4/l++4mRSAEnlIVxZ2Pyaeirg==", + "version": "0.2.16", + "resolved": "https://registry.npmjs.org/@firebase/auth-compat/-/auth-compat-0.2.16.tgz", + "integrity": 
"sha512-wwyuBwtCXwygr1Vyr7M4v8iD1eGRUEGM0XNGG2BQkFnlF7rkwpGsmgiiSkaA8kFYibNSTx2TkdBNfvJXzYPL6A==", "dependencies": { - "@firebase/auth": "0.20.2", + "@firebase/auth": "0.20.3", "@firebase/auth-types": "0.11.0", - "@firebase/component": "0.5.14", - "@firebase/util": "1.6.0", + "@firebase/component": "0.5.15", + "@firebase/util": "1.6.1", "node-fetch": "2.6.7", "selenium-webdriver": "4.1.2", "tslib": "^2.1.0" @@ -2762,11 +2766,11 @@ "integrity": "sha512-d6xOpEDfsi2CZVlPQzGeux8XMwLT9hssAsaPYExaQMuYskwb+x1x7J371tWlbBdWHroy99KnVB6qIkUbs5X3UQ==" }, "node_modules/@firebase/component": { - "version": "0.5.14", - "resolved": "https://registry.npmjs.org/@firebase/component/-/component-0.5.14.tgz", - "integrity": "sha512-ct2p1MTMV5P/nGIlkC3XjAVwHwjsIZaeo8JVyDAkJCNTROu5mYX3FBK16hjIUIIVJDpgnnzFh9nP74gciL4WrA==", + "version": "0.5.15", + "resolved": "https://registry.npmjs.org/@firebase/component/-/component-0.5.15.tgz", + "integrity": "sha512-VRnZxmvtJmXupTPg37LxM0zdyMN54EXkmsFD4x5Bm4eZUay9VGnhfiGnE3m9Af/2hnURA2idIBN/23L6982iPQ==", "dependencies": { - "@firebase/util": "1.6.0", + "@firebase/util": "1.6.1", "tslib": "^2.1.0" } }, @@ -2776,32 +2780,29 @@ "integrity": "sha512-d6xOpEDfsi2CZVlPQzGeux8XMwLT9hssAsaPYExaQMuYskwb+x1x7J371tWlbBdWHroy99KnVB6qIkUbs5X3UQ==" }, "node_modules/@firebase/database": { - "version": "0.13.0", - "resolved": "https://registry.npmjs.org/@firebase/database/-/database-0.13.0.tgz", - "integrity": "sha512-lskyf5+FDnytrPJt3MLjkTDxYxutKtaYL7j/Z/De2DSVZJSR+weE/D/r47iK/+tyzMaew2v3joSgZOHvVlWshw==", + "version": "0.13.1", + "resolved": "https://registry.npmjs.org/@firebase/database/-/database-0.13.1.tgz", + "integrity": "sha512-k6PeAzf9x9DG3AJtA6SkJsTD1ivOWvrV71VPOYabBch05QDB0HOYs1EauGhzqa6GOcYz+ncb4pNEkgFDvcnEfQ==", "dependencies": { "@firebase/auth-interop-types": "0.1.6", - "@firebase/component": "0.5.14", - "@firebase/logger": "0.3.2", - "@firebase/util": "1.6.0", + "@firebase/component": "0.5.15", + "@firebase/logger": "0.3.3", + "@firebase/util": 
"1.6.1", "faye-websocket": "0.11.4", "tslib": "^2.1.0" } }, "node_modules/@firebase/database-compat": { - "version": "0.2.0", - "resolved": "https://registry.npmjs.org/@firebase/database-compat/-/database-compat-0.2.0.tgz", - "integrity": "sha512-t2HVI1RrMz8cbmhyo2LQGSInhRN9DZTDKXm55iFQgSihcnCbfoMAFyRv/FFa1Y+iERgcDI8LaOMS/LTjpYVz4g==", - "dependencies": { - "@firebase/component": "0.5.14", - "@firebase/database": "0.13.0", - "@firebase/database-types": "0.9.8", - "@firebase/logger": "0.3.2", - "@firebase/util": "1.6.0", + "version": "0.2.1", + "resolved": "https://registry.npmjs.org/@firebase/database-compat/-/database-compat-0.2.1.tgz", + "integrity": "sha512-xpru5ZtO7um2FmfIw4gCAbkWpyOEwxzamU/5phuwze3ZihMdh+UrDrwrhvfqzQ/KIKXsK76Uyx5F3NCAS8+5eg==", + "dependencies": { + "@firebase/component": "0.5.15", + "@firebase/database": "0.13.1", + "@firebase/database-types": "0.9.9", + "@firebase/logger": "0.3.3", + "@firebase/util": "1.6.1", "tslib": "^2.1.0" - }, - "peerDependencies": { - "@firebase/app-compat": "0.x" } }, "node_modules/@firebase/database-compat/node_modules/tslib": { @@ -2810,12 +2811,12 @@ "integrity": "sha512-d6xOpEDfsi2CZVlPQzGeux8XMwLT9hssAsaPYExaQMuYskwb+x1x7J371tWlbBdWHroy99KnVB6qIkUbs5X3UQ==" }, "node_modules/@firebase/database-types": { - "version": "0.9.8", - "resolved": "https://registry.npmjs.org/@firebase/database-types/-/database-types-0.9.8.tgz", - "integrity": "sha512-bI7bwF5xc0nPi6Oa3JVt6JJdfhVAnEpCwgfTNILR4lYDPtxdxlRXhZzQ5lfqlCj7PR+drKh9RvMu6C24N1q04w==", + "version": "0.9.9", + "resolved": "https://registry.npmjs.org/@firebase/database-types/-/database-types-0.9.9.tgz", + "integrity": "sha512-Zp86fHzQFZKYVM7yDWVAgVTeOJ39g2wT0ijeiN0jpHAHceeoV013q3jPIIGuooV2HMwWOTIBZGqh+DxrHMFyUw==", "dependencies": { "@firebase/app-types": "0.7.0", - "@firebase/util": "1.6.0" + "@firebase/util": "1.6.1" } }, "node_modules/@firebase/database/node_modules/faye-websocket": { @@ -2835,14 +2836,14 @@ "integrity": 
"sha512-d6xOpEDfsi2CZVlPQzGeux8XMwLT9hssAsaPYExaQMuYskwb+x1x7J371tWlbBdWHroy99KnVB6qIkUbs5X3UQ==" }, "node_modules/@firebase/firestore": { - "version": "3.4.9", - "resolved": "https://registry.npmjs.org/@firebase/firestore/-/firestore-3.4.9.tgz", - "integrity": "sha512-EiSG/uYDyUmrrHlwrsP9WqWI8ChD0hUW/+0MS3NDh8Cfo1Dfb/sM3YWKzgnIZ3wKTxn/nbe9oidHZp5cqI9G+w==", - "dependencies": { - "@firebase/component": "0.5.14", - "@firebase/logger": "0.3.2", - "@firebase/util": "1.6.0", - "@firebase/webchannel-wrapper": "0.6.1", + "version": "3.4.10", + "resolved": "https://registry.npmjs.org/@firebase/firestore/-/firestore-3.4.10.tgz", + "integrity": "sha512-QUW9B7U8G0zbontuEPCJaoD320AZPOM4skV+Jd+WJIUUrmg/pLCW68Tt9ycg6zQ+1WdJtzaOU35NPJS7VIP8Ug==", + "dependencies": { + "@firebase/component": "0.5.15", + "@firebase/logger": "0.3.3", + "@firebase/util": "1.6.1", + "@firebase/webchannel-wrapper": "0.6.2", "@grpc/grpc-js": "^1.3.2", "@grpc/proto-loader": "^0.6.0", "node-fetch": "2.6.7", @@ -2856,14 +2857,14 @@ } }, "node_modules/@firebase/firestore-compat": { - "version": "0.1.18", - "resolved": "https://registry.npmjs.org/@firebase/firestore-compat/-/firestore-compat-0.1.18.tgz", - "integrity": "sha512-D6VXudL/B2jlZ6MGpsDPHHm/DSpfKuUOnEb5wwH89Sw0nW5snSMNG8QfYTQYKUxrX35ma+nWUnaa18LlVTUMXQ==", + "version": "0.1.19", + "resolved": "https://registry.npmjs.org/@firebase/firestore-compat/-/firestore-compat-0.1.19.tgz", + "integrity": "sha512-fE3anYxNvX50zILPdGZaJBFcK3NPOHzZR7lLupFBsmd0YFtFT4E89p0QQ3A/oZK9/74jNuvjZoJ8hamknPkZHQ==", "dependencies": { - "@firebase/component": "0.5.14", - "@firebase/firestore": "3.4.9", + "@firebase/component": "0.5.15", + "@firebase/firestore": "3.4.10", "@firebase/firestore-types": "2.5.0", - "@firebase/util": "1.6.0", + "@firebase/util": "1.6.1", "tslib": "^2.1.0" }, "peerDependencies": { @@ -2890,15 +2891,15 @@ "integrity": "sha512-d6xOpEDfsi2CZVlPQzGeux8XMwLT9hssAsaPYExaQMuYskwb+x1x7J371tWlbBdWHroy99KnVB6qIkUbs5X3UQ==" }, 
"node_modules/@firebase/functions": { - "version": "0.8.1", - "resolved": "https://registry.npmjs.org/@firebase/functions/-/functions-0.8.1.tgz", - "integrity": "sha512-UF5187TPn1Q1sFmAUU1oZdKub1t0Z6MAjcskGS6CV4OwAkILZQ9v38LIbo3wnA62R5hr3IFpdEJxKkqHojMwSg==", + "version": "0.8.2", + "resolved": "https://registry.npmjs.org/@firebase/functions/-/functions-0.8.2.tgz", + "integrity": "sha512-w2ng6vodOYj7Xo/J3h0SN6NfpRzId00DOKZDvGylH+LoQPFBshHJmv2mpM5ljEntxWvtv3aGrjD6YvgKr9JUJA==", "dependencies": { "@firebase/app-check-interop-types": "0.1.0", "@firebase/auth-interop-types": "0.1.6", - "@firebase/component": "0.5.14", + "@firebase/component": "0.5.15", "@firebase/messaging-interop-types": "0.1.0", - "@firebase/util": "1.6.0", + "@firebase/util": "1.6.1", "node-fetch": "2.6.7", "tslib": "^2.1.0" }, @@ -2907,14 +2908,14 @@ } }, "node_modules/@firebase/functions-compat": { - "version": "0.2.1", - "resolved": "https://registry.npmjs.org/@firebase/functions-compat/-/functions-compat-0.2.1.tgz", - "integrity": "sha512-1epI+TGb3CxpQrnoSJnKMUqBLn9b6KA1Rro6ISmZIEkaDEi8p8q3UI917XP+OewiPG71xvpySiEIIxWyktcl+A==", + "version": "0.2.2", + "resolved": "https://registry.npmjs.org/@firebase/functions-compat/-/functions-compat-0.2.2.tgz", + "integrity": "sha512-CeAoQDVrrqjc6q0prgyO3mEDDQM84vSH09sNRRMxd9kTjZtKZD4DXf+BKfULSvMAK9mgmL70LBz8RsrcXs6YXg==", "dependencies": { - "@firebase/component": "0.5.14", - "@firebase/functions": "0.8.1", + "@firebase/component": "0.5.15", + "@firebase/functions": "0.8.2", "@firebase/functions-types": "0.5.0", - "@firebase/util": "1.6.0", + "@firebase/util": "1.6.1", "tslib": "^2.1.0" }, "peerDependencies": { @@ -2937,12 +2938,12 @@ "integrity": "sha512-d6xOpEDfsi2CZVlPQzGeux8XMwLT9hssAsaPYExaQMuYskwb+x1x7J371tWlbBdWHroy99KnVB6qIkUbs5X3UQ==" }, "node_modules/@firebase/installations": { - "version": "0.5.9", - "resolved": "https://registry.npmjs.org/@firebase/installations/-/installations-0.5.9.tgz", - "integrity": 
"sha512-0XvF9ig8Zj7MWP4Aq5/Wcyjq9f/cDtD6DKFJhp3BT1AjmACdmq7WD72xok8UBhkOiqymIiGd5eQf7rX225D2Sw==", + "version": "0.5.10", + "resolved": "https://registry.npmjs.org/@firebase/installations/-/installations-0.5.10.tgz", + "integrity": "sha512-lTnYmtGPXwLqjiqvS4KH/V9a3vtZYWBU3Lsx+iOndFkzEyEANQ4qwUgZsP94qWRFd1WumcgDqhFmoVeYkDQCew==", "dependencies": { - "@firebase/component": "0.5.14", - "@firebase/util": "1.6.0", + "@firebase/component": "0.5.15", + "@firebase/util": "1.6.1", "idb": "7.0.1", "tslib": "^2.1.0" }, @@ -2956,9 +2957,9 @@ "integrity": "sha512-d6xOpEDfsi2CZVlPQzGeux8XMwLT9hssAsaPYExaQMuYskwb+x1x7J371tWlbBdWHroy99KnVB6qIkUbs5X3UQ==" }, "node_modules/@firebase/logger": { - "version": "0.3.2", - "resolved": "https://registry.npmjs.org/@firebase/logger/-/logger-0.3.2.tgz", - "integrity": "sha512-lzLrcJp9QBWpo40OcOM9B8QEtBw2Fk1zOZQdvv+rWS6gKmhQBCEMc4SMABQfWdjsylBcDfniD1Q+fUX1dcBTXA==", + "version": "0.3.3", + "resolved": "https://registry.npmjs.org/@firebase/logger/-/logger-0.3.3.tgz", + "integrity": "sha512-POTJl07jOKTOevLXrTvJD/VZ0M6PnJXflbAh5J9VGkmtXPXNG6MdZ9fmRgqYhXKTaDId6AQenQ262uwgpdtO0Q==", "dependencies": { "tslib": "^2.1.0" } @@ -2969,14 +2970,14 @@ "integrity": "sha512-d6xOpEDfsi2CZVlPQzGeux8XMwLT9hssAsaPYExaQMuYskwb+x1x7J371tWlbBdWHroy99KnVB6qIkUbs5X3UQ==" }, "node_modules/@firebase/messaging": { - "version": "0.9.13", - "resolved": "https://registry.npmjs.org/@firebase/messaging/-/messaging-0.9.13.tgz", - "integrity": "sha512-wR/SGYGG/bmz1gRqm6/eGI6zRg/X3qNP0BCk0Oa6xVDKK04UCE9zNRgQYgCSKNP+zuLfDhpHbXvvXQp9/vBYVA==", + "version": "0.9.14", + "resolved": "https://registry.npmjs.org/@firebase/messaging/-/messaging-0.9.14.tgz", + "integrity": "sha512-TrSDdZT/wI94m+kay4ibYDUsBiSkijU71zWhSXPJRGSUNuq8EP0ecs2eu01Kipb6ghl5YUiFFL/uY5Y6WK/I5A==", "dependencies": { - "@firebase/component": "0.5.14", - "@firebase/installations": "0.5.9", + "@firebase/component": "0.5.15", + "@firebase/installations": "0.5.10", "@firebase/messaging-interop-types": "0.1.0", - 
"@firebase/util": "1.6.0", + "@firebase/util": "1.6.1", "idb": "7.0.1", "tslib": "^2.1.0" }, @@ -2985,13 +2986,13 @@ } }, "node_modules/@firebase/messaging-compat": { - "version": "0.1.13", - "resolved": "https://registry.npmjs.org/@firebase/messaging-compat/-/messaging-compat-0.1.13.tgz", - "integrity": "sha512-kGuzjpl+pcTRmEgGDjyOKQnxxQgC7wIJIIHhLMIpfxHHL5+ysN1Tjq0Ztr1t/gcdHKErtnD/n9To5eoGZHqpzA==", + "version": "0.1.14", + "resolved": "https://registry.npmjs.org/@firebase/messaging-compat/-/messaging-compat-0.1.14.tgz", + "integrity": "sha512-XNF5+TxhbFa5nAmkf/PbbNFfmiTcyBfjIl322Me6ZYK4leC8+O9beR7w0wWei8+GhUSIHn3D69ZZRewUUkXukA==", "dependencies": { - "@firebase/component": "0.5.14", - "@firebase/messaging": "0.9.13", - "@firebase/util": "1.6.0", + "@firebase/component": "0.5.15", + "@firebase/messaging": "0.9.14", + "@firebase/util": "1.6.1", "tslib": "^2.1.0" }, "peerDependencies": { @@ -3014,14 +3015,14 @@ "integrity": "sha512-d6xOpEDfsi2CZVlPQzGeux8XMwLT9hssAsaPYExaQMuYskwb+x1x7J371tWlbBdWHroy99KnVB6qIkUbs5X3UQ==" }, "node_modules/@firebase/performance": { - "version": "0.5.9", - "resolved": "https://registry.npmjs.org/@firebase/performance/-/performance-0.5.9.tgz", - "integrity": "sha512-cA1pea1hkIZt0FG0a42tjKQNBhdY7q4apqHML92vBCS9QOOR0SHBui44IGQJRfRBGiVICHW03Q+ikSZv08g+jw==", + "version": "0.5.10", + "resolved": "https://registry.npmjs.org/@firebase/performance/-/performance-0.5.10.tgz", + "integrity": "sha512-rX+OsVMc6IIkrZqFmIjvEfRuRJ84ftPJDDpnqZ134pqTPr3MQgRzU/gPgLio8EdUN5YCthWyA8nB8NrEzBysSA==", "dependencies": { - "@firebase/component": "0.5.14", - "@firebase/installations": "0.5.9", - "@firebase/logger": "0.3.2", - "@firebase/util": "1.6.0", + "@firebase/component": "0.5.15", + "@firebase/installations": "0.5.10", + "@firebase/logger": "0.3.3", + "@firebase/util": "1.6.1", "tslib": "^2.1.0" }, "peerDependencies": { @@ -3029,15 +3030,15 @@ } }, "node_modules/@firebase/performance-compat": { - "version": "0.1.9", - "resolved": 
"https://registry.npmjs.org/@firebase/performance-compat/-/performance-compat-0.1.9.tgz", - "integrity": "sha512-EBX4u/uK76ikJSyoWZ2cEMj63G01w1DA68KDpSypSMhKPJE2eiCtWABRTSXhcaisq/FDwZzl4XhNjDyfzArwhA==", + "version": "0.1.10", + "resolved": "https://registry.npmjs.org/@firebase/performance-compat/-/performance-compat-0.1.10.tgz", + "integrity": "sha512-WhY2pjpXHiyRfnk9t3/BKGK/C0u4pC61mEYh8t8MLayz8KwuiavJj1wuCN2nG2R0y8CXZAsifFLQs1h0K3XzDA==", "dependencies": { - "@firebase/component": "0.5.14", - "@firebase/logger": "0.3.2", - "@firebase/performance": "0.5.9", + "@firebase/component": "0.5.15", + "@firebase/logger": "0.3.3", + "@firebase/performance": "0.5.10", "@firebase/performance-types": "0.1.0", - "@firebase/util": "1.6.0", + "@firebase/util": "1.6.1", "tslib": "^2.1.0" }, "peerDependencies": { @@ -3085,14 +3086,14 @@ "integrity": "sha512-dcQ1GWpOD/eEQ97k66aiEVpNnapVj90/+R+SXTPYGHpYBBypfKJEQjLrvMZ7YXbKm21gXd4NcuxUTjiv1YtLng==" }, "node_modules/@firebase/remote-config": { - "version": "0.3.8", - "resolved": "https://registry.npmjs.org/@firebase/remote-config/-/remote-config-0.3.8.tgz", - "integrity": "sha512-z5HYrjrgzkR25nlvQqiPowDGatlEJirA5sN1B6rOy+KYMLsb6IXLVOdKjj/Tg/uHAErwd0DblGxwBUZKTCuo1g==", + "version": "0.3.9", + "resolved": "https://registry.npmjs.org/@firebase/remote-config/-/remote-config-0.3.9.tgz", + "integrity": "sha512-SQ7tArNyI3sPlbmyAB3X2rS8lHcVlPWIQPRLCmgpKjPKM6Jsv7onCUK+M23DW95iEjK4vEVU5QkxUP3fUXWkxg==", "dependencies": { - "@firebase/component": "0.5.14", - "@firebase/installations": "0.5.9", - "@firebase/logger": "0.3.2", - "@firebase/util": "1.6.0", + "@firebase/component": "0.5.15", + "@firebase/installations": "0.5.10", + "@firebase/logger": "0.3.3", + "@firebase/util": "1.6.1", "tslib": "^2.1.0" }, "peerDependencies": { @@ -3100,15 +3101,15 @@ } }, "node_modules/@firebase/remote-config-compat": { - "version": "0.1.9", - "resolved": "https://registry.npmjs.org/@firebase/remote-config-compat/-/remote-config-compat-0.1.9.tgz", - 
"integrity": "sha512-ud4yINy8cegE82KoBDXS4fOp6qwy0+7zl0k587kMXHSWHbWVRZ/uKMQGJQc7kG0EQp0tZhM20CxVwtcCGsABBA==", + "version": "0.1.10", + "resolved": "https://registry.npmjs.org/@firebase/remote-config-compat/-/remote-config-compat-0.1.10.tgz", + "integrity": "sha512-FSZg9JqgnYIDV78J74W6JUANGjrzCgTRKHioBifONo3e2CdEqQKrvIuGCXEE9+9vYyuqNEtmv5DUIPC4n6XYCQ==", "dependencies": { - "@firebase/component": "0.5.14", - "@firebase/logger": "0.3.2", - "@firebase/remote-config": "0.3.8", + "@firebase/component": "0.5.15", + "@firebase/logger": "0.3.3", + "@firebase/remote-config": "0.3.9", "@firebase/remote-config-types": "0.2.0", - "@firebase/util": "1.6.0", + "@firebase/util": "1.6.1", "tslib": "^2.1.0" }, "peerDependencies": { @@ -3131,12 +3132,12 @@ "integrity": "sha512-d6xOpEDfsi2CZVlPQzGeux8XMwLT9hssAsaPYExaQMuYskwb+x1x7J371tWlbBdWHroy99KnVB6qIkUbs5X3UQ==" }, "node_modules/@firebase/storage": { - "version": "0.9.6", - "resolved": "https://registry.npmjs.org/@firebase/storage/-/storage-0.9.6.tgz", - "integrity": "sha512-q8/s3qFbFl+AlKbyEtGA7FRVhcMu3NKPqHueBTn5XSI0B3bfxptBcDJMb9txs69ppve6P3jrK1//TEWpjTGJUg==", + "version": "0.9.7", + "resolved": "https://registry.npmjs.org/@firebase/storage/-/storage-0.9.7.tgz", + "integrity": "sha512-0unWzgx5bceyO3SX/ilHaxwwHidN5sXZGakFLjAn8cbpjVpmybcKaLOduBxlMXeDCdUFfO8FcvEajFkV+0t2hA==", "dependencies": { - "@firebase/component": "0.5.14", - "@firebase/util": "1.6.0", + "@firebase/component": "0.5.15", + "@firebase/util": "1.6.1", "node-fetch": "2.6.7", "tslib": "^2.1.0" }, @@ -3145,14 +3146,14 @@ } }, "node_modules/@firebase/storage-compat": { - "version": "0.1.14", - "resolved": "https://registry.npmjs.org/@firebase/storage-compat/-/storage-compat-0.1.14.tgz", - "integrity": "sha512-/Fey1n+ryIeAEyd/qXPXh32ReFZUhzE5W0z/+LDA+3yyMGw/a6wCzQqe7wBiGiCRhjd+5XiV++jkCXTflun3Dg==", + "version": "0.1.15", + "resolved": "https://registry.npmjs.org/@firebase/storage-compat/-/storage-compat-0.1.15.tgz", + "integrity": 
"sha512-XjqAYIc8oJv6OAeeLdCUC3KF0wXAzRoBGktRhPMc9umSxVE7Dnr960kF6qtdAbLFGi/uhj478AdpKSQgZ75rQA==", "dependencies": { - "@firebase/component": "0.5.14", - "@firebase/storage": "0.9.6", + "@firebase/component": "0.5.15", + "@firebase/storage": "0.9.7", "@firebase/storage-types": "0.6.0", - "@firebase/util": "1.6.0", + "@firebase/util": "1.6.1", "tslib": "^2.1.0" }, "peerDependencies": { @@ -3179,9 +3180,9 @@ "integrity": "sha512-d6xOpEDfsi2CZVlPQzGeux8XMwLT9hssAsaPYExaQMuYskwb+x1x7J371tWlbBdWHroy99KnVB6qIkUbs5X3UQ==" }, "node_modules/@firebase/util": { - "version": "1.6.0", - "resolved": "https://registry.npmjs.org/@firebase/util/-/util-1.6.0.tgz", - "integrity": "sha512-6+hhqb4Zzjoo12xofTDHPkgW3FnN4ydBsjd5X2KuQI268DR3W3Ld64W/gkKPZrKRgUxeNeb+pykfP3qRe7q+vA==", + "version": "1.6.1", + "resolved": "https://registry.npmjs.org/@firebase/util/-/util-1.6.1.tgz", + "integrity": "sha512-+eDE6uG5GgvXYHbAzfP1mpJUX1VDBD+A8CjBeBoNAKAVAApMSDxDODqRcOq7NW7kFJXSUkMzDJWhnUIifX2R8w==", "dependencies": { "tslib": "^2.1.0" } @@ -3192,9 +3193,9 @@ "integrity": "sha512-d6xOpEDfsi2CZVlPQzGeux8XMwLT9hssAsaPYExaQMuYskwb+x1x7J371tWlbBdWHroy99KnVB6qIkUbs5X3UQ==" }, "node_modules/@firebase/webchannel-wrapper": { - "version": "0.6.1", - "resolved": "https://registry.npmjs.org/@firebase/webchannel-wrapper/-/webchannel-wrapper-0.6.1.tgz", - "integrity": "sha512-9FqhNjKQWpQ3fGnSOCovHOm+yhhiorKEqYLAfd525jWavunDJcx8rOW6i6ozAh+FbwcYMkL7b+3j4UR/30MpoQ==" + "version": "0.6.2", + "resolved": "https://registry.npmjs.org/@firebase/webchannel-wrapper/-/webchannel-wrapper-0.6.2.tgz", + "integrity": "sha512-zThUKcqIU6utWzM93uEvhlh8qj8A5LMPFJPvk/ODb+8GSSif19xM2Lw1M2ijyBy8+6skSkQBbavPzOU5Oh/8tQ==" }, "node_modules/@formatjs/ecma402-abstract": { "version": "1.11.4", @@ -5330,6 +5331,31 @@ "url": "https://opencollective.com/unified" } }, + "node_modules/@monaco-editor/loader": { + "version": "1.3.2", + "resolved": "https://registry.npmjs.org/@monaco-editor/loader/-/loader-1.3.2.tgz", + "integrity": 
"sha512-BTDbpHl3e47r3AAtpfVFTlAi7WXv4UQ/xZmz8atKl4q7epQV5e7+JbigFDViWF71VBi4IIBdcWP57Hj+OWuc9g==", + "dependencies": { + "state-local": "^1.0.6" + }, + "peerDependencies": { + "monaco-editor": ">= 0.21.0 < 1" + } + }, + "node_modules/@monaco-editor/react": { + "version": "4.4.5", + "resolved": "https://registry.npmjs.org/@monaco-editor/react/-/react-4.4.5.tgz", + "integrity": "sha512-IImtzU7sRc66OOaQVCG+5PFHkSWnnhrUWGBuH6zNmH2h0YgmAhcjHZQc/6MY9JWEbUtVF1WPBMJ9u1XuFbRrVA==", + "dependencies": { + "@monaco-editor/loader": "^1.3.2", + "prop-types": "^15.7.2" + }, + "peerDependencies": { + "monaco-editor": ">= 0.25.0 < 1", + "react": "^16.8.0 || ^17.0.0 || ^18.0.0", + "react-dom": "^16.8.0 || ^17.0.0 || ^18.0.0" + } + }, "node_modules/@mrmlnc/readdir-enhanced": { "version": "2.2.1", "resolved": "https://registry.npmjs.org/@mrmlnc/readdir-enhanced/-/readdir-enhanced-2.2.1.tgz", @@ -5434,23 +5460,6 @@ "node": ">=10" } }, - "node_modules/@popmotion/easing": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/@popmotion/easing/-/easing-1.0.2.tgz", - "integrity": "sha512-IkdW0TNmRnWTeWI7aGQIVDbKXPWHVEYdGgd5ZR4SH/Ty/61p63jCjrPxX1XrR7IGkl08bjhJROStD7j+RKgoIw==" - }, - "node_modules/@popmotion/popcorn": { - "version": "0.4.4", - "resolved": "https://registry.npmjs.org/@popmotion/popcorn/-/popcorn-0.4.4.tgz", - "integrity": "sha512-jYO/8319fKoNLMlY4ZJPiPu8Ea8occYwRZhxpaNn/kZsK4QG2E7XFlXZMJBsTWDw7I1i0uaqyC4zn1nwEezLzg==", - "dependencies": { - "@popmotion/easing": "^1.0.1", - "framesync": "^4.0.1", - "hey-listen": "^1.0.8", - "style-value-types": "^3.1.7", - "tslib": "^1.10.0" - } - }, "node_modules/@protobufjs/aspromise": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/@protobufjs/aspromise/-/aspromise-1.1.2.tgz", @@ -6586,18 +6595,18 @@ "dev": true }, "node_modules/@storybook/addon-actions": { - "version": "6.5.7", - "resolved": "https://registry.npmjs.org/@storybook/addon-actions/-/addon-actions-6.5.7.tgz", - "integrity": 
"sha512-gTkPr2FYX+vySZKEg5Wq7uHPkVUq3hJ7ZKvGls+/xjgaTwfu3iIly53FEFUl8A6kMQ+4gtTC+YRr3cSJgXMbAg==", + "version": "6.5.8", + "resolved": "https://registry.npmjs.org/@storybook/addon-actions/-/addon-actions-6.5.8.tgz", + "integrity": "sha512-9ciR1iWBTzQNBDlq0iQs9+TV7gng+FbQxW5mHNxNvT9SxY1dt02wCPHZeVE/5la61wBXZs/zpEepZA93VzVBDw==", "dev": true, "dependencies": { - "@storybook/addons": "6.5.7", - "@storybook/api": "6.5.7", - "@storybook/client-logger": "6.5.7", - "@storybook/components": "6.5.7", - "@storybook/core-events": "6.5.7", + "@storybook/addons": "6.5.8", + "@storybook/api": "6.5.8", + "@storybook/client-logger": "6.5.8", + "@storybook/components": "6.5.8", + "@storybook/core-events": "6.5.8", "@storybook/csf": "0.0.2--canary.4566f4d.1", - "@storybook/theming": "6.5.7", + "@storybook/theming": "6.5.8", "core-js": "^3.8.2", "fast-deep-equal": "^3.1.3", "global": "^4.4.0", @@ -6629,18 +6638,18 @@ } }, "node_modules/@storybook/addon-backgrounds": { - "version": "6.5.7", - "resolved": "https://registry.npmjs.org/@storybook/addon-backgrounds/-/addon-backgrounds-6.5.7.tgz", - "integrity": "sha512-ryisDpxbIEZbYJkQWU5xvsj940jhWrWizedFsY9g/qBIBi33UrW/H1hKZQtmg0bzuNTgYcBjRy50ikJgH/eKAQ==", + "version": "6.5.8", + "resolved": "https://registry.npmjs.org/@storybook/addon-backgrounds/-/addon-backgrounds-6.5.8.tgz", + "integrity": "sha512-pvlP5ZVVfd2sWzgCqG/f6RJX/h9648znYbzaLQ4Z6whQIFobP3H3/cj9k/RTy3uXg5vC0IWDHSEaCXgin2sW1Q==", "dev": true, "dependencies": { - "@storybook/addons": "6.5.7", - "@storybook/api": "6.5.7", - "@storybook/client-logger": "6.5.7", - "@storybook/components": "6.5.7", - "@storybook/core-events": "6.5.7", + "@storybook/addons": "6.5.8", + "@storybook/api": "6.5.8", + "@storybook/client-logger": "6.5.8", + "@storybook/components": "6.5.8", + "@storybook/core-events": "6.5.8", "@storybook/csf": "0.0.2--canary.4566f4d.1", - "@storybook/theming": "6.5.7", + "@storybook/theming": "6.5.8", "core-js": "^3.8.2", "global": "^4.4.0", "memoizerific": "^1.11.3", 
@@ -6666,20 +6675,20 @@ } }, "node_modules/@storybook/addon-controls": { - "version": "6.5.7", - "resolved": "https://registry.npmjs.org/@storybook/addon-controls/-/addon-controls-6.5.7.tgz", - "integrity": "sha512-1JGphHk1gcLLpkft/D5BkygXwelSdWQqvXnfFc62BVqvzxv8hCF4zuUosKLWMlB/nzVbd6W4oEDV/Mqmt6h/7w==", + "version": "6.5.8", + "resolved": "https://registry.npmjs.org/@storybook/addon-controls/-/addon-controls-6.5.8.tgz", + "integrity": "sha512-fB6p5DgVHUnJKUzOlT2mtvaSCincnO+vuYLyf++f+l4BlYK1Es9HNl/puaRoMgdW+LoGJjXPTIMcMByeHVIt6Q==", "dev": true, "dependencies": { - "@storybook/addons": "6.5.7", - "@storybook/api": "6.5.7", - "@storybook/client-logger": "6.5.7", - "@storybook/components": "6.5.7", - "@storybook/core-common": "6.5.7", + "@storybook/addons": "6.5.8", + "@storybook/api": "6.5.8", + "@storybook/client-logger": "6.5.8", + "@storybook/components": "6.5.8", + "@storybook/core-common": "6.5.8", "@storybook/csf": "0.0.2--canary.4566f4d.1", - "@storybook/node-logger": "6.5.7", - "@storybook/store": "6.5.7", - "@storybook/theming": "6.5.7", + "@storybook/node-logger": "6.5.8", + "@storybook/store": "6.5.8", + "@storybook/theming": "6.5.8", "core-js": "^3.8.2", "lodash": "^4.17.21", "ts-dedent": "^2.0.0" @@ -6702,29 +6711,29 @@ } }, "node_modules/@storybook/addon-docs": { - "version": "6.5.7", - "resolved": "https://registry.npmjs.org/@storybook/addon-docs/-/addon-docs-6.5.7.tgz", - "integrity": "sha512-RghRpimJOJl9c/H6qvCCD0zHLETBIVWXsdYJF8GiY6iTKd+tgQYizuuoBT4f3PAMEMHVhmvWSjkkFLxKxzQLjQ==", + "version": "6.5.8", + "resolved": "https://registry.npmjs.org/@storybook/addon-docs/-/addon-docs-6.5.8.tgz", + "integrity": "sha512-pAvWwh5YCrsW9nHCrd5BpFigvqn92JisX0aEnwAqKC9B1AW1LxhdPn1o9CQCeszQGaq163RA6AzkCejvAqhtUQ==", "dev": true, "dependencies": { "@babel/plugin-transform-react-jsx": "^7.12.12", "@babel/preset-env": "^7.12.11", "@jest/transform": "^26.6.2", "@mdx-js/react": "^1.6.22", - "@storybook/addons": "6.5.7", - "@storybook/api": "6.5.7", - 
"@storybook/components": "6.5.7", - "@storybook/core-common": "6.5.7", - "@storybook/core-events": "6.5.7", + "@storybook/addons": "6.5.8", + "@storybook/api": "6.5.8", + "@storybook/components": "6.5.8", + "@storybook/core-common": "6.5.8", + "@storybook/core-events": "6.5.8", "@storybook/csf": "0.0.2--canary.4566f4d.1", - "@storybook/docs-tools": "6.5.7", + "@storybook/docs-tools": "6.5.8", "@storybook/mdx1-csf": "^0.0.1", - "@storybook/node-logger": "6.5.7", - "@storybook/postinstall": "6.5.7", - "@storybook/preview-web": "6.5.7", - "@storybook/source-loader": "6.5.7", - "@storybook/store": "6.5.7", - "@storybook/theming": "6.5.7", + "@storybook/node-logger": "6.5.8", + "@storybook/postinstall": "6.5.8", + "@storybook/preview-web": "6.5.8", + "@storybook/source-loader": "6.5.8", + "@storybook/store": "6.5.8", + "@storybook/theming": "6.5.8", "babel-loader": "^8.0.0", "core-js": "^3.8.2", "fast-deep-equal": "^3.1.3", @@ -6758,23 +6767,23 @@ } }, "node_modules/@storybook/addon-essentials": { - "version": "6.5.7", - "resolved": "https://registry.npmjs.org/@storybook/addon-essentials/-/addon-essentials-6.5.7.tgz", - "integrity": "sha512-JZ80W9PgZOEUp2SjhBYyYHxQduxSIe4n9Wdoy8XDtV28152jDNms6UPjFeEVb+a9rVybYOwWnOnEhBWF6ZfJ/g==", - "dev": true, - "dependencies": { - "@storybook/addon-actions": "6.5.7", - "@storybook/addon-backgrounds": "6.5.7", - "@storybook/addon-controls": "6.5.7", - "@storybook/addon-docs": "6.5.7", - "@storybook/addon-measure": "6.5.7", - "@storybook/addon-outline": "6.5.7", - "@storybook/addon-toolbars": "6.5.7", - "@storybook/addon-viewport": "6.5.7", - "@storybook/addons": "6.5.7", - "@storybook/api": "6.5.7", - "@storybook/core-common": "6.5.7", - "@storybook/node-logger": "6.5.7", + "version": "6.5.8", + "resolved": "https://registry.npmjs.org/@storybook/addon-essentials/-/addon-essentials-6.5.8.tgz", + "integrity": "sha512-K/Aw/GLugCz5/S3c2tz5lnfC8aN6dSoQQDr8xaMDcBlT9h/xZ1l4jQQnx/mvY/qEvXtexBF41DE6ROWGKSZeSg==", + "dev": true, + 
"dependencies": { + "@storybook/addon-actions": "6.5.8", + "@storybook/addon-backgrounds": "6.5.8", + "@storybook/addon-controls": "6.5.8", + "@storybook/addon-docs": "6.5.8", + "@storybook/addon-measure": "6.5.8", + "@storybook/addon-outline": "6.5.8", + "@storybook/addon-toolbars": "6.5.8", + "@storybook/addon-viewport": "6.5.8", + "@storybook/addons": "6.5.8", + "@storybook/api": "6.5.8", + "@storybook/core-common": "6.5.8", + "@storybook/node-logger": "6.5.8", "core-js": "^3.8.2", "regenerator-runtime": "^0.13.7", "ts-dedent": "^2.0.0" @@ -6841,16 +6850,16 @@ } }, "node_modules/@storybook/addon-measure": { - "version": "6.5.7", - "resolved": "https://registry.npmjs.org/@storybook/addon-measure/-/addon-measure-6.5.7.tgz", - "integrity": "sha512-NMth6CErySKQ9WnfzMZ4nelHa2bBzZ60ZgsDq5s5dKHhJzZPm2nclmGAGE+VhqI/USe8b1fnjKFeHH485T8J2g==", + "version": "6.5.8", + "resolved": "https://registry.npmjs.org/@storybook/addon-measure/-/addon-measure-6.5.8.tgz", + "integrity": "sha512-zpNAt1XwBLnQ3OjCfj63J7vC2WCTyAjvbGVAsUkpQb21vr/e3sPFQZPKGwio85SYjIX7AJ+Oi28mbEwWzS8wFA==", "dev": true, "dependencies": { - "@storybook/addons": "6.5.7", - "@storybook/api": "6.5.7", - "@storybook/client-logger": "6.5.7", - "@storybook/components": "6.5.7", - "@storybook/core-events": "6.5.7", + "@storybook/addons": "6.5.8", + "@storybook/api": "6.5.8", + "@storybook/client-logger": "6.5.8", + "@storybook/components": "6.5.8", + "@storybook/core-events": "6.5.8", "@storybook/csf": "0.0.2--canary.4566f4d.1", "core-js": "^3.8.2", "global": "^4.4.0" @@ -6873,16 +6882,16 @@ } }, "node_modules/@storybook/addon-outline": { - "version": "6.5.7", - "resolved": "https://registry.npmjs.org/@storybook/addon-outline/-/addon-outline-6.5.7.tgz", - "integrity": "sha512-qTu19FnZz+rjY7SxPOgiQkuAxHRNRhUYgvUwI+ep0ZQcBddsRgniQjzXtErlUMeVoMZ63mDuOaJp67ltkriAOQ==", + "version": "6.5.8", + "resolved": "https://registry.npmjs.org/@storybook/addon-outline/-/addon-outline-6.5.8.tgz", + "integrity": 
"sha512-/bEjYTVJNM5QEiguS5nVQlerl5NhgOod1zLExnkchc8+FTJC58Vy7CRfzr2iaIMuf1QRPqBwSIy6ZqLJOdUfnQ==", "dev": true, "dependencies": { - "@storybook/addons": "6.5.7", - "@storybook/api": "6.5.7", - "@storybook/client-logger": "6.5.7", - "@storybook/components": "6.5.7", - "@storybook/core-events": "6.5.7", + "@storybook/addons": "6.5.8", + "@storybook/api": "6.5.8", + "@storybook/client-logger": "6.5.8", + "@storybook/components": "6.5.8", + "@storybook/core-events": "6.5.8", "@storybook/csf": "0.0.2--canary.4566f4d.1", "core-js": "^3.8.2", "global": "^4.4.0", @@ -6907,16 +6916,16 @@ } }, "node_modules/@storybook/addon-toolbars": { - "version": "6.5.7", - "resolved": "https://registry.npmjs.org/@storybook/addon-toolbars/-/addon-toolbars-6.5.7.tgz", - "integrity": "sha512-+MUG5t4isQNf+q7BpEsGwuYAvYgs9XTdzzdvL/9jedQ7udJsWmG1q9a6m9+iQGPr/WK+88F2kgSOknpib3J21w==", + "version": "6.5.8", + "resolved": "https://registry.npmjs.org/@storybook/addon-toolbars/-/addon-toolbars-6.5.8.tgz", + "integrity": "sha512-16eRbbtn4/cH1xU8JlPZRdShwUwSsPcqpyH1JNl+rgYQ6SaSNq3aO/jDFeQe93guSD0YPRWHz8dKtn6OxVeozQ==", "dev": true, "dependencies": { - "@storybook/addons": "6.5.7", - "@storybook/api": "6.5.7", - "@storybook/client-logger": "6.5.7", - "@storybook/components": "6.5.7", - "@storybook/theming": "6.5.7", + "@storybook/addons": "6.5.8", + "@storybook/api": "6.5.8", + "@storybook/client-logger": "6.5.8", + "@storybook/components": "6.5.8", + "@storybook/theming": "6.5.8", "core-js": "^3.8.2", "regenerator-runtime": "^0.13.7" }, @@ -6938,17 +6947,17 @@ } }, "node_modules/@storybook/addon-viewport": { - "version": "6.5.7", - "resolved": "https://registry.npmjs.org/@storybook/addon-viewport/-/addon-viewport-6.5.7.tgz", - "integrity": "sha512-8VmSTGKY3+9kZ09THC7546OaFbjLu5kEAGU5ZFSZaNlsJwRg7bC3bScKbnyX5EhihgZ3W8oJt/eMAIqXKHxA8g==", + "version": "6.5.8", + "resolved": "https://registry.npmjs.org/@storybook/addon-viewport/-/addon-viewport-6.5.8.tgz", + "integrity": 
"sha512-MTpZWkBWNPH55iNHK4tBNKTdew5xKfoNvOj0pZn1rYDHlylMTlq7aoccwRjjK2jZeHHNnb1rm6ZkQDjmYu0Tcw==", "dev": true, "dependencies": { - "@storybook/addons": "6.5.7", - "@storybook/api": "6.5.7", - "@storybook/client-logger": "6.5.7", - "@storybook/components": "6.5.7", - "@storybook/core-events": "6.5.7", - "@storybook/theming": "6.5.7", + "@storybook/addons": "6.5.8", + "@storybook/api": "6.5.8", + "@storybook/client-logger": "6.5.8", + "@storybook/components": "6.5.8", + "@storybook/core-events": "6.5.8", + "@storybook/theming": "6.5.8", "core-js": "^3.8.2", "global": "^4.4.0", "memoizerific": "^1.11.3", @@ -6973,18 +6982,18 @@ } }, "node_modules/@storybook/addons": { - "version": "6.5.7", - "resolved": "https://registry.npmjs.org/@storybook/addons/-/addons-6.5.7.tgz", - "integrity": "sha512-tUZ2c1uegUcwY31ztNQZGU/HUwAEEGIR8fEOvvO8S0TNQGoo6cwFtZmWBh3mTSRGcmzK2SNBjFHZua5Ee9TefA==", + "version": "6.5.8", + "resolved": "https://registry.npmjs.org/@storybook/addons/-/addons-6.5.8.tgz", + "integrity": "sha512-L3LmbsYP9tDHHvpr/yv8YuEkzym7SXp/jZ0km31tpG3EuZmgGu7MXPrZ2ymEw4PkAhQzztgRr23VTfKobGUojA==", "dev": true, "dependencies": { - "@storybook/api": "6.5.7", - "@storybook/channels": "6.5.7", - "@storybook/client-logger": "6.5.7", - "@storybook/core-events": "6.5.7", + "@storybook/api": "6.5.8", + "@storybook/channels": "6.5.8", + "@storybook/client-logger": "6.5.8", + "@storybook/core-events": "6.5.8", "@storybook/csf": "0.0.2--canary.4566f4d.1", - "@storybook/router": "6.5.7", - "@storybook/theming": "6.5.7", + "@storybook/router": "6.5.8", + "@storybook/theming": "6.5.8", "@types/webpack-env": "^1.16.0", "core-js": "^3.8.2", "global": "^4.4.0", @@ -7000,18 +7009,18 @@ } }, "node_modules/@storybook/api": { - "version": "6.5.7", - "resolved": "https://registry.npmjs.org/@storybook/api/-/api-6.5.7.tgz", - "integrity": "sha512-QCNypz4X+lYuFW7EzvRPXMf8uS3gfSIV8sqXtEe5XoMb0HQXhy6AGU7/4iAeuUimtETqLTxq+kOxaSg4uPowxg==", + "version": "6.5.8", + "resolved": 
"https://registry.npmjs.org/@storybook/api/-/api-6.5.8.tgz", + "integrity": "sha512-/MueV+wLCvy9gFA3ih4g7QYjDmn14i+D2ydonfaEC7R+agFGXxXwJGPKkz3yBNrRpNkBwcbY9mAmv8lE2AqgqQ==", "dev": true, "dependencies": { - "@storybook/channels": "6.5.7", - "@storybook/client-logger": "6.5.7", - "@storybook/core-events": "6.5.7", + "@storybook/channels": "6.5.8", + "@storybook/client-logger": "6.5.8", + "@storybook/core-events": "6.5.8", "@storybook/csf": "0.0.2--canary.4566f4d.1", - "@storybook/router": "6.5.7", + "@storybook/router": "6.5.8", "@storybook/semver": "^7.3.2", - "@storybook/theming": "6.5.7", + "@storybook/theming": "6.5.8", "core-js": "^3.8.2", "fast-deep-equal": "^3.1.3", "global": "^4.4.0", @@ -7033,28 +7042,28 @@ } }, "node_modules/@storybook/builder-webpack4": { - "version": "6.5.7", - "resolved": "https://registry.npmjs.org/@storybook/builder-webpack4/-/builder-webpack4-6.5.7.tgz", - "integrity": "sha512-8OB3mZ2L6kQBiAXlkhna/MHREXIPtqXi2AJLT3+bTzBlqkusH+PwMZxWHbcPl1vZrlNQBC40Elx9tdynGkVQ6g==", + "version": "6.5.8", + "resolved": "https://registry.npmjs.org/@storybook/builder-webpack4/-/builder-webpack4-6.5.8.tgz", + "integrity": "sha512-4/CVp/AlOxCeWZ/DF1TVS/TuzHao4l9KCq7DhL+utFEVl9c/dpgoZXc0Gy2FfHa2RXHKckrH/VUfV2KQk4TNSw==", "dev": true, "dependencies": { "@babel/core": "^7.12.10", - "@storybook/addons": "6.5.7", - "@storybook/api": "6.5.7", - "@storybook/channel-postmessage": "6.5.7", - "@storybook/channels": "6.5.7", - "@storybook/client-api": "6.5.7", - "@storybook/client-logger": "6.5.7", - "@storybook/components": "6.5.7", - "@storybook/core-common": "6.5.7", - "@storybook/core-events": "6.5.7", - "@storybook/node-logger": "6.5.7", - "@storybook/preview-web": "6.5.7", - "@storybook/router": "6.5.7", + "@storybook/addons": "6.5.8", + "@storybook/api": "6.5.8", + "@storybook/channel-postmessage": "6.5.8", + "@storybook/channels": "6.5.8", + "@storybook/client-api": "6.5.8", + "@storybook/client-logger": "6.5.8", + "@storybook/components": "6.5.8", + 
"@storybook/core-common": "6.5.8", + "@storybook/core-events": "6.5.8", + "@storybook/node-logger": "6.5.8", + "@storybook/preview-web": "6.5.8", + "@storybook/router": "6.5.8", "@storybook/semver": "^7.3.2", - "@storybook/store": "6.5.7", - "@storybook/theming": "6.5.7", - "@storybook/ui": "6.5.7", + "@storybook/store": "6.5.8", + "@storybook/theming": "6.5.8", + "@storybook/ui": "6.5.8", "@types/node": "^14.0.10 || ^16.0.0", "@types/webpack": "^4.41.26", "autoprefixer": "^9.8.6", @@ -7734,7 +7743,7 @@ "node_modules/@storybook/builder-webpack4/node_modules/to-regex-range": { "version": "2.1.1", "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-2.1.1.tgz", - "integrity": "sha1-fIDBe53+vlmeJzZ+DU3VWQFB2zg=", + "integrity": "sha512-ZZWNfCjUokXXDGXFpZehJIkZqq91BcULFq/Pi7M5i4JnxXdhMKAK682z8bCW3o8Hj1wuuzoKcW3DfVzaP6VuNg==", "dev": true, "dependencies": { "is-number": "^3.0.0", @@ -7849,27 +7858,27 @@ "dev": true }, "node_modules/@storybook/builder-webpack5": { - "version": "6.5.7", - "resolved": "https://registry.npmjs.org/@storybook/builder-webpack5/-/builder-webpack5-6.5.7.tgz", - "integrity": "sha512-3mbQ09KBTUsFYxnEtR4vr7W1wodRen3o8fANY5XxvE1sr1TopHVOKpIlePjrpcrXcLKFI/ZWrX3IfK88LCuI9w==", + "version": "6.5.8", + "resolved": "https://registry.npmjs.org/@storybook/builder-webpack5/-/builder-webpack5-6.5.8.tgz", + "integrity": "sha512-bc7LSGzOqTUImejsfjWAHEHwBreoPQKS6pfnWYkjKMvfvWOwlHSAxwOSM5DyS4cvpcpMDG8yBJNz2QcvXFVLxA==", "dev": true, "dependencies": { "@babel/core": "^7.12.10", - "@storybook/addons": "6.5.7", - "@storybook/api": "6.5.7", - "@storybook/channel-postmessage": "6.5.7", - "@storybook/channels": "6.5.7", - "@storybook/client-api": "6.5.7", - "@storybook/client-logger": "6.5.7", - "@storybook/components": "6.5.7", - "@storybook/core-common": "6.5.7", - "@storybook/core-events": "6.5.7", - "@storybook/node-logger": "6.5.7", - "@storybook/preview-web": "6.5.7", - "@storybook/router": "6.5.7", + "@storybook/addons": "6.5.8", + 
"@storybook/api": "6.5.8", + "@storybook/channel-postmessage": "6.5.8", + "@storybook/channels": "6.5.8", + "@storybook/client-api": "6.5.8", + "@storybook/client-logger": "6.5.8", + "@storybook/components": "6.5.8", + "@storybook/core-common": "6.5.8", + "@storybook/core-events": "6.5.8", + "@storybook/node-logger": "6.5.8", + "@storybook/preview-web": "6.5.8", + "@storybook/router": "6.5.8", "@storybook/semver": "^7.3.2", - "@storybook/store": "6.5.7", - "@storybook/theming": "6.5.7", + "@storybook/store": "6.5.8", + "@storybook/theming": "6.5.8", "@types/node": "^14.0.10 || ^16.0.0", "babel-loader": "^8.0.0", "babel-plugin-named-exports-order": "^0.0.2", @@ -7920,9 +7929,9 @@ "dev": true }, "node_modules/@storybook/builder-webpack5/node_modules/acorn": { - "version": "8.7.0", - "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.7.0.tgz", - "integrity": "sha512-V/LGr1APy+PXIwKebEWrkZPwoeoF+w1jiOBUmuxuiUIaOHtob8Qc9BTrYo7VuI5fR8tqsy+buA2WFooR5olqvQ==", + "version": "8.7.1", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.7.1.tgz", + "integrity": "sha512-Xx54uLJQZ19lKygFXOWsscKUbsBZW0CPykPhVQdhIeIwrbPmJzqeASDInc8nKBnp/JT6igTs82qPXz069H8I/A==", "dev": true, "optional": true, "peer": true, @@ -8482,14 +8491,14 @@ "dev": true }, "node_modules/@storybook/channel-postmessage": { - "version": "6.5.7", - "resolved": "https://registry.npmjs.org/@storybook/channel-postmessage/-/channel-postmessage-6.5.7.tgz", - "integrity": "sha512-X4UPgm4O0503CsSnqAM1ht/6R9ofnoMcqFZxYRu9PSvHlhaFR9V9AU4VjQhakH7alFzRsAhcAV2PFVTAdWhgtA==", + "version": "6.5.8", + "resolved": "https://registry.npmjs.org/@storybook/channel-postmessage/-/channel-postmessage-6.5.8.tgz", + "integrity": "sha512-6IkIKk+UMYKk05vN8gWHvvOV/EZNXpQG/5gesGDALjkCyvRmcktHak1a9tHpoihZ3L7/gDwXOZraCZmuy8vBcQ==", "dev": true, "dependencies": { - "@storybook/channels": "6.5.7", - "@storybook/client-logger": "6.5.7", - "@storybook/core-events": "6.5.7", + "@storybook/channels": "6.5.8", + "@storybook/client-logger": 
"6.5.8", + "@storybook/core-events": "6.5.8", "core-js": "^3.8.2", "global": "^4.4.0", "qs": "^6.10.0", @@ -8501,13 +8510,13 @@ } }, "node_modules/@storybook/channel-websocket": { - "version": "6.5.7", - "resolved": "https://registry.npmjs.org/@storybook/channel-websocket/-/channel-websocket-6.5.7.tgz", - "integrity": "sha512-C+l6t3ZgHzU8gL8GJ8c4GMttJglGJIwq1LtJJKnGzx2kJCD0HRMMqc/qFS2K2EwP99hLwwGIlCpom3UZ1aEanA==", + "version": "6.5.8", + "resolved": "https://registry.npmjs.org/@storybook/channel-websocket/-/channel-websocket-6.5.8.tgz", + "integrity": "sha512-lAtvgO0FWsyS3u7uFbsGIYp2aSWJfWU/LOtc3x1K5c84JJAd9fncYkyZMwP1gMbdNgYxJoxe8HXtVtfeNegPuQ==", "dev": true, "dependencies": { - "@storybook/channels": "6.5.7", - "@storybook/client-logger": "6.5.7", + "@storybook/channels": "6.5.8", + "@storybook/client-logger": "6.5.8", "core-js": "^3.8.2", "global": "^4.4.0", "telejson": "^6.0.8" @@ -8518,9 +8527,9 @@ } }, "node_modules/@storybook/channels": { - "version": "6.5.7", - "resolved": "https://registry.npmjs.org/@storybook/channels/-/channels-6.5.7.tgz", - "integrity": "sha512-v880fWBpWgiWrDmZesTIstNfMZhrPfgXAtLNcL5Z89NAPahsHskOSszc0BDxKN3gb+ZeTKUqHxY57dQdp+1rhg==", + "version": "6.5.8", + "resolved": "https://registry.npmjs.org/@storybook/channels/-/channels-6.5.8.tgz", + "integrity": "sha512-fNql1lEIvWlI1NiRtwFMWOOvfW6qxgeSP6xoqiAJ0b+QYegEFG9UxJDuEvVHq++S81FulgQ5U+p+5R9XSV19tQ==", "dev": true, "dependencies": { "core-js": "^3.8.2", @@ -8533,18 +8542,18 @@ } }, "node_modules/@storybook/client-api": { - "version": "6.5.7", - "resolved": "https://registry.npmjs.org/@storybook/client-api/-/client-api-6.5.7.tgz", - "integrity": "sha512-na8NZhB6GnAGp3jRTV9wwue3WGwSZoi5jfxrKSYMPL/s/2n07/soixHggqueBDXuNBrPoJaXbY/nRHmSjLwxtQ==", + "version": "6.5.8", + "resolved": "https://registry.npmjs.org/@storybook/client-api/-/client-api-6.5.8.tgz", + "integrity": "sha512-mdU+qQ4+T2OUbEnl+3MWRKxEPju/EOIUg66hMgmif8c5u7YFYBFulUMUYLICMjll8Jlu+37+g+qO3K2eEz6CEw==", "dev": true, 
"dependencies": { - "@storybook/addons": "6.5.7", - "@storybook/channel-postmessage": "6.5.7", - "@storybook/channels": "6.5.7", - "@storybook/client-logger": "6.5.7", - "@storybook/core-events": "6.5.7", + "@storybook/addons": "6.5.8", + "@storybook/channel-postmessage": "6.5.8", + "@storybook/channels": "6.5.8", + "@storybook/client-logger": "6.5.8", + "@storybook/core-events": "6.5.8", "@storybook/csf": "0.0.2--canary.4566f4d.1", - "@storybook/store": "6.5.7", + "@storybook/store": "6.5.8", "@types/qs": "^6.9.5", "@types/webpack-env": "^1.16.0", "core-js": "^3.8.2", @@ -8569,9 +8578,9 @@ } }, "node_modules/@storybook/client-logger": { - "version": "6.5.7", - "resolved": "https://registry.npmjs.org/@storybook/client-logger/-/client-logger-6.5.7.tgz", - "integrity": "sha512-ycDy1kXeXRg3djSTXRGMVxc0kvaWw/UhHDs2VGFmOPScsoeWpdbePHXJMFbsqippxuexpsofqTryBwH2b6BPhw==", + "version": "6.5.8", + "resolved": "https://registry.npmjs.org/@storybook/client-logger/-/client-logger-6.5.8.tgz", + "integrity": "sha512-dH6HSaVuOIMHy1+rpsqcD3SJxVZEEbuEtsNpdUGwLJaIuduhUJJpM2xQfUW0siZDyrgwoa+znll+G0YNUbv7sg==", "dev": true, "dependencies": { "core-js": "^3.8.2", @@ -8583,14 +8592,14 @@ } }, "node_modules/@storybook/components": { - "version": "6.5.7", - "resolved": "https://registry.npmjs.org/@storybook/components/-/components-6.5.7.tgz", - "integrity": "sha512-xSOaOK8q6bXYkmN4LZKucvXU2HRHqKwwTafFDh5yzsCSEB2VQIJlyo4ePVyv/GJgBUX6+WdSA7c5r5ePXK6IYQ==", + "version": "6.5.8", + "resolved": "https://registry.npmjs.org/@storybook/components/-/components-6.5.8.tgz", + "integrity": "sha512-YE+LZ1/GXoqertxodsf+L9ehcohbICRAxgE/iNqc7MZfk95SD3XRSUbxhCpGe8QTIZJpzs1tK4LFZ3Fg5w/+Lg==", "dev": true, "dependencies": { - "@storybook/client-logger": "6.5.7", + "@storybook/client-logger": "6.5.8", "@storybook/csf": "0.0.2--canary.4566f4d.1", - "@storybook/theming": "6.5.7", + "@storybook/theming": "6.5.8", "@types/react-syntax-highlighter": "11.0.5", "core-js": "^3.8.2", "qs": "^6.10.0", @@ -8608,13 
+8617,13 @@ } }, "node_modules/@storybook/core": { - "version": "6.5.7", - "resolved": "https://registry.npmjs.org/@storybook/core/-/core-6.5.7.tgz", - "integrity": "sha512-YSu2qur1E5y9rjVspchtCfupPT3y1XyjBInhwzo8jC3rvm2WY0RS80VQU3dga4QBllO1M+cDmLzmOEPL82+Juw==", + "version": "6.5.8", + "resolved": "https://registry.npmjs.org/@storybook/core/-/core-6.5.8.tgz", + "integrity": "sha512-+Fv4n1E5N4Avty9GcRbz4vB2IWH//se2OUU+RTT3vneCOGjyus5bj0Or6GU5wef5UGuvHF78mHg/frhWpguzsw==", "dev": true, "dependencies": { - "@storybook/core-client": "6.5.7", - "@storybook/core-server": "6.5.7" + "@storybook/core-client": "6.5.8", + "@storybook/core-server": "6.5.8" }, "funding": { "type": "opencollective", @@ -8638,21 +8647,21 @@ } }, "node_modules/@storybook/core-client": { - "version": "6.5.7", - "resolved": "https://registry.npmjs.org/@storybook/core-client/-/core-client-6.5.7.tgz", - "integrity": "sha512-GL7m33tpEyornhfnTddbvDuLkA9EMe1zKv9oZGsUYo78cWRTiEibYyHegIi9/ThplRXvpFR/5uHY4Zx5Z5rxJg==", + "version": "6.5.8", + "resolved": "https://registry.npmjs.org/@storybook/core-client/-/core-client-6.5.8.tgz", + "integrity": "sha512-8x8qKQ2clvpfDcoWrNBmQ8Xt9z/i32TFIBp4PEZMcbB7eqo517nzfllLiXDipiJgO7BGxKtY5CRHQ9pAU9G27A==", "dev": true, "dependencies": { - "@storybook/addons": "6.5.7", - "@storybook/channel-postmessage": "6.5.7", - "@storybook/channel-websocket": "6.5.7", - "@storybook/client-api": "6.5.7", - "@storybook/client-logger": "6.5.7", - "@storybook/core-events": "6.5.7", + "@storybook/addons": "6.5.8", + "@storybook/channel-postmessage": "6.5.8", + "@storybook/channel-websocket": "6.5.8", + "@storybook/client-api": "6.5.8", + "@storybook/client-logger": "6.5.8", + "@storybook/core-events": "6.5.8", "@storybook/csf": "0.0.2--canary.4566f4d.1", - "@storybook/preview-web": "6.5.7", - "@storybook/store": "6.5.7", - "@storybook/ui": "6.5.7", + "@storybook/preview-web": "6.5.8", + "@storybook/store": "6.5.8", + "@storybook/ui": "6.5.8", "airbnb-js-shims": "^2.2.1", "ansi-to-html": 
"^0.6.11", "core-js": "^3.8.2", @@ -8680,9 +8689,9 @@ } }, "node_modules/@storybook/core-common": { - "version": "6.5.7", - "resolved": "https://registry.npmjs.org/@storybook/core-common/-/core-common-6.5.7.tgz", - "integrity": "sha512-/b1oQlmhek8tKDu9ky2O1oEk9g2giAPpl192yRz4lIxap5CFJ7RCfgbkq+F3JBXnH2P84BufC0x3dj4jvBhxCw==", + "version": "6.5.8", + "resolved": "https://registry.npmjs.org/@storybook/core-common/-/core-common-6.5.8.tgz", + "integrity": "sha512-ELGKLMx1d0oEA2LT+fsmo85X2RNE1EO+It7B1bw//g7jyf1hmZ7t3lXMZUCqt7eml1qy1N72LDkfmmU+H9H6ww==", "dev": true, "dependencies": { "@babel/core": "^7.12.10", @@ -8707,7 +8716,7 @@ "@babel/preset-react": "^7.12.10", "@babel/preset-typescript": "^7.12.7", "@babel/register": "^7.12.1", - "@storybook/node-logger": "6.5.7", + "@storybook/node-logger": "6.5.8", "@storybook/semver": "^7.3.2", "@types/node": "^14.0.10 || ^16.0.0", "@types/pretty-hrtime": "^1.0.0", @@ -9506,7 +9515,7 @@ "node_modules/@storybook/core-common/node_modules/to-regex-range": { "version": "2.1.1", "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-2.1.1.tgz", - "integrity": "sha1-fIDBe53+vlmeJzZ+DU3VWQFB2zg=", + "integrity": "sha512-ZZWNfCjUokXXDGXFpZehJIkZqq91BcULFq/Pi7M5i4JnxXdhMKAK682z8bCW3o8Hj1wuuzoKcW3DfVzaP6VuNg==", "dev": true, "dependencies": { "is-number": "^3.0.0", @@ -9595,9 +9604,9 @@ "dev": true }, "node_modules/@storybook/core-events": { - "version": "6.5.7", - "resolved": "https://registry.npmjs.org/@storybook/core-events/-/core-events-6.5.7.tgz", - "integrity": "sha512-epqYy67Ypry5QdCt7FpN57/X9uuS7R2+DLFORZIpL/SJG1dIdN4POQ1icWOhPzHl+eiSgaV7e2oPaUsN+LPhJQ==", + "version": "6.5.8", + "resolved": "https://registry.npmjs.org/@storybook/core-events/-/core-events-6.5.8.tgz", + "integrity": "sha512-lzG4Lg65WFYvjs2k/E3CP4+eyPexEGrDyRMO9Pbj9H9x+eosYptauEbT/wXF83bmUWZKLWWVUAZX7hDcxBO8cw==", "dev": true, "dependencies": { "core-js": "^3.8.2" @@ -9608,23 +9617,23 @@ } }, "node_modules/@storybook/core-server": { - "version": 
"6.5.7", - "resolved": "https://registry.npmjs.org/@storybook/core-server/-/core-server-6.5.7.tgz", - "integrity": "sha512-CGwFZ5kmKaCS/+tcrAbqQu4Owq86wXkWRapJB55S8AlUsf3c9gEC8a3+Ed9tZUlmjSH56CnDDfmt7AleToaQ9w==", + "version": "6.5.8", + "resolved": "https://registry.npmjs.org/@storybook/core-server/-/core-server-6.5.8.tgz", + "integrity": "sha512-ti7+MW1xzD9O0JLwgZTwulxhJx5YGPNu+hRpGhJSjKrqGX1h6K6ilmkBSHvyLqpiE+F4mxvqb5Rx3KBIEdEgbw==", "dev": true, "dependencies": { "@discoveryjs/json-ext": "^0.5.3", - "@storybook/builder-webpack4": "6.5.7", - "@storybook/core-client": "6.5.7", - "@storybook/core-common": "6.5.7", - "@storybook/core-events": "6.5.7", + "@storybook/builder-webpack4": "6.5.8", + "@storybook/core-client": "6.5.8", + "@storybook/core-common": "6.5.8", + "@storybook/core-events": "6.5.8", "@storybook/csf": "0.0.2--canary.4566f4d.1", - "@storybook/csf-tools": "6.5.7", - "@storybook/manager-webpack4": "6.5.7", - "@storybook/node-logger": "6.5.7", + "@storybook/csf-tools": "6.5.8", + "@storybook/manager-webpack4": "6.5.8", + "@storybook/node-logger": "6.5.8", "@storybook/semver": "^7.3.2", - "@storybook/store": "6.5.7", - "@storybook/telemetry": "6.5.7", + "@storybook/store": "6.5.8", + "@storybook/telemetry": "6.5.8", "@types/node": "^14.0.10 || ^16.0.0", "@types/node-fetch": "^2.5.7", "@types/pretty-hrtime": "^1.0.0", @@ -10332,7 +10341,7 @@ "node_modules/@storybook/core-server/node_modules/to-regex-range": { "version": "2.1.1", "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-2.1.1.tgz", - "integrity": "sha1-fIDBe53+vlmeJzZ+DU3VWQFB2zg=", + "integrity": "sha512-ZZWNfCjUokXXDGXFpZehJIkZqq91BcULFq/Pi7M5i4JnxXdhMKAK682z8bCW3o8Hj1wuuzoKcW3DfVzaP6VuNg==", "dev": true, "dependencies": { "is-number": "^3.0.0", @@ -10415,9 +10424,9 @@ } }, "node_modules/@storybook/core-server/node_modules/ws": { - "version": "8.7.0", - "resolved": "https://registry.npmjs.org/ws/-/ws-8.7.0.tgz", - "integrity": 
"sha512-c2gsP0PRwcLFzUiA8Mkr37/MI7ilIlHQxaEAtd0uNMbVMoy8puJyafRlm0bV9MbGSabUPeLrRRaqIBcFcA2Pqg==", + "version": "8.8.0", + "resolved": "https://registry.npmjs.org/ws/-/ws-8.8.0.tgz", + "integrity": "sha512-JDAgSYQ1ksuwqfChJusw1LSJ8BizJ2e/vVu5Lxjq3YvNJNlROv1ui4i+c/kUUrPheBvQl4c5UbERhTwKa6QBJQ==", "dev": true, "engines": { "node": ">=10.0.0" @@ -10451,9 +10460,9 @@ } }, "node_modules/@storybook/csf-tools": { - "version": "6.5.7", - "resolved": "https://registry.npmjs.org/@storybook/csf-tools/-/csf-tools-6.5.7.tgz", - "integrity": "sha512-/vBaknzD8c7H/Zsz0gwhmlNlMwe5slZwXadi6rAQXDkKLzaR1kmz4cQFs8yDR1wWpXaGjNvQxOUAGYjFoGQxzA==", + "version": "6.5.8", + "resolved": "https://registry.npmjs.org/@storybook/csf-tools/-/csf-tools-6.5.8.tgz", + "integrity": "sha512-4VrjIMxKcp29OFSMDub52aQOMP4EvtZ5eWZkPeORRNQoJsnQaxhF9GGf71QdSaAQZhMoxdvmpA47ehrFk8Rnfw==", "dev": true, "dependencies": { "@babel/core": "^7.12.10", @@ -10521,14 +10530,14 @@ } }, "node_modules/@storybook/docs-tools": { - "version": "6.5.7", - "resolved": "https://registry.npmjs.org/@storybook/docs-tools/-/docs-tools-6.5.7.tgz", - "integrity": "sha512-Aw9uUsqeuw0Z9fpiwxrstMNjNGB9s1Tm57SpMF8ibjLYBYFf5Apz5CwDX7bm6YFtCweaawx4MeQta8qnQMWCFw==", + "version": "6.5.8", + "resolved": "https://registry.npmjs.org/@storybook/docs-tools/-/docs-tools-6.5.8.tgz", + "integrity": "sha512-CWMW+3LSstvQmHKV5ggPR1beQZTpwCXEhfysZ9u4Yp/4fcoDIuQ7DTOK5uNFynGCGl1FG3lATriEOhEZ3bZCvQ==", "dev": true, "dependencies": { "@babel/core": "^7.12.10", "@storybook/csf": "0.0.2--canary.4566f4d.1", - "@storybook/store": "6.5.7", + "@storybook/store": "6.5.8", "core-js": "^3.8.2", "doctrine": "^3.0.0", "lodash": "^4.17.21", @@ -10540,20 +10549,20 @@ } }, "node_modules/@storybook/manager-webpack4": { - "version": "6.5.7", - "resolved": "https://registry.npmjs.org/@storybook/manager-webpack4/-/manager-webpack4-6.5.7.tgz", - "integrity": "sha512-RmGsr/6PNsafaSm8aTD7e2VXSKT8BQ6Hkg6TAArLoS2TpIUvrNuM2hEqOHzm2POcApC+OE/HN1H0GiXBkH533Q==", + "version": 
"6.5.8", + "resolved": "https://registry.npmjs.org/@storybook/manager-webpack4/-/manager-webpack4-6.5.8.tgz", + "integrity": "sha512-qW5/L3cJHvtNi5ylDxObALZWaAHMsWQlPP8GRxm95NHpff4CfRo/qs7puY9ZeLmJSic0KchoHEH/8AScflLOgA==", "dev": true, "dependencies": { "@babel/core": "^7.12.10", "@babel/plugin-transform-template-literals": "^7.12.1", "@babel/preset-react": "^7.12.10", - "@storybook/addons": "6.5.7", - "@storybook/core-client": "6.5.7", - "@storybook/core-common": "6.5.7", - "@storybook/node-logger": "6.5.7", - "@storybook/theming": "6.5.7", - "@storybook/ui": "6.5.7", + "@storybook/addons": "6.5.8", + "@storybook/core-client": "6.5.8", + "@storybook/core-common": "6.5.8", + "@storybook/node-logger": "6.5.8", + "@storybook/theming": "6.5.8", + "@storybook/ui": "6.5.8", "@types/node": "^14.0.10 || ^16.0.0", "@types/webpack": "^4.41.26", "babel-loader": "^8.0.0", @@ -11316,7 +11325,7 @@ "node_modules/@storybook/manager-webpack4/node_modules/to-regex-range": { "version": "2.1.1", "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-2.1.1.tgz", - "integrity": "sha1-fIDBe53+vlmeJzZ+DU3VWQFB2zg=", + "integrity": "sha512-ZZWNfCjUokXXDGXFpZehJIkZqq91BcULFq/Pi7M5i4JnxXdhMKAK682z8bCW3o8Hj1wuuzoKcW3DfVzaP6VuNg==", "dev": true, "dependencies": { "is-number": "^3.0.0", @@ -11428,20 +11437,20 @@ "dev": true }, "node_modules/@storybook/manager-webpack5": { - "version": "6.5.7", - "resolved": "https://registry.npmjs.org/@storybook/manager-webpack5/-/manager-webpack5-6.5.7.tgz", - "integrity": "sha512-4TZKe71noCRui8sUxSuSqO6zMnCxCLn7dE1dOlCr/UvyZbCaGWACO5olUDQrT+n1glZL8i9L998JGQroksucNw==", + "version": "6.5.8", + "resolved": "https://registry.npmjs.org/@storybook/manager-webpack5/-/manager-webpack5-6.5.8.tgz", + "integrity": "sha512-foW/ZvTqGZAl4TfcfGKdS3RlaBDDAgEjUCbCaVShlZRshZ8tzWBVu3JQFqbPVGslH89T5qp9DUYoN/SJqTUpcg==", "dev": true, "dependencies": { "@babel/core": "^7.12.10", "@babel/plugin-transform-template-literals": "^7.12.1", "@babel/preset-react": 
"^7.12.10", - "@storybook/addons": "6.5.7", - "@storybook/core-client": "6.5.7", - "@storybook/core-common": "6.5.7", - "@storybook/node-logger": "6.5.7", - "@storybook/theming": "6.5.7", - "@storybook/ui": "6.5.7", + "@storybook/addons": "6.5.8", + "@storybook/core-client": "6.5.8", + "@storybook/core-common": "6.5.8", + "@storybook/node-logger": "6.5.8", + "@storybook/theming": "6.5.8", + "@storybook/ui": "6.5.8", "@types/node": "^14.0.10 || ^16.0.0", "babel-loader": "^8.0.0", "case-sensitive-paths-webpack-plugin": "^2.3.0", @@ -11493,9 +11502,9 @@ "dev": true }, "node_modules/@storybook/manager-webpack5/node_modules/acorn": { - "version": "8.7.0", - "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.7.0.tgz", - "integrity": "sha512-V/LGr1APy+PXIwKebEWrkZPwoeoF+w1jiOBUmuxuiUIaOHtob8Qc9BTrYo7VuI5fR8tqsy+buA2WFooR5olqvQ==", + "version": "8.7.1", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.7.1.tgz", + "integrity": "sha512-Xx54uLJQZ19lKygFXOWsscKUbsBZW0CPykPhVQdhIeIwrbPmJzqeASDInc8nKBnp/JT6igTs82qPXz069H8I/A==", "dev": true, "optional": true, "peer": true, @@ -12273,9 +12282,9 @@ } }, "node_modules/@storybook/node-logger": { - "version": "6.5.7", - "resolved": "https://registry.npmjs.org/@storybook/node-logger/-/node-logger-6.5.7.tgz", - "integrity": "sha512-OrHu5p2E5i7P2v2hQAOtZw6Od1e2nrP6L7w5SxUPgccUnKUD9dRX5Y8qbAcPZO3XCkMLjpjAbC1xBXG0eFkn9g==", + "version": "6.5.8", + "resolved": "https://registry.npmjs.org/@storybook/node-logger/-/node-logger-6.5.8.tgz", + "integrity": "sha512-BHdkSipgjnfsh4FRYbV2R0npM5gVx9JLRsDQ0KiTolRpN4SU98kT/6885zb9jZg6I0EY+UG9Qdr3fvL9VLpY1g==", "dev": true, "dependencies": { "@types/npmlog": "^4.1.2", @@ -12360,9 +12369,9 @@ } }, "node_modules/@storybook/postinstall": { - "version": "6.5.7", - "resolved": "https://registry.npmjs.org/@storybook/postinstall/-/postinstall-6.5.7.tgz", - "integrity": "sha512-902JjgB2o+NiiLCPV0b4GHX9SbnY1OkvfvmkqpD3UqWh8djpkSQwvli9npM1J2NEu4BxCqbifYJI7V4JmZbdsw==", + "version": "6.5.8", + 
"resolved": "https://registry.npmjs.org/@storybook/postinstall/-/postinstall-6.5.8.tgz", + "integrity": "sha512-Z6zQnBOaMj+gHtF1XPMpwTIxYRCmh6eNirrJLrkPk5c+fKXtw6+vNCbmPvsyTGxGEHnyn/tYwe1fvwJTHDctUw==", "dev": true, "dependencies": { "core-js": "^3.8.2" @@ -12581,9 +12590,9 @@ } }, "node_modules/@storybook/preset-create-react-app/node_modules/type-fest": { - "version": "2.11.0", - "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-2.11.0.tgz", - "integrity": "sha512-GwRKR1jZMAQP/hVR929DWB5Z2lwSIM/nNcHEfDj2E0vOMhcYbqFxGKE5JaSzMdzmEtWJiamEn6VwHs/YVXVhEQ==", + "version": "2.13.0", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-2.13.0.tgz", + "integrity": "sha512-lPfAm42MxE4/456+QyIaaVBAwgpJb6xZ8PRu09utnhPdWwcyj9vgy6Sq0Z5yNbJ21EdxB5dRU/Qg8bsyAMtlcw==", "dev": true, "optional": true, "peer": true, @@ -12595,17 +12604,17 @@ } }, "node_modules/@storybook/preview-web": { - "version": "6.5.7", - "resolved": "https://registry.npmjs.org/@storybook/preview-web/-/preview-web-6.5.7.tgz", - "integrity": "sha512-EH8gdl334D8EDVL1VJjRURcUou5Sv6BwgismL4E6wjSFmWxL9egxYDnGJJEh3mjIkAtGb0zpksYn/VNWPA8c8A==", + "version": "6.5.8", + "resolved": "https://registry.npmjs.org/@storybook/preview-web/-/preview-web-6.5.8.tgz", + "integrity": "sha512-jEEAgvTVZfFA0B20mRJfLW6dPA5mG5PxUJtjMx6wH4Yw4+i3Sld/U63hTRt7ktpKdrcu4lX9E+PuaRLPq7S2kg==", "dev": true, "dependencies": { - "@storybook/addons": "6.5.7", - "@storybook/channel-postmessage": "6.5.7", - "@storybook/client-logger": "6.5.7", - "@storybook/core-events": "6.5.7", + "@storybook/addons": "6.5.8", + "@storybook/channel-postmessage": "6.5.8", + "@storybook/client-logger": "6.5.8", + "@storybook/core-events": "6.5.8", "@storybook/csf": "0.0.2--canary.4566f4d.1", - "@storybook/store": "6.5.7", + "@storybook/store": "6.5.8", "ansi-to-html": "^0.6.11", "core-js": "^3.8.2", "global": "^4.4.0", @@ -12627,24 +12636,24 @@ } }, "node_modules/@storybook/react": { - "version": "6.5.7", - "resolved": 
"https://registry.npmjs.org/@storybook/react/-/react-6.5.7.tgz", - "integrity": "sha512-jMY1vk1WL1otEODl5BxD1kSh5Eqg+SvZW5CJ7sS6q53i3teOhaGhugvuSTuV9lnBzLOZu8atIdFL0ewdOkpwsg==", + "version": "6.5.8", + "resolved": "https://registry.npmjs.org/@storybook/react/-/react-6.5.8.tgz", + "integrity": "sha512-LdObfhhPb9gAFBtRNb3awYJe1qMiYeda1ppkj0ZvccbV04YrmbW5bzYvfOCvU6D34ugbQJhJyWuvraO/0EJK6w==", "dev": true, "dependencies": { "@babel/preset-flow": "^7.12.1", "@babel/preset-react": "^7.12.10", "@pmmmwh/react-refresh-webpack-plugin": "^0.5.3", - "@storybook/addons": "6.5.7", - "@storybook/client-logger": "6.5.7", - "@storybook/core": "6.5.7", - "@storybook/core-common": "6.5.7", + "@storybook/addons": "6.5.8", + "@storybook/client-logger": "6.5.8", + "@storybook/core": "6.5.8", + "@storybook/core-common": "6.5.8", "@storybook/csf": "0.0.2--canary.4566f4d.1", - "@storybook/docs-tools": "6.5.7", - "@storybook/node-logger": "6.5.7", + "@storybook/docs-tools": "6.5.8", + "@storybook/node-logger": "6.5.8", "@storybook/react-docgen-typescript-plugin": "1.0.2-canary.6.9d540b91e815f8fc2f8829189deb00553559ff63.0", "@storybook/semver": "^7.3.2", - "@storybook/store": "6.5.7", + "@storybook/store": "6.5.8", "@types/estree": "^0.0.51", "@types/node": "^14.14.20 || ^16.0.0", "@types/webpack-env": "^1.16.0", @@ -12970,9 +12979,9 @@ "dev": true }, "node_modules/@storybook/react/node_modules/type-fest": { - "version": "2.11.0", - "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-2.11.0.tgz", - "integrity": "sha512-GwRKR1jZMAQP/hVR929DWB5Z2lwSIM/nNcHEfDj2E0vOMhcYbqFxGKE5JaSzMdzmEtWJiamEn6VwHs/YVXVhEQ==", + "version": "2.13.0", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-2.13.0.tgz", + "integrity": "sha512-lPfAm42MxE4/456+QyIaaVBAwgpJb6xZ8PRu09utnhPdWwcyj9vgy6Sq0Z5yNbJ21EdxB5dRU/Qg8bsyAMtlcw==", "dev": true, "optional": true, "peer": true, @@ -12993,12 +13002,12 @@ } }, "node_modules/@storybook/router": { - "version": "6.5.7", - "resolved": 
"https://registry.npmjs.org/@storybook/router/-/router-6.5.7.tgz", - "integrity": "sha512-edWEdAb8O0rSgdXoBZDDuNlQg2cOmC/nJ6gXj9zBotzmXqsbxWyjKGooG1dU6dnKshUqE1RmWF7/N1WMluLf0A==", + "version": "6.5.8", + "resolved": "https://registry.npmjs.org/@storybook/router/-/router-6.5.8.tgz", + "integrity": "sha512-tseNJpZ2ZzVYowjekUMpGJVVRMrwOkttieD9mRbHrhh+2n7b+SoMKnuLi3ow0xeOyPL8ZDng2FgRjQzQHXA5Sw==", "dev": true, "dependencies": { - "@storybook/client-logger": "6.5.7", + "@storybook/client-logger": "6.5.8", "core-js": "^3.8.2", "regenerator-runtime": "^0.13.7" }, @@ -13028,13 +13037,13 @@ } }, "node_modules/@storybook/source-loader": { - "version": "6.5.7", - "resolved": "https://registry.npmjs.org/@storybook/source-loader/-/source-loader-6.5.7.tgz", - "integrity": "sha512-nj24TSGdF9J1gD5Fj9Z2hPRAQwqBJoBKD/fmTSFZop0qaJOOyeuxZR5022dQh8UWWoBa3WOQADMTNi5RqQZkiA==", + "version": "6.5.8", + "resolved": "https://registry.npmjs.org/@storybook/source-loader/-/source-loader-6.5.8.tgz", + "integrity": "sha512-3bVxXKE2o6lS4WGga/S7WwgITxPQ96qsY+pQ1I7A+e4/cKSmZxlVWM9qfMW2ScmHTVoZE0Ujsmn6DWftxzCyrQ==", "dev": true, "dependencies": { - "@storybook/addons": "6.5.7", - "@storybook/client-logger": "6.5.7", + "@storybook/addons": "6.5.8", + "@storybook/client-logger": "6.5.8", "@storybook/csf": "0.0.2--canary.4566f4d.1", "core-js": "^3.8.2", "estraverse": "^5.2.0", @@ -13101,14 +13110,14 @@ } }, "node_modules/@storybook/store": { - "version": "6.5.7", - "resolved": "https://registry.npmjs.org/@storybook/store/-/store-6.5.7.tgz", - "integrity": "sha512-d64towcdylC6TXNL2oJklCpwN3XcUGgZzQ9zgoV8BUlOlsj9tNq8eo95uzTURnLg1Q5uHoDDKWuXrrKj03HHxw==", + "version": "6.5.8", + "resolved": "https://registry.npmjs.org/@storybook/store/-/store-6.5.8.tgz", + "integrity": "sha512-5rhGjN/O0oLebRv947B0vgapq48qBBBYYOgq4krRUYU2ecS6LUgtAHR/kTa324o9aBO8cnIXHH78jZcSvMiJlQ==", "dev": true, "dependencies": { - "@storybook/addons": "6.5.7", - "@storybook/client-logger": "6.5.7", - "@storybook/core-events": "6.5.7", + 
"@storybook/addons": "6.5.8", + "@storybook/client-logger": "6.5.8", + "@storybook/core-events": "6.5.8", "@storybook/csf": "0.0.2--canary.4566f4d.1", "core-js": "^3.8.2", "fast-deep-equal": "^3.1.3", @@ -13132,13 +13141,13 @@ } }, "node_modules/@storybook/telemetry": { - "version": "6.5.7", - "resolved": "https://registry.npmjs.org/@storybook/telemetry/-/telemetry-6.5.7.tgz", - "integrity": "sha512-RHrjAConMqGIsu1TgNXztWtWOXTvvCHDWyGoLagCgZYgjGJ4sukp+ZtrbkayNDkkWWD0lpMzsdDEYCJuru/Sig==", + "version": "6.5.8", + "resolved": "https://registry.npmjs.org/@storybook/telemetry/-/telemetry-6.5.8.tgz", + "integrity": "sha512-QnAhYF8CwcjC1bT2PK7Zqvo6E42TPl0MY6JS+H6qSZU/BmYeS0It8ZURNfPsA/OzVVLHUkQs96CisKh3N0WWaw==", "dev": true, "dependencies": { - "@storybook/client-logger": "6.5.7", - "@storybook/core-common": "6.5.7", + "@storybook/client-logger": "6.5.8", + "@storybook/core-common": "6.5.8", "chalk": "^4.1.0", "core-js": "^3.8.2", "detect-package-manager": "^2.0.1", @@ -13262,12 +13271,12 @@ } }, "node_modules/@storybook/theming": { - "version": "6.5.7", - "resolved": "https://registry.npmjs.org/@storybook/theming/-/theming-6.5.7.tgz", - "integrity": "sha512-6zp1V84DSBcS8BtFOCJlF2/nIonjQmr+dILPxaM3lCm/X003i2jAQrBKTfPlmzCeDn07PBhzHaRJ3wJskfmeNw==", + "version": "6.5.8", + "resolved": "https://registry.npmjs.org/@storybook/theming/-/theming-6.5.8.tgz", + "integrity": "sha512-1VaKHqj38Ls1bJwDpg3+aEOlvVib/DCFkP6WYrP/AQtNAzxiyw5WkaoRlTVJZvCdu5TxjpG4O6/Ai5TI9QftIg==", "dev": true, "dependencies": { - "@storybook/client-logger": "6.5.7", + "@storybook/client-logger": "6.5.8", "core-js": "^3.8.2", "regenerator-runtime": "^0.13.7" }, @@ -13281,20 +13290,20 @@ } }, "node_modules/@storybook/ui": { - "version": "6.5.7", - "resolved": "https://registry.npmjs.org/@storybook/ui/-/ui-6.5.7.tgz", - "integrity": "sha512-NOg44bc/w7FweuM2fa99PxsgI9qoG2p5vhTQ4MOI/7QnOUDn+EenlapsRos+/Sk2XTaB2QmM43boUkravMSouA==", - "dev": true, - "dependencies": { - "@storybook/addons": "6.5.7", - 
"@storybook/api": "6.5.7", - "@storybook/channels": "6.5.7", - "@storybook/client-logger": "6.5.7", - "@storybook/components": "6.5.7", - "@storybook/core-events": "6.5.7", - "@storybook/router": "6.5.7", + "version": "6.5.8", + "resolved": "https://registry.npmjs.org/@storybook/ui/-/ui-6.5.8.tgz", + "integrity": "sha512-rL09kxgY9pCVbxr/VUK4b5FL5VbALfciZR+50sNT1EcTDb9k0OPeqx7a4Ptc+KNkgyPdSTxUGvhzVqH5PYrhZQ==", + "dev": true, + "dependencies": { + "@storybook/addons": "6.5.8", + "@storybook/api": "6.5.8", + "@storybook/channels": "6.5.8", + "@storybook/client-logger": "6.5.8", + "@storybook/components": "6.5.8", + "@storybook/core-events": "6.5.8", + "@storybook/router": "6.5.8", "@storybook/semver": "^7.3.2", - "@storybook/theming": "6.5.7", + "@storybook/theming": "6.5.8", "core-js": "^3.8.2", "regenerator-runtime": "^0.13.7", "resolve-from": "^5.0.0" @@ -14183,11 +14192,6 @@ "@types/node": "*" } }, - "node_modules/@types/invariant": { - "version": "2.2.33", - "resolved": "https://registry.npmjs.org/@types/invariant/-/invariant-2.2.33.tgz", - "integrity": "sha512-/jUNmS8d4bCKdqslfxW6dg/9Gksfzxz67IYfqApHn+HvHlMVXwYv2zpTDnS/yaK9BB0i0GlBTaYci0EFE62Hmw==" - }, "node_modules/@types/is-function": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/@types/is-function/-/is-function-1.0.1.tgz", @@ -15883,20 +15887,6 @@ "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/ansi-html": { - "version": "0.0.7", - "resolved": "https://registry.npmjs.org/ansi-html/-/ansi-html-0.0.7.tgz", - "integrity": "sha1-gTWEAhliqenm/QOflA0S9WynhZ4=", - "dev": true, - "engines": [ - "node >= 0.8.0" - ], - "optional": true, - "peer": true, - "bin": { - "ansi-html": "bin/ansi-html" - } - }, "node_modules/ansi-html-community": { "version": "0.0.8", "resolved": "https://registry.npmjs.org/ansi-html-community/-/ansi-html-community-0.0.8.tgz", @@ -16274,14 +16264,6 @@ "dev": true, "optional": true }, - "node_modules/async-limiter": { - "version": "1.0.1", - "resolved": 
"https://registry.npmjs.org/async-limiter/-/async-limiter-1.0.1.tgz", - "integrity": "sha512-csOlWGAcRFJaI6m+F2WKdnMKr4HhdhFVBk0H/QbJFMCr+uO2kwohwXQPxw/9OCxp05r5ghVBFSyioixx3gfkNQ==", - "dev": true, - "optional": true, - "peer": true - }, "node_modules/asynckit": { "version": "0.4.0", "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", @@ -18541,7 +18523,8 @@ "node_modules/commondir": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/commondir/-/commondir-1.0.1.tgz", - "integrity": "sha1-3dgA2gxmEnOTzKWVDqloo6rxJTs=" + "integrity": "sha1-3dgA2gxmEnOTzKWVDqloo6rxJTs=", + "dev": true }, "node_modules/compare-versions": { "version": "3.6.0", @@ -19159,7 +19142,7 @@ "node_modules/cpy/node_modules/to-regex-range": { "version": "2.1.1", "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-2.1.1.tgz", - "integrity": "sha1-fIDBe53+vlmeJzZ+DU3VWQFB2zg=", + "integrity": "sha512-ZZWNfCjUokXXDGXFpZehJIkZqq91BcULFq/Pi7M5i4JnxXdhMKAK682z8bCW3o8Hj1wuuzoKcW3DfVzaP6VuNg==", "dev": true, "dependencies": { "is-number": "^3.0.0", @@ -19741,159 +19724,6 @@ "node": ">=0.10.0" } }, - "node_modules/default-gateway": { - "version": "4.2.0", - "resolved": "https://registry.npmjs.org/default-gateway/-/default-gateway-4.2.0.tgz", - "integrity": "sha512-h6sMrVB1VMWVrW13mSc6ia/DwYYw5MN6+exNu1OaJeFac5aSAvwM7lZ0NVfTABuSkQelr4h5oebg3KB1XPdjgA==", - "dev": true, - "optional": true, - "peer": true, - "dependencies": { - "execa": "^1.0.0", - "ip-regex": "^2.1.0" - }, - "engines": { - "node": ">=6" - } - }, - "node_modules/default-gateway/node_modules/cross-spawn": { - "version": "6.0.5", - "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-6.0.5.tgz", - "integrity": "sha512-eTVLrBSt7fjbDygz805pMnstIs2VTBNkRm0qxZd+M7A5XDdxVRWO5MxGBXZhjY4cqLYLdtrGqRf8mBPmzwSpWQ==", - "dev": true, - "optional": true, - "peer": true, - "dependencies": { - "nice-try": "^1.0.4", - "path-key": "^2.0.1", - "semver": "^5.5.0", - "shebang-command": "^1.2.0", - 
"which": "^1.2.9" - }, - "engines": { - "node": ">=4.8" - } - }, - "node_modules/default-gateway/node_modules/execa": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/execa/-/execa-1.0.0.tgz", - "integrity": "sha512-adbxcyWV46qiHyvSp50TKt05tB4tK3HcmF7/nxfAdhnox83seTDbwnaqKO4sXRy7roHAIFqJP/Rw/AuEbX61LA==", - "dev": true, - "optional": true, - "peer": true, - "dependencies": { - "cross-spawn": "^6.0.0", - "get-stream": "^4.0.0", - "is-stream": "^1.1.0", - "npm-run-path": "^2.0.0", - "p-finally": "^1.0.0", - "signal-exit": "^3.0.0", - "strip-eof": "^1.0.0" - }, - "engines": { - "node": ">=6" - } - }, - "node_modules/default-gateway/node_modules/get-stream": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-4.1.0.tgz", - "integrity": "sha512-GMat4EJ5161kIy2HevLlr4luNjBgvmj413KaQA7jt4V8B4RDsfpHk7WQ9GVqfYyyx8OS/L66Kox+rJRNklLK7w==", - "dev": true, - "optional": true, - "peer": true, - "dependencies": { - "pump": "^3.0.0" - }, - "engines": { - "node": ">=6" - } - }, - "node_modules/default-gateway/node_modules/is-stream": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-1.1.0.tgz", - "integrity": "sha1-EtSj3U5o4Lec6428hBc66A2RykQ=", - "dev": true, - "optional": true, - "peer": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/default-gateway/node_modules/npm-run-path": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/npm-run-path/-/npm-run-path-2.0.2.tgz", - "integrity": "sha1-NakjLfo11wZ7TLLd8jV7GHFTbF8=", - "dev": true, - "optional": true, - "peer": true, - "dependencies": { - "path-key": "^2.0.0" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/default-gateway/node_modules/path-key": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/path-key/-/path-key-2.0.1.tgz", - "integrity": "sha1-QRyttXTFoUDTpLGRDUDYDMn0C0A=", - "dev": true, - "optional": true, - "peer": true, - "engines": { - "node": ">=4" - } - }, - 
"node_modules/default-gateway/node_modules/semver": { - "version": "5.7.1", - "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", - "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==", - "dev": true, - "optional": true, - "peer": true, - "bin": { - "semver": "bin/semver" - } - }, - "node_modules/default-gateway/node_modules/shebang-command": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-1.2.0.tgz", - "integrity": "sha1-RKrGW2lbAzmJaMOfNj/uXer98eo=", - "dev": true, - "optional": true, - "peer": true, - "dependencies": { - "shebang-regex": "^1.0.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/default-gateway/node_modules/shebang-regex": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-1.0.0.tgz", - "integrity": "sha1-2kL0l0DAtC2yypcoVxyxkMmO/qM=", - "dev": true, - "optional": true, - "peer": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/default-gateway/node_modules/which": { - "version": "1.3.1", - "resolved": "https://registry.npmjs.org/which/-/which-1.3.1.tgz", - "integrity": "sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ==", - "dev": true, - "optional": true, - "peer": true, - "dependencies": { - "isexe": "^2.0.0" - }, - "bin": { - "which": "bin/which" - } - }, "node_modules/defaults": { "version": "1.0.3", "resolved": "https://registry.npmjs.org/defaults/-/defaults-1.0.3.tgz", @@ -20103,94 +19933,6 @@ "node": ">= 0.8.0" } }, - "node_modules/del": { - "version": "4.1.1", - "resolved": "https://registry.npmjs.org/del/-/del-4.1.1.tgz", - "integrity": "sha512-QwGuEUouP2kVwQenAsOof5Fv8K9t3D8Ca8NxcXKrIpEHjTXK5J2nXLdP+ALI1cgv8wj7KuwBhTwBkOZSJKM5XQ==", - "dev": true, - "optional": true, - "peer": true, - "dependencies": { - "@types/glob": "^7.1.1", - "globby": "^6.1.0", - "is-path-cwd": "^2.0.0", - 
"is-path-in-cwd": "^2.0.0", - "p-map": "^2.0.0", - "pify": "^4.0.1", - "rimraf": "^2.6.3" - }, - "engines": { - "node": ">=6" - } - }, - "node_modules/del/node_modules/array-union": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/array-union/-/array-union-1.0.2.tgz", - "integrity": "sha1-mjRBDk9OPaI96jdb5b5w8kd47Dk=", - "dev": true, - "optional": true, - "peer": true, - "dependencies": { - "array-uniq": "^1.0.1" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/del/node_modules/globby": { - "version": "6.1.0", - "resolved": "https://registry.npmjs.org/globby/-/globby-6.1.0.tgz", - "integrity": "sha1-9abXDoOV4hyFj7BInWTfAkJNUGw=", - "dev": true, - "optional": true, - "peer": true, - "dependencies": { - "array-union": "^1.0.1", - "glob": "^7.0.3", - "object-assign": "^4.0.1", - "pify": "^2.0.0", - "pinkie-promise": "^2.0.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/del/node_modules/globby/node_modules/pify": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/pify/-/pify-2.3.0.tgz", - "integrity": "sha1-7RQaasBDqEnqWISY59yosVMw6Qw=", - "dev": true, - "optional": true, - "peer": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/del/node_modules/p-map": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/p-map/-/p-map-2.1.0.tgz", - "integrity": "sha512-y3b8Kpd8OAN444hxfBbFfj1FY/RjtTd8tzYwhUqNYXx0fXx2iX4maP4Qr6qhIKbQXI02wTLAda4fYUbDagTUFw==", - "dev": true, - "optional": true, - "peer": true, - "engines": { - "node": ">=6" - } - }, - "node_modules/del/node_modules/rimraf": { - "version": "2.7.1", - "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.7.1.tgz", - "integrity": "sha512-uWjbaKIK3T1OSVptzX7Nl6PvQ3qAGtKEtVRjRuazjfL3Bx5eI409VZSqgND+4UNnmzLVdPj9FqFJNPqBZFve4w==", - "dev": true, - "optional": true, - "peer": true, - "dependencies": { - "glob": "^7.1.3" - }, - "bin": { - "rimraf": "bin.js" - } - }, "node_modules/delayed-stream": { "version": "1.0.0", 
"resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", @@ -20789,6 +20531,29 @@ "node": ">= 0.8" } }, + "node_modules/encoding": { + "version": "0.1.13", + "resolved": "https://registry.npmjs.org/encoding/-/encoding-0.1.13.tgz", + "integrity": "sha512-ETBauow1T35Y/WZMkio9jiM0Z5xjHHmJ4XmjZOq1l/dXz3lr2sRn87nJy20RupqSh1F2m3HHPSp8ShIPQJrJ3A==", + "optional": true, + "peer": true, + "dependencies": { + "iconv-lite": "^0.6.2" + } + }, + "node_modules/encoding/node_modules/iconv-lite": { + "version": "0.6.3", + "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.6.3.tgz", + "integrity": "sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw==", + "optional": true, + "peer": true, + "dependencies": { + "safer-buffer": ">= 2.1.2 < 3.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, "node_modules/end-of-stream": { "version": "1.4.4", "resolved": "https://registry.npmjs.org/end-of-stream/-/end-of-stream-1.4.4.tgz", @@ -21015,22 +20780,6 @@ "esbuild-windows-arm64": "0.14.39" } }, - "node_modules/esbuild-linux-64": { - "version": "0.14.39", - "resolved": "https://registry.npmjs.org/esbuild-linux-64/-/esbuild-linux-64-0.14.39.tgz", - "integrity": "sha512-4tcgFDYWdI+UbNMGlua9u1Zhu0N5R6u9tl5WOM8aVnNX143JZoBZLpCuUr5lCKhnD0SCO+5gUyMfupGrHtfggQ==", - "cpu": [ - "x64" - ], - "dev": true, - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=12" - } - }, "node_modules/escalade": { "version": "3.1.1", "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.1.1.tgz", @@ -21395,6 +21144,22 @@ "node": ">=4" } }, + "node_modules/eslint-plugin-css-modules": { + "version": "2.11.0", + "resolved": "https://registry.npmjs.org/eslint-plugin-css-modules/-/eslint-plugin-css-modules-2.11.0.tgz", + "integrity": "sha512-CLvQvJOMlCywZzaI4HVu7QH/ltgNXvCg7giJGiE+sA9wh5zQ+AqTgftAzrERV22wHe1p688wrU/Zwxt1Ry922w==", + "dev": true, + "dependencies": { + "gonzales-pe": "^4.0.3", + "lodash": 
"^4.17.2" + }, + "engines": { + "node": ">=4.0.0" + }, + "peerDependencies": { + "eslint": ">=2.0.0" + } + }, "node_modules/eslint-plugin-flowtype": { "version": "8.0.3", "resolved": "https://registry.npmjs.org/eslint-plugin-flowtype/-/eslint-plugin-flowtype-8.0.3.tgz", @@ -22004,11 +21769,6 @@ "node": ">=8.3.0" } }, - "node_modules/estree-walker": { - "version": "0.6.1", - "resolved": "https://registry.npmjs.org/estree-walker/-/estree-walker-0.6.1.tgz", - "integrity": "sha512-SqmZANLWS0mnatqbSfRP5g8OXZC12Fgg1IwNtLsyHDzJizORW4khDfjPqJZsemPWBB2uqykUah5YpQ6epsqC/w==" - }, "node_modules/esutils": { "version": "2.0.3", "resolved": "https://registry.npmjs.org/esutils/-/esutils-2.0.3.tgz", @@ -22050,20 +21810,6 @@ "node": ">=0.8.x" } }, - "node_modules/eventsource": { - "version": "1.0.7", - "resolved": "https://registry.npmjs.org/eventsource/-/eventsource-1.0.7.tgz", - "integrity": "sha512-4Ln17+vVT0k8aWq+t/bF5arcS3EpT9gYtW66EPacdj/mAFevznsnyoHLPy2BA8gbIQeIHoPsvwmfBftfcG//BQ==", - "dev": true, - "optional": true, - "peer": true, - "dependencies": { - "original": "^1.0.0" - }, - "engines": { - "node": ">=0.12.0" - } - }, "node_modules/evp_bytestokey": { "version": "1.0.3", "resolved": "https://registry.npmjs.org/evp_bytestokey/-/evp_bytestokey-1.0.3.tgz", @@ -22617,20 +22363,6 @@ "url": "https://github.com/sponsors/wooorm" } }, - "node_modules/faye-websocket": { - "version": "0.10.0", - "resolved": "https://registry.npmjs.org/faye-websocket/-/faye-websocket-0.10.0.tgz", - "integrity": "sha1-TkkvjQTftviQA1B/btvy1QHnxvQ=", - "dev": true, - "optional": true, - "peer": true, - "dependencies": { - "websocket-driver": ">=0.5.1" - }, - "engines": { - "node": ">=0.4.0" - } - }, "node_modules/fb-watchman": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/fb-watchman/-/fb-watchman-2.0.1.tgz", @@ -22915,6 +22647,7 @@ "version": "3.3.1", "resolved": "https://registry.npmjs.org/find-cache-dir/-/find-cache-dir-3.3.1.tgz", "integrity": 
"sha512-t2GDMt3oGC/v+BMwzmllWDuJF/xcDtE5j/fCGbqDD7OLuJkj0cfh1YSA5VKPvwMeLFLNDBkwOKZ2X85jGLVftQ==", + "dev": true, "dependencies": { "commondir": "^1.0.1", "make-dir": "^3.0.2", @@ -22931,6 +22664,7 @@ "version": "4.1.0", "resolved": "https://registry.npmjs.org/find-up/-/find-up-4.1.0.tgz", "integrity": "sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw==", + "dev": true, "dependencies": { "locate-path": "^5.0.0", "path-exists": "^4.0.0" @@ -22955,36 +22689,36 @@ } }, "node_modules/firebase": { - "version": "9.8.2", - "resolved": "https://registry.npmjs.org/firebase/-/firebase-9.8.2.tgz", - "integrity": "sha512-cVPpiR18vsLuGWAAVkVhNO6mYsEgYBqawvMI2zxKo2FCtneyBgMwOyWKI8VyCmL5ze5p5QJTPjkoatM6rZkd0Q==", - "dependencies": { - "@firebase/analytics": "0.7.9", - "@firebase/analytics-compat": "0.1.10", - "@firebase/app": "0.7.25", - "@firebase/app-check": "0.5.8", - "@firebase/app-check-compat": "0.2.8", - "@firebase/app-compat": "0.1.26", + "version": "9.8.3", + "resolved": "https://registry.npmjs.org/firebase/-/firebase-9.8.3.tgz", + "integrity": "sha512-PCThy5cFXnbiUtFPJ9vVdcG7wKibOKNR+iuNXf+54xMGJzYb+rM2P8GUqtr2fhVQkfs42uJ6gGKG4soNGkP64w==", + "dependencies": { + "@firebase/analytics": "0.7.10", + "@firebase/analytics-compat": "0.1.11", + "@firebase/app": "0.7.26", + "@firebase/app-check": "0.5.9", + "@firebase/app-check-compat": "0.2.9", + "@firebase/app-compat": "0.1.27", "@firebase/app-types": "0.7.0", - "@firebase/auth": "0.20.2", - "@firebase/auth-compat": "0.2.15", - "@firebase/database": "0.13.0", - "@firebase/database-compat": "0.2.0", - "@firebase/firestore": "3.4.9", - "@firebase/firestore-compat": "0.1.18", - "@firebase/functions": "0.8.1", - "@firebase/functions-compat": "0.2.1", - "@firebase/installations": "0.5.9", - "@firebase/messaging": "0.9.13", - "@firebase/messaging-compat": "0.1.13", - "@firebase/performance": "0.5.9", - "@firebase/performance-compat": "0.1.9", + "@firebase/auth": "0.20.3", + 
"@firebase/auth-compat": "0.2.16", + "@firebase/database": "0.13.1", + "@firebase/database-compat": "0.2.1", + "@firebase/firestore": "3.4.10", + "@firebase/firestore-compat": "0.1.19", + "@firebase/functions": "0.8.2", + "@firebase/functions-compat": "0.2.2", + "@firebase/installations": "0.5.10", + "@firebase/messaging": "0.9.14", + "@firebase/messaging-compat": "0.1.14", + "@firebase/performance": "0.5.10", + "@firebase/performance-compat": "0.1.10", "@firebase/polyfill": "0.3.36", - "@firebase/remote-config": "0.3.8", - "@firebase/remote-config-compat": "0.1.9", - "@firebase/storage": "0.9.6", - "@firebase/storage-compat": "0.1.14", - "@firebase/util": "1.6.0" + "@firebase/remote-config": "0.3.9", + "@firebase/remote-config-compat": "0.1.10", + "@firebase/storage": "0.9.7", + "@firebase/storage-compat": "0.1.15", + "@firebase/util": "1.6.1" } }, "node_modules/flat": { @@ -23362,14 +23096,43 @@ "node": ">=0.10.0" } }, + "node_modules/framer-motion": { + "version": "6.3.11", + "resolved": "https://registry.npmjs.org/framer-motion/-/framer-motion-6.3.11.tgz", + "integrity": "sha512-xQLk+ZSklNs5QNCUmdWPpKMOuWiB8ZETsvcIOWw8xvri9K3TamuifgCI/B6XpaEDR0/V2ZQF2Wm+gUAZrXo+rw==", + "dependencies": { + "framesync": "6.0.1", + "hey-listen": "^1.0.8", + "popmotion": "11.0.3", + "style-value-types": "5.0.0", + "tslib": "^2.1.0" + }, + "optionalDependencies": { + "@emotion/is-prop-valid": "^0.8.2" + }, + "peerDependencies": { + "react": ">=16.8 || ^17.0.0 || ^18.0.0", + "react-dom": ">=16.8 || ^17.0.0 || ^18.0.0" + } + }, + "node_modules/framer-motion/node_modules/tslib": { + "version": "2.4.0", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.4.0.tgz", + "integrity": "sha512-d6xOpEDfsi2CZVlPQzGeux8XMwLT9hssAsaPYExaQMuYskwb+x1x7J371tWlbBdWHroy99KnVB6qIkUbs5X3UQ==" + }, "node_modules/framesync": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/framesync/-/framesync-4.1.0.tgz", - "integrity": 
"sha512-MmgZ4wCoeVxNbx2xp5hN/zPDCbLSKiDt4BbbslK7j/pM2lg5S0vhTNv1v8BCVb99JPIo6hXBFdwzU7Q4qcAaoQ==", + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/framesync/-/framesync-6.0.1.tgz", + "integrity": "sha512-fUY88kXvGiIItgNC7wcTOl0SNRCVXMKSWW2Yzfmn7EKNc+MpCzcz9DhdHcdjbrtN3c6R4H5dTY2jiCpPdysEjA==", "dependencies": { - "hey-listen": "^1.0.5" + "tslib": "^2.1.0" } }, + "node_modules/framesync/node_modules/tslib": { + "version": "2.4.0", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.4.0.tgz", + "integrity": "sha512-d6xOpEDfsi2CZVlPQzGeux8XMwLT9hssAsaPYExaQMuYskwb+x1x7J371tWlbBdWHroy99KnVB6qIkUbs5X3UQ==" + }, "node_modules/fresh": { "version": "0.5.2", "resolved": "https://registry.npmjs.org/fresh/-/fresh-0.5.2.tgz", @@ -23417,6 +23180,7 @@ "version": "8.1.0", "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-8.1.0.tgz", "integrity": "sha512-yhlQgA6mnOJUKOsRUFsgJdQCvkKhcz8tlZG5HBQfReYZy46OwLcY+Zia0mtdHsOo9y/hP+CxMN0TU9QxoOtG4g==", + "dev": true, "dependencies": { "graceful-fs": "^4.2.0", "jsonfile": "^4.0.0", @@ -23886,6 +23650,21 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/gonzales-pe": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/gonzales-pe/-/gonzales-pe-4.3.0.tgz", + "integrity": "sha512-otgSPpUmdWJ43VXyiNgEYE4luzHCL2pz4wQ0OnDluC6Eg4Ko3Vexy/SrSynglw/eR+OhkzmqFCZa/OFa/RgAOQ==", + "dev": true, + "dependencies": { + "minimist": "^1.2.5" + }, + "bin": { + "gonzales": "bin/gonzales.js" + }, + "engines": { + "node": ">=0.6.0" + } + }, "node_modules/got": { "version": "9.6.0", "resolved": "https://registry.npmjs.org/got/-/got-9.6.0.tgz", @@ -23923,7 +23702,8 @@ "node_modules/graceful-fs": { "version": "4.2.9", "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.9.tgz", - "integrity": "sha512-NtNxqUcXgpW2iMrfqSfR73Glt39K+BLwWsPs94yR63v45T0Wbej7eRmL5cWfwEgqXnmjQp3zaJTshdRW/qC2ZQ==" + "integrity": 
"sha512-NtNxqUcXgpW2iMrfqSfR73Glt39K+BLwWsPs94yR63v45T0Wbej7eRmL5cWfwEgqXnmjQp3zaJTshdRW/qC2ZQ==", + "dev": true }, "node_modules/gzip-size": { "version": "6.0.0", @@ -24442,14 +24222,6 @@ "node": ">=10" } }, - "node_modules/html-entities": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/html-entities/-/html-entities-1.4.0.tgz", - "integrity": "sha512-8nxjcBcd8wovbeKx7h3wTji4e6+rhaVuPNpMqwWgnHh+N9ToqsCs6XztWRBPQ+UtzsoMAdKZtUENoVzU/EMtZA==", - "dev": true, - "optional": true, - "peer": true - }, "node_modules/html-escaper": { "version": "2.0.2", "resolved": "https://registry.npmjs.org/html-escaper/-/html-escaper-2.0.2.tgz", @@ -24676,160 +24448,6 @@ "node": ">= 6" } }, - "node_modules/http-proxy-middleware": { - "version": "0.19.1", - "resolved": "https://registry.npmjs.org/http-proxy-middleware/-/http-proxy-middleware-0.19.1.tgz", - "integrity": "sha512-yHYTgWMQO8VvwNS22eLLloAkvungsKdKTLO8AJlftYIKNfJr3GK3zK0ZCfzDDGUBttdGc8xFy1mCitvNKQtC3Q==", - "dev": true, - "optional": true, - "peer": true, - "dependencies": { - "http-proxy": "^1.17.0", - "is-glob": "^4.0.0", - "lodash": "^4.17.11", - "micromatch": "^3.1.10" - }, - "engines": { - "node": ">=4.0.0" - } - }, - "node_modules/http-proxy-middleware/node_modules/braces": { - "version": "2.3.2", - "resolved": "https://registry.npmjs.org/braces/-/braces-2.3.2.tgz", - "integrity": "sha512-aNdbnj9P8PjdXU4ybaWLK2IF3jc/EoDYbC7AazW6to3TRsfXxscC9UXOB5iDiEQrkyIbWp2SLQda4+QAa7nc3w==", - "dev": true, - "optional": true, - "peer": true, - "dependencies": { - "arr-flatten": "^1.1.0", - "array-unique": "^0.3.2", - "extend-shallow": "^2.0.1", - "fill-range": "^4.0.0", - "isobject": "^3.0.1", - "repeat-element": "^1.1.2", - "snapdragon": "^0.8.1", - "snapdragon-node": "^2.0.1", - "split-string": "^3.0.2", - "to-regex": "^3.0.1" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/http-proxy-middleware/node_modules/braces/node_modules/extend-shallow": { - "version": "2.0.1", - "resolved": 
"https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz", - "integrity": "sha1-Ua99YUrZqfYQ6huvu5idaxxWiQ8=", - "dev": true, - "optional": true, - "peer": true, - "dependencies": { - "is-extendable": "^0.1.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/http-proxy-middleware/node_modules/fill-range": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-4.0.0.tgz", - "integrity": "sha1-1USBHUKPmOsGpj3EAtJAPDKMOPc=", - "dev": true, - "optional": true, - "peer": true, - "dependencies": { - "extend-shallow": "^2.0.1", - "is-number": "^3.0.0", - "repeat-string": "^1.6.1", - "to-regex-range": "^2.1.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/http-proxy-middleware/node_modules/fill-range/node_modules/extend-shallow": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz", - "integrity": "sha1-Ua99YUrZqfYQ6huvu5idaxxWiQ8=", - "dev": true, - "optional": true, - "peer": true, - "dependencies": { - "is-extendable": "^0.1.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/http-proxy-middleware/node_modules/is-number": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/is-number/-/is-number-3.0.0.tgz", - "integrity": "sha1-JP1iAaR4LPUFYcgQJ2r8fRLXEZU=", - "dev": true, - "optional": true, - "peer": true, - "dependencies": { - "kind-of": "^3.0.2" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/http-proxy-middleware/node_modules/is-number/node_modules/kind-of": { - "version": "3.2.2", - "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-3.2.2.tgz", - "integrity": "sha1-MeohpzS6ubuw8yRm2JOupR5KPGQ=", - "dev": true, - "optional": true, - "peer": true, - "dependencies": { - "is-buffer": "^1.1.5" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/http-proxy-middleware/node_modules/micromatch": { - "version": "3.1.10", - "resolved": 
"https://registry.npmjs.org/micromatch/-/micromatch-3.1.10.tgz", - "integrity": "sha512-MWikgl9n9M3w+bpsY3He8L+w9eF9338xRl8IAO5viDizwSzziFEyUzo2xrrloB64ADbTf8uA8vRqqttDTOmccg==", - "dev": true, - "optional": true, - "peer": true, - "dependencies": { - "arr-diff": "^4.0.0", - "array-unique": "^0.3.2", - "braces": "^2.3.1", - "define-property": "^2.0.2", - "extend-shallow": "^3.0.2", - "extglob": "^2.0.4", - "fragment-cache": "^0.2.1", - "kind-of": "^6.0.2", - "nanomatch": "^1.2.9", - "object.pick": "^1.3.0", - "regex-not": "^1.0.0", - "snapdragon": "^0.8.1", - "to-regex": "^3.0.2" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/http-proxy-middleware/node_modules/to-regex-range": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-2.1.1.tgz", - "integrity": "sha1-fIDBe53+vlmeJzZ+DU3VWQFB2zg=", - "dev": true, - "optional": true, - "peer": true, - "dependencies": { - "is-number": "^3.0.0", - "repeat-string": "^1.6.1" - }, - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/http2-client": { "version": "1.3.5", "resolved": "https://registry.npmjs.org/http2-client/-/http2-client-1.3.5.tgz", @@ -25517,21 +25135,6 @@ "node": ">=8" } }, - "node_modules/internal-ip": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/internal-ip/-/internal-ip-4.3.0.tgz", - "integrity": "sha512-S1zBo1D6zcsyuC6PMmY5+55YMILQ9av8lotMx447Bq6SAgo/sDK6y6uUKmuYhW7eacnIhFfsPmCNYdDzsnnDCg==", - "dev": true, - "optional": true, - "peer": true, - "dependencies": { - "default-gateway": "^4.2.0", - "ipaddr.js": "^1.9.0" - }, - "engines": { - "node": ">=6" - } - }, "node_modules/internal-slot": { "version": "1.0.3", "resolved": "https://registry.npmjs.org/internal-slot/-/internal-slot-1.0.3.tgz", @@ -25590,17 +25193,6 @@ "integrity": "sha1-vd7XARQpCCjAoDnnLvJfWq7ENUo=", "dev": true }, - "node_modules/ip-regex": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/ip-regex/-/ip-regex-2.1.0.tgz", - "integrity": 
"sha1-+ni/XS5pE8kRzp+BnuUUa7bYROk=", - "dev": true, - "optional": true, - "peer": true, - "engines": { - "node": ">=4" - } - }, "node_modules/ipaddr.js": { "version": "1.9.1", "resolved": "https://registry.npmjs.org/ipaddr.js/-/ipaddr.js-1.9.1.tgz", @@ -26050,34 +25642,6 @@ "node": ">=6" } }, - "node_modules/is-path-in-cwd": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/is-path-in-cwd/-/is-path-in-cwd-2.1.0.tgz", - "integrity": "sha512-rNocXHgipO+rvnP6dk3zI20RpOtrAM/kzbB258Uw5BWr3TpXi861yzjo16Dn4hUox07iw5AyeMLHWsujkjzvRQ==", - "dev": true, - "optional": true, - "peer": true, - "dependencies": { - "is-path-inside": "^2.1.0" - }, - "engines": { - "node": ">=6" - } - }, - "node_modules/is-path-inside": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/is-path-inside/-/is-path-inside-2.1.0.tgz", - "integrity": "sha512-wiyhTzfDWsvwAW53OBWF5zuvaOGlZ6PwYxAbPVDhpm+gM09xKQGjBq/8uYN12aDvMxnAnq3dxTyoSoRNmg5YFg==", - "dev": true, - "optional": true, - "peer": true, - "dependencies": { - "path-is-inside": "^1.0.2" - }, - "engines": { - "node": ">=6" - } - }, "node_modules/is-plain-obj": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/is-plain-obj/-/is-plain-obj-3.0.0.tgz", @@ -30064,14 +29628,6 @@ "integrity": "sha1-nbe1lJatPzz+8wp1FC0tkwrXJlE=", "dev": true }, - "node_modules/json3": { - "version": "3.3.3", - "resolved": "https://registry.npmjs.org/json3/-/json3-3.3.3.tgz", - "integrity": "sha512-c7/8mbUsKigAbLkD5B010BK4D9LZm7A1pNItkEwiUZRpIN66exu/e7YQWysGun+TRKaJp8MhemM+VkfWv42aCA==", - "dev": true, - "optional": true, - "peer": true - }, "node_modules/json5": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/json5/-/json5-1.0.1.tgz", @@ -30094,6 +29650,7 @@ "version": "4.0.0", "resolved": "https://registry.npmjs.org/jsonfile/-/jsonfile-4.0.0.tgz", "integrity": "sha1-h3Gq4HmbZAdrdmQPygWPnBDjPss=", + "dev": true, "optionalDependencies": { "graceful-fs": "^4.1.6" } @@ -30189,14 +29746,6 @@ "json-buffer": "3.0.0" } }, 
- "node_modules/killable": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/killable/-/killable-1.0.1.tgz", - "integrity": "sha512-LzqtLKlUwirEUyl/nicirVmNiPvYs7l5n8wOPP7fyJVpUPkvCnW/vuiXGpylGUlnPDnB7311rARzAt3Mhswpjg==", - "dev": true, - "optional": true, - "peer": true - }, "node_modules/kind-of": { "version": "6.0.3", "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-6.0.3.tgz", @@ -30571,7 +30120,7 @@ "node_modules/load-json-file/node_modules/strip-bom": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/strip-bom/-/strip-bom-2.0.0.tgz", - "integrity": "sha1-YhmoVhZSBJHzV4i9vxRHqZx+aw4=", + "integrity": "sha512-kwrX1y7czp1E69n2ajbG65mIo9dqvJ+8aBQXOGVxqwvNbsXdFM6Lq37dLAY3mknUwru8CfcCbfOLL/gMo+fi3g==", "dev": true, "optional": true, "dependencies": { @@ -30608,6 +30157,7 @@ "version": "5.0.0", "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-5.0.0.tgz", "integrity": "sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g==", + "dev": true, "dependencies": { "p-locate": "^4.1.0" }, @@ -30875,21 +30425,6 @@ "node": ">=8" } }, - "node_modules/loglevel": { - "version": "1.7.1", - "resolved": "https://registry.npmjs.org/loglevel/-/loglevel-1.7.1.tgz", - "integrity": "sha512-Hesni4s5UkWkwCGJMQGAh71PaLUmKFM60dHvq0zi/vDhhrzuk+4GgNbTXJ12YYQJn6ZKBDNIjYcuQGKudvqrIw==", - "dev": true, - "optional": true, - "peer": true, - "engines": { - "node": ">= 0.6.0" - }, - "funding": { - "type": "tidelift", - "url": "https://tidelift.com/funding/github/npm/loglevel" - } - }, "node_modules/long": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/long/-/long-4.0.0.tgz", @@ -31001,6 +30536,7 @@ "version": "3.1.0", "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-3.1.0.tgz", "integrity": "sha512-g3FeP20LNwhALb/6Cz6Dd4F2ngze0jz7tbzrD2wAV+o9FeNHe4rL+yK2md0J/fiSf1sa1ADhXqi5+oVwOM/eGw==", + "dev": true, "dependencies": { "semver": "^6.0.0" }, @@ -31796,7 +31332,7 @@ 
"node_modules/meow/node_modules/strip-indent": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/strip-indent/-/strip-indent-1.0.1.tgz", - "integrity": "sha1-DHlipq3vp7vUrDZkYKY4VSrhoKI=", + "integrity": "sha512-I5iQq6aFMM62fBEAIB/hXzwJD6EEZ0xEGCX2t7oXqaKPIRgt4WruAQ285BISgdkP+HLGWyeGmNJcpIwFeRYRUA==", "dev": true, "optional": true, "dependencies": { @@ -32930,6 +32466,12 @@ "node": ">=10.0.0" } }, + "node_modules/monaco-editor": { + "version": "0.33.0", + "resolved": "https://registry.npmjs.org/monaco-editor/-/monaco-editor-0.33.0.tgz", + "integrity": "sha512-VcRWPSLIUEgQJQIE0pVT8FcGBIgFoxz7jtqctE+IiCxWugD0DwgyQBcZBhdSrdMC84eumoqMZsGl2GTreOzwqw==", + "peer": true + }, "node_modules/move-concurrently": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/move-concurrently/-/move-concurrently-1.0.1.tgz", @@ -33230,17 +32772,6 @@ "webidl-conversions": "^3.0.0" } }, - "node_modules/node-forge": { - "version": "0.10.0", - "resolved": "https://registry.npmjs.org/node-forge/-/node-forge-0.10.0.tgz", - "integrity": "sha512-PPmu8eEeG9saEUvI97fm4OYxXVB6bFvyNTyiUOBichBpFG8A1Ljw3bY62+5oOjDEMHRnd0Y7HQ+x7uzxOzC6JA==", - "dev": true, - "optional": true, - "peer": true, - "engines": { - "node": ">= 6.0.0" - } - }, "node_modules/node-int64": { "version": "0.4.0", "resolved": "https://registry.npmjs.org/node-int64/-/node-int64-0.4.0.tgz", @@ -33834,9 +33365,9 @@ } }, "node_modules/openapi-types": { - "version": "11.0.1", - "resolved": "https://registry.npmjs.org/openapi-types/-/openapi-types-11.0.1.tgz", - "integrity": "sha512-P2pGRlHFXgP8z6vrp5P/MtftOXYtlIY1A+V0VmioOoo85NN6RSPgGbEprRAUNMIsbfRjnCPdx/r8mi8QRR7grQ==", + "version": "12.0.0", + "resolved": "https://registry.npmjs.org/openapi-types/-/openapi-types-12.0.0.tgz", + "integrity": "sha512-6Wd9k8nmGQHgCbehZCP6wwWcfXcvinhybUTBatuhjRsCxUIujuYFZc9QnGeae75CyHASewBtxs0HX/qwREReUw==", "dev": true, "peer": true }, @@ -33858,31 +33389,6 @@ "opencollective-postinstall": "index.js" } }, - "node_modules/opn": { - 
"version": "5.5.0", - "resolved": "https://registry.npmjs.org/opn/-/opn-5.5.0.tgz", - "integrity": "sha512-PqHpggC9bLV0VeWcdKhkpxY+3JTzetLSqTCWL/z/tFIbI6G8JCjondXklT1JinczLz2Xib62sSp0T/gKT4KksA==", - "dev": true, - "optional": true, - "peer": true, - "dependencies": { - "is-wsl": "^1.1.0" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/opn/node_modules/is-wsl": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/is-wsl/-/is-wsl-1.1.0.tgz", - "integrity": "sha1-HxbkqiKwTRM2tmGIpmrzxgDDpm0=", - "dev": true, - "optional": true, - "peer": true, - "engines": { - "node": ">=4" - } - }, "node_modules/optionator": { "version": "0.9.1", "resolved": "https://registry.npmjs.org/optionator/-/optionator-0.9.1.tgz", @@ -33993,17 +33499,6 @@ "node": ">=8" } }, - "node_modules/original": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/original/-/original-1.0.2.tgz", - "integrity": "sha512-hyBVl6iqqUOJ8FqRe+l/gS8H+kKYjrEndd5Pm1MfBtsEKA038HkkdbAl/72EAXGyonD/PFsvmVG+EvcIpliMBg==", - "dev": true, - "optional": true, - "peer": true, - "dependencies": { - "url-parse": "^1.4.3" - } - }, "node_modules/orval": { "version": "6.8.1", "resolved": "https://registry.npmjs.org/orval/-/orval-6.8.1.tgz", @@ -34380,6 +33875,7 @@ "version": "2.3.0", "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz", "integrity": "sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==", + "dev": true, "dependencies": { "p-try": "^2.0.0" }, @@ -34394,6 +33890,7 @@ "version": "4.1.0", "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-4.1.0.tgz", "integrity": "sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A==", + "dev": true, "dependencies": { "p-limit": "^2.2.0" }, @@ -34416,20 +33913,6 @@ "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/p-retry": { - "version": "3.0.1", - "resolved": 
"https://registry.npmjs.org/p-retry/-/p-retry-3.0.1.tgz", - "integrity": "sha512-XE6G4+YTTkT2a0UWb2kjZe8xNwf8bIbnqpc/IS/idOBVhyves0mK5OJgeocjx7q5pvX/6m23xuzVPYT1uGM73w==", - "dev": true, - "optional": true, - "peer": true, - "dependencies": { - "retry": "^0.12.0" - }, - "engines": { - "node": ">=6" - } - }, "node_modules/p-timeout": { "version": "3.2.0", "resolved": "https://registry.npmjs.org/p-timeout/-/p-timeout-3.2.0.tgz", @@ -34446,6 +33929,7 @@ "version": "2.2.0", "resolved": "https://registry.npmjs.org/p-try/-/p-try-2.2.0.tgz", "integrity": "sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==", + "dev": true, "engines": { "node": ">=6" } @@ -34689,6 +34173,7 @@ "version": "4.0.0", "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz", "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==", + "dev": true, "engines": { "node": ">=8" } @@ -34701,14 +34186,6 @@ "node": ">=0.10.0" } }, - "node_modules/path-is-inside": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/path-is-inside/-/path-is-inside-1.0.2.tgz", - "integrity": "sha1-NlQX3t5EQw0cEa9hAn+s8HS9/FM=", - "dev": true, - "optional": true, - "peer": true - }, "node_modules/path-key": { "version": "3.1.1", "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", @@ -34721,7 +34198,8 @@ "node_modules/path-parse": { "version": "1.0.7", "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.7.tgz", - "integrity": "sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==" + "integrity": "sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==", + "dev": true }, "node_modules/path-to-regexp": { "version": "6.2.0", @@ -34832,6 +34310,7 @@ "version": "4.2.0", "resolved": "https://registry.npmjs.org/pkg-dir/-/pkg-dir-4.2.0.tgz", "integrity": 
"sha512-HRDzbaKjC+AOWVXxAU/x54COGeIv9eb+6CkDSQoNTt4XyWoIJvuPsXizxu/Fr23EiekbtZwmh1IcIG/l/a10GQ==", + "dev": true, "dependencies": { "find-up": "^4.0.0" }, @@ -34940,33 +34419,21 @@ } }, "node_modules/popmotion": { - "version": "8.7.5", - "resolved": "https://registry.npmjs.org/popmotion/-/popmotion-8.7.5.tgz", - "integrity": "sha512-p85l/qrOuLTQZ+aGfyB8cqOzDRWgiSFN941jSrj9CsWeJzUn+jiGSWJ50sr59gWAZ8TKIvqdDowqFlScc0NEyw==", - "dependencies": { - "@popmotion/easing": "^1.0.1", - "@popmotion/popcorn": "^0.4.4", - "framesync": "^4.0.0", - "hey-listen": "^1.0.5", - "style-value-types": "^3.1.7", - "stylefire": "^7.0.1", - "tslib": "^1.10.0" - } - }, - "node_modules/popmotion-pose": { - "version": "3.4.11", - "resolved": "https://registry.npmjs.org/popmotion-pose/-/popmotion-pose-3.4.11.tgz", - "integrity": "sha512-KjaevePyC1+Q3ylIcBO3YMhCouE1a/3bvtBXThrwz44fw1yXCUQagPJGkGirXI/J1xF+w3Lx3bpkkgwArizpEQ==", + "version": "11.0.3", + "resolved": "https://registry.npmjs.org/popmotion/-/popmotion-11.0.3.tgz", + "integrity": "sha512-Y55FLdj3UxkR7Vl3s7Qr4e9m0onSnP8W7d/xQLsoJM40vs6UKHFdygs6SWryasTZYqugMjm3BepCF4CWXDiHgA==", "dependencies": { - "@popmotion/easing": "^1.0.1", - "hey-listen": "^1.0.5", - "popmotion": "^8.7.1", - "pose-core": "^2.1.1", - "style-value-types": "^3.0.6", - "ts-essentials": "^1.0.3", - "tslib": "^1.10.0" + "framesync": "6.0.1", + "hey-listen": "^1.0.8", + "style-value-types": "5.0.0", + "tslib": "^2.1.0" } }, + "node_modules/popmotion/node_modules/tslib": { + "version": "2.4.0", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.4.0.tgz", + "integrity": "sha512-d6xOpEDfsi2CZVlPQzGeux8XMwLT9hssAsaPYExaQMuYskwb+x1x7J371tWlbBdWHroy99KnVB6qIkUbs5X3UQ==" + }, "node_modules/portfinder": { "version": "1.0.28", "resolved": "https://registry.npmjs.org/portfinder/-/portfinder-1.0.28.tgz", @@ -34990,36 +34457,6 @@ "ms": "^2.1.1" } }, - "node_modules/pose-core": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/pose-core/-/pose-core-2.1.1.tgz", 
- "integrity": "sha512-fV1sDfu80debHmKerikypqGoORMEUHVwGh/BlWnqUSmmzQGYIg8neDrdwe66hFeRO+adr2qS4ZERSu/ZVjOiSQ==", - "dependencies": { - "@types/invariant": "^2.2.29", - "@types/node": "^10.0.5", - "hey-listen": "^1.0.5", - "rollup-plugin-typescript2": "^0.25.2", - "tslib": "^1.10.0", - "typescript": "^3.7.2" - } - }, - "node_modules/pose-core/node_modules/@types/node": { - "version": "10.17.32", - "resolved": "https://registry.npmjs.org/@types/node/-/node-10.17.32.tgz", - "integrity": "sha512-EUq+cjH/3KCzQHikGnNbWAGe548IFLSm93Vl8xA7EuYEEATiyOVDyEVuGkowL7c9V69FF/RiZSAOCFPApMs/ig==" - }, - "node_modules/pose-core/node_modules/typescript": { - "version": "3.9.7", - "resolved": "https://registry.npmjs.org/typescript/-/typescript-3.9.7.tgz", - "integrity": "sha512-BLbiRkiBzAwsjut4x/dsibSTB6yWpwT5qWmC2OfuCg3GgVQCSgMs4vEctYPhsaGtd0AeuuHMkjZ2h2WG8MSzRw==", - "bin": { - "tsc": "bin/tsc", - "tsserver": "bin/tsserver" - }, - "engines": { - "node": ">=4.2.0" - } - }, "node_modules/posix-character-classes": { "version": "0.1.1", "resolved": "https://registry.npmjs.org/posix-character-classes/-/posix-character-classes-0.1.1.tgz", @@ -35733,14 +35170,6 @@ "node": ">=0.4.x" } }, - "node_modules/querystringify": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/querystringify/-/querystringify-2.2.0.tgz", - "integrity": "sha512-FIqgj2EUvTa7R50u0rGsyTftzjYmv/a3hO345bZNrqabNqjtgiDMgmo4mkUjd+nzU5oF3dClKqFIPUKybUyqoQ==", - "dev": true, - "optional": true, - "peer": true - }, "node_modules/queue-microtask": { "version": "1.2.2", "resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.2.tgz", @@ -36657,21 +36086,6 @@ "react-dom": ">0.13.0" } }, - "node_modules/react-pose": { - "version": "4.0.10", - "resolved": "https://registry.npmjs.org/react-pose/-/react-pose-4.0.10.tgz", - "integrity": "sha512-OKc5oqKw+nL9FvIokxn8MmaAmkNsWv64hLX9xWWcMWXSgEo745hzYUqDn2viMJ97mf76oPy6Vc+BS4k6Kwj78g==", - "dependencies": { - "@emotion/is-prop-valid": "^0.7.3", - 
"hey-listen": "^1.0.5", - "popmotion-pose": "^3.4.10", - "tslib": "^1.10.0" - }, - "peerDependencies": { - "react": "^16.3.2", - "react-dom": "^16.3.2" - } - }, "node_modules/react-query": { "version": "3.39.1", "resolved": "https://registry.npmjs.org/react-query/-/react-query-3.39.1.tgz", @@ -36888,9 +36302,9 @@ "dev": true }, "node_modules/react-scripts/node_modules/acorn": { - "version": "8.7.0", - "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.7.0.tgz", - "integrity": "sha512-V/LGr1APy+PXIwKebEWrkZPwoeoF+w1jiOBUmuxuiUIaOHtob8Qc9BTrYo7VuI5fR8tqsy+buA2WFooR5olqvQ==", + "version": "8.7.1", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.7.1.tgz", + "integrity": "sha512-Xx54uLJQZ19lKygFXOWsscKUbsBZW0CPykPhVQdhIeIwrbPmJzqeASDInc8nKBnp/JT6igTs82qPXz069H8I/A==", "dev": true, "optional": true, "peer": true, @@ -39191,9 +38605,9 @@ "dev": true }, "node_modules/react-scripts/node_modules/type-fest": { - "version": "2.11.0", - "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-2.11.0.tgz", - "integrity": "sha512-GwRKR1jZMAQP/hVR929DWB5Z2lwSIM/nNcHEfDj2E0vOMhcYbqFxGKE5JaSzMdzmEtWJiamEn6VwHs/YVXVhEQ==", + "version": "2.13.0", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-2.13.0.tgz", + "integrity": "sha512-lPfAm42MxE4/456+QyIaaVBAwgpJb6xZ8PRu09utnhPdWwcyj9vgy6Sq0Z5yNbJ21EdxB5dRU/Qg8bsyAMtlcw==", "dev": true, "optional": true, "peer": true, @@ -41071,7 +40485,7 @@ "node_modules/renderkid/node_modules/strip-ansi": { "version": "3.0.1", "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-3.0.1.tgz", - "integrity": "sha1-ajhfuIU9lS1f8F0Oiq+UJ43GPc8=", + "integrity": "sha512-VhumSSbBqDTP8p2ZLKj40UjBCV4+v8bUSEpUb4KjRgWk9pbqGF4REFj6KEagidb2f/M6AzC0EmFyDNGaw9OCzg==", "dev": true, "dependencies": { "ansi-regex": "^2.0.0" @@ -41137,14 +40551,6 @@ "node": ">=0.10.0" } }, - "node_modules/require-main-filename": { - "version": "2.0.0", - "resolved": 
"https://registry.npmjs.org/require-main-filename/-/require-main-filename-2.0.0.tgz", - "integrity": "sha512-NKN5kMDylKuldxYLSUfrbo5Tuzh4hd+2E8NPPX02mZtn1VuREQToYe/ZdlJy+J3uCpfaiGF05e7B8W0iXbQHmg==", - "dev": true, - "optional": true, - "peer": true - }, "node_modules/requires-port": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/requires-port/-/requires-port-1.0.0.tgz", @@ -41169,6 +40575,7 @@ "version": "1.12.0", "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.12.0.tgz", "integrity": "sha512-B/dOmuoAik5bKcD6s6nXDCjzUKnaDvdkRyAk6rsmsKLipWj4797iothd7jmmUhWTfinVMU+wc56rYKsit2Qy4w==", + "dev": true, "dependencies": { "path-parse": "^1.0.6" } @@ -41316,17 +40723,6 @@ "node": ">=0.12" } }, - "node_modules/retry": { - "version": "0.12.0", - "resolved": "https://registry.npmjs.org/retry/-/retry-0.12.0.tgz", - "integrity": "sha1-G0KmJmoh8HQh0bC1S33BZ7AcATs=", - "dev": true, - "optional": true, - "peer": true, - "engines": { - "node": ">= 4" - } - }, "node_modules/reusify": { "version": "1.0.4", "resolved": "https://registry.npmjs.org/reusify/-/reusify-1.0.4.tgz", @@ -41371,6 +40767,7 @@ "version": "2.67.3", "resolved": "https://registry.npmjs.org/rollup/-/rollup-2.67.3.tgz", "integrity": "sha512-G/x1vUwbGtP6O5ZM8/sWr8+p7YfZhI18pPqMRtMYMWSbHjKZ/ajHGiM+GWNTlWyOR0EHIdT8LHU+Z4ciIZ1oBw==", + "dev": true, "bin": { "rollup": "dist/bin/rollup" }, @@ -41397,9 +40794,9 @@ } }, "node_modules/rollup-plugin-terser/node_modules/acorn": { - "version": "8.7.0", - "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.7.0.tgz", - "integrity": "sha512-V/LGr1APy+PXIwKebEWrkZPwoeoF+w1jiOBUmuxuiUIaOHtob8Qc9BTrYo7VuI5fR8tqsy+buA2WFooR5olqvQ==", + "version": "8.7.1", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.7.1.tgz", + "integrity": "sha512-Xx54uLJQZ19lKygFXOWsscKUbsBZW0CPykPhVQdhIeIwrbPmJzqeASDInc8nKBnp/JT6igTs82qPXz069H8I/A==", "dev": true, "optional": true, "peer": true, @@ -41459,35 +40856,6 @@ } } }, - "node_modules/rollup-plugin-typescript2": { 
- "version": "0.25.3", - "resolved": "https://registry.npmjs.org/rollup-plugin-typescript2/-/rollup-plugin-typescript2-0.25.3.tgz", - "integrity": "sha512-ADkSaidKBovJmf5VBnZBZe+WzaZwofuvYdzGAKTN/J4hN7QJCFYAq7IrH9caxlru6T5qhX41PNFS1S4HqhsGQg==", - "dependencies": { - "find-cache-dir": "^3.0.0", - "fs-extra": "8.1.0", - "resolve": "1.12.0", - "rollup-pluginutils": "2.8.1", - "tslib": "1.10.0" - }, - "peerDependencies": { - "rollup": ">=1.26.3", - "typescript": ">=2.4.0" - } - }, - "node_modules/rollup-plugin-typescript2/node_modules/tslib": { - "version": "1.10.0", - "resolved": "https://registry.npmjs.org/tslib/-/tslib-1.10.0.tgz", - "integrity": "sha512-qOebF53frne81cf0S9B41ByenJ3/IuH8yJKngAX35CmiZySA0khhkovshKK+jGCaMnVomla7gVlIcc3EvKPbTQ==" - }, - "node_modules/rollup-pluginutils": { - "version": "2.8.1", - "resolved": "https://registry.npmjs.org/rollup-pluginutils/-/rollup-pluginutils-2.8.1.tgz", - "integrity": "sha512-J5oAoysWar6GuZo0s+3bZ6sVZAC0pfqKz68De7ZgDi5z63jOVZn1uJL/+z1jeKHNbGII8kAyHF5q8LnxSX5lQg==", - "dependencies": { - "estree-walker": "^0.6.1" - } - }, "node_modules/rsvp": { "version": "4.8.5", "resolved": "https://registry.npmjs.org/rsvp/-/rsvp-4.8.5.tgz", @@ -41594,7 +40962,7 @@ "version": "2.1.2", "resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz", "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==", - "dev": true + "devOptional": true }, "node_modules/sane": { "version": "4.1.0", @@ -41838,7 +41206,7 @@ "node_modules/sane/node_modules/shebang-command": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-1.2.0.tgz", - "integrity": "sha1-RKrGW2lbAzmJaMOfNj/uXer98eo=", + "integrity": "sha512-EV3L1+UQWGor21OmnvojK36mhg+TyIKDh3iFBKBohr5xeXIhNBcx8oWdgkTEEQ+BEFFYdLRuqMfd5L84N1V5Vg==", "dev": true, "dependencies": { "shebang-regex": "^1.0.0" @@ -41850,7 +41218,7 @@ "node_modules/sane/node_modules/shebang-regex": { "version": 
"1.0.0", "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-1.0.0.tgz", - "integrity": "sha1-2kL0l0DAtC2yypcoVxyxkMmO/qM=", + "integrity": "sha512-wpoSFAxys6b2a2wHZ1XpDSgD7N9iVjg29Ph9uV/uaP9Ex/KXlkTZTeddxDPSYQpgvzKLGJke2UU0AzoGCjNIvQ==", "dev": true, "engines": { "node": ">=0.10.0" @@ -41859,7 +41227,7 @@ "node_modules/sane/node_modules/to-regex-range": { "version": "2.1.1", "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-2.1.1.tgz", - "integrity": "sha1-fIDBe53+vlmeJzZ+DU3VWQFB2zg=", + "integrity": "sha512-ZZWNfCjUokXXDGXFpZehJIkZqq91BcULFq/Pi7M5i4JnxXdhMKAK682z8bCW3o8Hj1wuuzoKcW3DfVzaP6VuNg==", "dev": true, "dependencies": { "is-number": "^3.0.0", @@ -41945,9 +41313,9 @@ "dev": true }, "node_modules/sass": { - "version": "1.52.2", - "resolved": "https://registry.npmjs.org/sass/-/sass-1.52.2.tgz", - "integrity": "sha512-mfHB2VSeFS7sZlPv9YohB9GB7yWIgQNTGniQwfQ04EoQN0wsQEv7SwpCwy/x48Af+Z3vDeFXz+iuXM3HK/phZQ==", + "version": "1.52.3", + "resolved": "https://registry.npmjs.org/sass/-/sass-1.52.3.tgz", + "integrity": "sha512-LNNPJ9lafx+j1ArtA7GyEJm9eawXN8KlA1+5dF6IZyoONg1Tyo/g+muOsENWJH/2Q1FHbbV4UwliU0cXMa/VIA==", "dependencies": { "chokidar": ">=3.0.0 <4.0.0", "immutable": "^4.0.0", @@ -42040,21 +41408,11 @@ "node": ">= 10.15.0" } }, - "node_modules/selfsigned": { - "version": "1.10.8", - "resolved": "https://registry.npmjs.org/selfsigned/-/selfsigned-1.10.8.tgz", - "integrity": "sha512-2P4PtieJeEwVgTU9QEcwIRDQ/mXJLX8/+I3ur+Pg16nS8oNbrGxEso9NyYWy8NAmXiNl4dlAp5MwoNeCWzON4w==", - "dev": true, - "optional": true, - "peer": true, - "dependencies": { - "node-forge": "^0.10.0" - } - }, "node_modules/semver": { "version": "6.3.0", "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", + "devOptional": true, "bin": { "semver": "bin/semver.js" } @@ -42176,7 +41534,7 @@ "node_modules/serve-favicon": { "version": 
"2.5.0", "resolved": "https://registry.npmjs.org/serve-favicon/-/serve-favicon-2.5.0.tgz", - "integrity": "sha1-k10kDN/g9YBTB/3+ln2IlCosvPA=", + "integrity": "sha512-FMW2RvqNr03x+C0WxTyu6sOv21oOjkq5j8tjquWccwa6ScNyGFOGJVpuS1NmTVGBAHS07xnSKotgf2ehQmf9iA==", "dev": true, "dependencies": { "etag": "~1.8.1", @@ -42320,7 +41678,7 @@ "node_modules/setimmediate": { "version": "1.0.5", "resolved": "https://registry.npmjs.org/setimmediate/-/setimmediate-1.0.5.tgz", - "integrity": "sha1-KQy7Iy4waULX1+qbg3Mqt4VvgoU=" + "integrity": "sha512-MATJdZp8sLqDl/68LfQmbP8zKPLQNV6BIZoIgrscFDQ+RsvK/BxeDQOgyxKKoh0y/8h3BqVFnCqQ/gd+reiIXA==" }, "node_modules/setprototypeof": { "version": "1.2.0", @@ -42691,72 +42049,6 @@ "integrity": "sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g=", "dev": true }, - "node_modules/sockjs": { - "version": "0.3.20", - "resolved": "https://registry.npmjs.org/sockjs/-/sockjs-0.3.20.tgz", - "integrity": "sha512-SpmVOVpdq0DJc0qArhF3E5xsxvaiqGNb73XfgBpK1y3UD5gs8DSo8aCTsuT5pX8rssdc2NDIzANwP9eCAiSdTA==", - "dev": true, - "optional": true, - "peer": true, - "dependencies": { - "faye-websocket": "^0.10.0", - "uuid": "^3.4.0", - "websocket-driver": "0.6.5" - } - }, - "node_modules/sockjs-client": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/sockjs-client/-/sockjs-client-1.4.0.tgz", - "integrity": "sha512-5zaLyO8/nri5cua0VtOrFXBPK1jbL4+1cebT/mmKA1E1ZXOvJrII75bPu0l0k843G/+iAbhEqzyKr0w/eCCj7g==", - "dev": true, - "optional": true, - "peer": true, - "dependencies": { - "debug": "^3.2.5", - "eventsource": "^1.0.7", - "faye-websocket": "~0.11.1", - "inherits": "^2.0.3", - "json3": "^3.3.2", - "url-parse": "^1.4.3" - } - }, - "node_modules/sockjs-client/node_modules/debug": { - "version": "3.2.7", - "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz", - "integrity": "sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==", - "dev": true, - "optional": true, - "peer": true, - "dependencies": { - "ms": "^2.1.1" - } - 
}, - "node_modules/sockjs-client/node_modules/faye-websocket": { - "version": "0.11.3", - "resolved": "https://registry.npmjs.org/faye-websocket/-/faye-websocket-0.11.3.tgz", - "integrity": "sha512-D2y4bovYpzziGgbHYtGCMjlJM36vAl/y+xUyn1C+FVx8szd1E+86KwVw6XvYSzOP8iMpm1X0I4xJD+QtUb36OA==", - "dev": true, - "optional": true, - "peer": true, - "dependencies": { - "websocket-driver": ">=0.5.1" - }, - "engines": { - "node": ">=0.8.0" - } - }, - "node_modules/sockjs/node_modules/uuid": { - "version": "3.4.0", - "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.4.0.tgz", - "integrity": "sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A==", - "deprecated": "Please upgrade to version 7 or higher. Older versions may use Math.random() in certain circumstances, which is known to be problematic. See https://v8.dev/blog/math-random for details.", - "dev": true, - "optional": true, - "peer": true, - "bin": { - "uuid": "bin/uuid" - } - }, "node_modules/socks": { "version": "2.6.2", "resolved": "https://registry.npmjs.org/socks/-/socks-2.6.2.tgz", @@ -43060,6 +42352,11 @@ "stacktrace-gps": "^3.0.4" } }, + "node_modules/state-local": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/state-local/-/state-local-1.0.7.tgz", + "integrity": "sha512-HTEHMNieakEnoe33shBYcZ7NX83ACUjCu8c40iOGEZsngj9zRnkqS9j1pqQPXwobB0ZcVTk27REb7COQ0UR59w==" + }, "node_modules/state-toggle": { "version": "1.0.3", "resolved": "https://registry.npmjs.org/state-toggle/-/state-toggle-1.0.3.tgz", @@ -43525,14 +42822,19 @@ } }, "node_modules/style-value-types": { - "version": "3.1.9", - "resolved": "https://registry.npmjs.org/style-value-types/-/style-value-types-3.1.9.tgz", - "integrity": "sha512-050uqgB7WdvtgacoQKm+4EgKzJExVq0sieKBQQtJiU3Muh6MYcCp4T3M8+dfl6VOF2LR0NNwXBP1QYEed8DfIw==", + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/style-value-types/-/style-value-types-5.0.0.tgz", + "integrity": 
"sha512-08yq36Ikn4kx4YU6RD7jWEv27v4V+PUsOGa4n/as8Et3CuODMJQ00ENeAVXAeydX4Z2j1XHZF1K2sX4mGl18fA==", "dependencies": { "hey-listen": "^1.0.8", - "tslib": "^1.10.0" + "tslib": "^2.1.0" } }, + "node_modules/style-value-types/node_modules/tslib": { + "version": "2.4.0", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.4.0.tgz", + "integrity": "sha512-d6xOpEDfsi2CZVlPQzGeux8XMwLT9hssAsaPYExaQMuYskwb+x1x7J371tWlbBdWHroy99KnVB6qIkUbs5X3UQ==" + }, "node_modules/styled-components": { "version": "5.3.5", "resolved": "https://registry.npmjs.org/styled-components/-/styled-components-5.3.5.tgz", @@ -43576,18 +42878,6 @@ "resolved": "https://registry.npmjs.org/@emotion/memoize/-/memoize-0.7.5.tgz", "integrity": "sha512-igX9a37DR2ZPGYtV6suZ6whr8pTFtyHL3K/oLUotxpSVO2ASaprmAe2Dkq7tBo7CRY7MMDrAa9nuQP9/YG8FxQ==" }, - "node_modules/stylefire": { - "version": "7.0.3", - "resolved": "https://registry.npmjs.org/stylefire/-/stylefire-7.0.3.tgz", - "integrity": "sha512-Q0l7NSeFz/OkX+o6/7Zg3VZxSAZeQzQpYomWmIpOehFM/rJNMSLVX5fgg6Q48ut2ETNKwdhm97mPNU643EBCoQ==", - "dependencies": { - "@popmotion/popcorn": "^0.4.4", - "framesync": "^4.0.0", - "hey-listen": "^1.0.8", - "style-value-types": "^3.1.7", - "tslib": "^1.10.0" - } - }, "node_modules/stylis": { "version": "4.0.6", "resolved": "https://registry.npmjs.org/stylis/-/stylis-4.0.6.tgz", @@ -44012,9 +43302,9 @@ } }, "node_modules/terser-webpack-plugin/node_modules/terser": { - "version": "5.14.0", - "resolved": "https://registry.npmjs.org/terser/-/terser-5.14.0.tgz", - "integrity": "sha512-JC6qfIEkPBd9j1SMO3Pfn+A6w2kQV54tv+ABQLgZr7dA3k/DL/OBoYSWxzVpZev3J+bUHXfr55L8Mox7AaNo6g==", + "version": "5.14.1", + "resolved": "https://registry.npmjs.org/terser/-/terser-5.14.1.tgz", + "integrity": "sha512-+ahUAE+iheqBTDxXhTisdA8hgvbEG1hHOQ9xmNjeUJSoi6DU/gMrKNcfZjHkyY6Alnuyc+ikYJaxxfHkT3+WuQ==", "dev": true, "dependencies": { "@jridgewell/source-map": "^0.3.2", @@ -44172,7 +43462,7 @@ "node_modules/to-arraybuffer": { "version": "1.0.1", "resolved": 
"https://registry.npmjs.org/to-arraybuffer/-/to-arraybuffer-1.0.1.tgz", - "integrity": "sha1-fSKbH8xjfkZsoIEYCDanqr/4P0M=", + "integrity": "sha512-okFlQcoGTi4LQBG/PgSYblw9VOyptsz2KJZqc6qtgGdes8VktzUQkj4BI2blit072iS8VODNcMA+tvnS9dnuMA==", "dev": true }, "node_modules/to-fast-properties": { @@ -44360,11 +43650,6 @@ "resolved": "https://registry.npmjs.org/ts-easing/-/ts-easing-0.2.0.tgz", "integrity": "sha512-Z86EW+fFFh/IFB1fqQ3/+7Zpf9t2ebOAxNI/V6Wo7r5gqiqtxmgTlQ1qbqQcjLKYeSHPTsEmvlJUDg/EuL0uHQ==" }, - "node_modules/ts-essentials": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/ts-essentials/-/ts-essentials-1.0.4.tgz", - "integrity": "sha512-q3N1xS4vZpRouhYHDPwO0bDW3EZ6SK9CrrDHxi/D6BPReSjpVgWIOpLS2o0gSBZm+7q/wyKp6RVM1AeeW7uyfQ==" - }, "node_modules/ts-node": { "version": "10.8.1", "resolved": "https://registry.npmjs.org/ts-node/-/ts-node-10.8.1.tgz", @@ -44598,6 +43883,7 @@ "version": "4.7.3", "resolved": "https://registry.npmjs.org/typescript/-/typescript-4.7.3.tgz", "integrity": "sha512-WOkT3XYvrpXx4vMMqlD+8R8R37fZkjyLGlxavMc4iB8lrl8L0DeTcHbYgw/v0N/z9wAFsgBhcsF0ruoySS22mA==", + "devOptional": true, "bin": { "tsc": "bin/tsc", "tsserver": "bin/tsserver" @@ -44960,6 +44246,7 @@ "version": "0.1.2", "resolved": "https://registry.npmjs.org/universalify/-/universalify-0.1.2.tgz", "integrity": "sha512-rBJeI5CXAlmy1pV+617WB9J63U6XcazHHF2f2dbJix4XzpUF0RS3Zbj0FGIOCAva5P/d/GBOYaACQ1w+0azUkg==", + "dev": true, "engines": { "node": ">= 4.0.0" } @@ -45296,18 +44583,6 @@ "url": "https://opencollective.com/webpack" } }, - "node_modules/url-parse": { - "version": "1.5.9", - "resolved": "https://registry.npmjs.org/url-parse/-/url-parse-1.5.9.tgz", - "integrity": "sha512-HpOvhKBvre8wYez+QhHcYiVvVmeF6DVnuSOOPhe3cTum3BnqHhvKaZm8FU5yTiOu/Jut2ZpB2rA/SbBA1JIGlQ==", - "dev": true, - "optional": true, - "peer": true, - "dependencies": { - "querystringify": "^2.1.1", - "requires-port": "^1.0.0" - } - }, "node_modules/url-parse-lax": { "version": "3.0.0", "resolved": 
"https://registry.npmjs.org/url-parse-lax/-/url-parse-lax-3.0.0.tgz", @@ -45963,7 +45238,7 @@ "node_modules/watchpack-chokidar2/node_modules/to-regex-range": { "version": "2.1.1", "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-2.1.1.tgz", - "integrity": "sha1-fIDBe53+vlmeJzZ+DU3VWQFB2zg=", + "integrity": "sha512-ZZWNfCjUokXXDGXFpZehJIkZqq91BcULFq/Pi7M5i4JnxXdhMKAK682z8bCW3o8Hj1wuuzoKcW3DfVzaP6VuNg==", "dev": true, "optional": true, "dependencies": { @@ -46077,725 +45352,6 @@ "webpack": "^4.0.0 || ^5.0.0" } }, - "node_modules/webpack-dev-server": { - "version": "3.11.0", - "resolved": "https://registry.npmjs.org/webpack-dev-server/-/webpack-dev-server-3.11.0.tgz", - "integrity": "sha512-PUxZ+oSTxogFQgkTtFndEtJIPNmml7ExwufBZ9L2/Xyyd5PnOL5UreWe5ZT7IU25DSdykL9p1MLQzmLh2ljSeg==", - "dev": true, - "optional": true, - "peer": true, - "dependencies": { - "ansi-html": "0.0.7", - "bonjour": "^3.5.0", - "chokidar": "^2.1.8", - "compression": "^1.7.4", - "connect-history-api-fallback": "^1.6.0", - "debug": "^4.1.1", - "del": "^4.1.1", - "express": "^4.17.1", - "html-entities": "^1.3.1", - "http-proxy-middleware": "0.19.1", - "import-local": "^2.0.0", - "internal-ip": "^4.3.0", - "ip": "^1.1.5", - "is-absolute-url": "^3.0.3", - "killable": "^1.0.1", - "loglevel": "^1.6.8", - "opn": "^5.5.0", - "p-retry": "^3.0.1", - "portfinder": "^1.0.26", - "schema-utils": "^1.0.0", - "selfsigned": "^1.10.7", - "semver": "^6.3.0", - "serve-index": "^1.9.1", - "sockjs": "0.3.20", - "sockjs-client": "1.4.0", - "spdy": "^4.0.2", - "strip-ansi": "^3.0.1", - "supports-color": "^6.1.0", - "url": "^0.11.0", - "webpack-dev-middleware": "^3.7.2", - "webpack-log": "^2.0.0", - "ws": "^6.2.1", - "yargs": "^13.3.2" - }, - "bin": { - "webpack-dev-server": "bin/webpack-dev-server.js" - }, - "engines": { - "node": ">= 6.11.5" - }, - "peerDependencies": { - "webpack": "^4.0.0 || ^5.0.0" - }, - "peerDependenciesMeta": { - "webpack-cli": { - "optional": true - } - } - }, - 
"node_modules/webpack-dev-server/node_modules/ansi-regex": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-2.1.1.tgz", - "integrity": "sha1-w7M6te42DYbg5ijwRorn7yfWVN8=", - "dev": true, - "optional": true, - "peer": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/webpack-dev-server/node_modules/anymatch": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/anymatch/-/anymatch-2.0.0.tgz", - "integrity": "sha512-5teOsQWABXHHBFP9y3skS5P3d/WfWXpv3FUpy+LorMrNYaT9pI4oLMQX7jzQ2KklNpGpWHzdCXTDT2Y3XGlZBw==", - "dev": true, - "optional": true, - "peer": true, - "dependencies": { - "micromatch": "^3.1.4", - "normalize-path": "^2.1.1" - } - }, - "node_modules/webpack-dev-server/node_modules/anymatch/node_modules/normalize-path": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-2.1.1.tgz", - "integrity": "sha1-GrKLVW4Zg2Oowab35vogE3/mrtk=", - "dev": true, - "optional": true, - "peer": true, - "dependencies": { - "remove-trailing-separator": "^1.0.1" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/webpack-dev-server/node_modules/binary-extensions": { - "version": "1.13.1", - "resolved": "https://registry.npmjs.org/binary-extensions/-/binary-extensions-1.13.1.tgz", - "integrity": "sha512-Un7MIEDdUC5gNpcGDV97op1Ywk748MpHcFTHoYs6qnj1Z3j7I53VG3nwZhKzoBZmbdRNnb6WRdFlwl7tSDuZGw==", - "dev": true, - "optional": true, - "peer": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/webpack-dev-server/node_modules/braces": { - "version": "2.3.2", - "resolved": "https://registry.npmjs.org/braces/-/braces-2.3.2.tgz", - "integrity": "sha512-aNdbnj9P8PjdXU4ybaWLK2IF3jc/EoDYbC7AazW6to3TRsfXxscC9UXOB5iDiEQrkyIbWp2SLQda4+QAa7nc3w==", - "dev": true, - "optional": true, - "peer": true, - "dependencies": { - "arr-flatten": "^1.1.0", - "array-unique": "^0.3.2", - "extend-shallow": "^2.0.1", - "fill-range": "^4.0.0", - "isobject": "^3.0.1", - 
"repeat-element": "^1.1.2", - "snapdragon": "^0.8.1", - "snapdragon-node": "^2.0.1", - "split-string": "^3.0.2", - "to-regex": "^3.0.1" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/webpack-dev-server/node_modules/braces/node_modules/extend-shallow": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz", - "integrity": "sha1-Ua99YUrZqfYQ6huvu5idaxxWiQ8=", - "dev": true, - "optional": true, - "peer": true, - "dependencies": { - "is-extendable": "^0.1.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/webpack-dev-server/node_modules/camelcase": { - "version": "5.3.1", - "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-5.3.1.tgz", - "integrity": "sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg==", - "dev": true, - "optional": true, - "peer": true, - "engines": { - "node": ">=6" - } - }, - "node_modules/webpack-dev-server/node_modules/chokidar": { - "version": "2.1.8", - "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-2.1.8.tgz", - "integrity": "sha512-ZmZUazfOzf0Nve7duiCKD23PFSCs4JPoYyccjUFF3aQkQadqBhfzhjkwBH2mNOG9cTBwhamM37EIsIkZw3nRgg==", - "deprecated": "Chokidar 2 will break on node v14+. 
Upgrade to chokidar 3 with 15x less dependencies.", - "dev": true, - "optional": true, - "peer": true, - "dependencies": { - "anymatch": "^2.0.0", - "async-each": "^1.0.1", - "braces": "^2.3.2", - "glob-parent": "^3.1.0", - "inherits": "^2.0.3", - "is-binary-path": "^1.0.0", - "is-glob": "^4.0.0", - "normalize-path": "^3.0.0", - "path-is-absolute": "^1.0.0", - "readdirp": "^2.2.1", - "upath": "^1.1.1" - }, - "optionalDependencies": { - "fsevents": "^1.2.7" - } - }, - "node_modules/webpack-dev-server/node_modules/cliui": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/cliui/-/cliui-5.0.0.tgz", - "integrity": "sha512-PYeGSEmmHM6zvoef2w8TPzlrnNpXIjTipYK780YswmIP9vjxmd6Y2a3CB2Ks6/AU8NHjZugXvo8w3oWM2qnwXA==", - "dev": true, - "optional": true, - "peer": true, - "dependencies": { - "string-width": "^3.1.0", - "strip-ansi": "^5.2.0", - "wrap-ansi": "^5.1.0" - } - }, - "node_modules/webpack-dev-server/node_modules/cliui/node_modules/ansi-regex": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-4.1.0.tgz", - "integrity": "sha512-1apePfXM1UOSqw0o9IiFAovVz9M5S1Dg+4TrDwfMewQ6p/rmMueb7tWZjQ1rx4Loy1ArBggoqGpfqqdI4rondg==", - "dev": true, - "optional": true, - "peer": true, - "engines": { - "node": ">=6" - } - }, - "node_modules/webpack-dev-server/node_modules/cliui/node_modules/strip-ansi": { - "version": "5.2.0", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-5.2.0.tgz", - "integrity": "sha512-DuRs1gKbBqsMKIZlrffwlug8MHkcnpjs5VPmL1PAh+mA30U0DTotfDZ0d2UUsXpPmPmMMJ6W773MaA3J+lbiWA==", - "dev": true, - "optional": true, - "peer": true, - "dependencies": { - "ansi-regex": "^4.1.0" - }, - "engines": { - "node": ">=6" - } - }, - "node_modules/webpack-dev-server/node_modules/emoji-regex": { - "version": "7.0.3", - "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-7.0.3.tgz", - "integrity": "sha512-CwBLREIQ7LvYFB0WyRvwhq5N5qPhc6PMjD6bYggFlI5YyDgl+0vxq5VHbMOFqLg7hfWzmu8T5Z1QofhmTIhItA==", - 
"dev": true, - "optional": true, - "peer": true - }, - "node_modules/webpack-dev-server/node_modules/fill-range": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-4.0.0.tgz", - "integrity": "sha1-1USBHUKPmOsGpj3EAtJAPDKMOPc=", - "dev": true, - "optional": true, - "peer": true, - "dependencies": { - "extend-shallow": "^2.0.1", - "is-number": "^3.0.0", - "repeat-string": "^1.6.1", - "to-regex-range": "^2.1.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/webpack-dev-server/node_modules/fill-range/node_modules/extend-shallow": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz", - "integrity": "sha1-Ua99YUrZqfYQ6huvu5idaxxWiQ8=", - "dev": true, - "optional": true, - "peer": true, - "dependencies": { - "is-extendable": "^0.1.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/webpack-dev-server/node_modules/find-up": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/find-up/-/find-up-3.0.0.tgz", - "integrity": "sha512-1yD6RmLI1XBfxugvORwlck6f75tYL+iR0jqwsOrOxMZyGYqUuDhJ0l4AXdO1iX/FTs9cBAMEk1gWSEx1kSbylg==", - "dev": true, - "optional": true, - "peer": true, - "dependencies": { - "locate-path": "^3.0.0" - }, - "engines": { - "node": ">=6" - } - }, - "node_modules/webpack-dev-server/node_modules/fsevents": { - "version": "1.2.13", - "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-1.2.13.tgz", - "integrity": "sha512-oWb1Z6mkHIskLzEJ/XWX0srkpkTQ7vaopMQkyaEIoq0fmtFVxOthb8cCxeT+p3ynTdkk/RZwbgG4brR5BeWECw==", - "deprecated": "fsevents 1 will break on node v14+ and could be using insecure binaries. 
Upgrade to fsevents 2.", - "dev": true, - "hasInstallScript": true, - "optional": true, - "os": [ - "darwin" - ], - "peer": true, - "dependencies": { - "bindings": "^1.5.0", - "nan": "^2.12.1" - }, - "engines": { - "node": ">= 4.0" - } - }, - "node_modules/webpack-dev-server/node_modules/glob-parent": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-3.1.0.tgz", - "integrity": "sha1-nmr2KZ2NO9K9QEMIMr0RPfkGxa4=", - "dev": true, - "optional": true, - "peer": true, - "dependencies": { - "is-glob": "^3.1.0", - "path-dirname": "^1.0.0" - } - }, - "node_modules/webpack-dev-server/node_modules/glob-parent/node_modules/is-glob": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-3.1.0.tgz", - "integrity": "sha1-e6WuJCF4BKxwcHuWkiVnSGzD6Eo=", - "dev": true, - "optional": true, - "peer": true, - "dependencies": { - "is-extglob": "^2.1.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/webpack-dev-server/node_modules/import-local": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/import-local/-/import-local-2.0.0.tgz", - "integrity": "sha512-b6s04m3O+s3CGSbqDIyP4R6aAwAeYlVq9+WUWep6iHa8ETRf9yei1U48C5MmfJmV9AiLYYBKPMq/W+/WRpQmCQ==", - "dev": true, - "optional": true, - "peer": true, - "dependencies": { - "pkg-dir": "^3.0.0", - "resolve-cwd": "^2.0.0" - }, - "bin": { - "import-local-fixture": "fixtures/cli.js" - }, - "engines": { - "node": ">=6" - } - }, - "node_modules/webpack-dev-server/node_modules/is-binary-path": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/is-binary-path/-/is-binary-path-1.0.1.tgz", - "integrity": "sha1-dfFmQrSA8YenEcgUFh/TpKdlWJg=", - "dev": true, - "optional": true, - "peer": true, - "dependencies": { - "binary-extensions": "^1.0.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/webpack-dev-server/node_modules/is-fullwidth-code-point": { - "version": "2.0.0", - "resolved": 
"https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz", - "integrity": "sha1-o7MKXE8ZkYMWeqq5O+764937ZU8=", - "dev": true, - "optional": true, - "peer": true, - "engines": { - "node": ">=4" - } - }, - "node_modules/webpack-dev-server/node_modules/is-number": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/is-number/-/is-number-3.0.0.tgz", - "integrity": "sha1-JP1iAaR4LPUFYcgQJ2r8fRLXEZU=", - "dev": true, - "optional": true, - "peer": true, - "dependencies": { - "kind-of": "^3.0.2" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/webpack-dev-server/node_modules/is-number/node_modules/kind-of": { - "version": "3.2.2", - "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-3.2.2.tgz", - "integrity": "sha1-MeohpzS6ubuw8yRm2JOupR5KPGQ=", - "dev": true, - "optional": true, - "peer": true, - "dependencies": { - "is-buffer": "^1.1.5" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/webpack-dev-server/node_modules/locate-path": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-3.0.0.tgz", - "integrity": "sha512-7AO748wWnIhNqAuaty2ZWHkQHRSNfPVIsPIfwEOWO22AmaoVrWavlOcMR5nzTLNYvp36X220/maaRsrec1G65A==", - "dev": true, - "optional": true, - "peer": true, - "dependencies": { - "p-locate": "^3.0.0", - "path-exists": "^3.0.0" - }, - "engines": { - "node": ">=6" - } - }, - "node_modules/webpack-dev-server/node_modules/micromatch": { - "version": "3.1.10", - "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-3.1.10.tgz", - "integrity": "sha512-MWikgl9n9M3w+bpsY3He8L+w9eF9338xRl8IAO5viDizwSzziFEyUzo2xrrloB64ADbTf8uA8vRqqttDTOmccg==", - "dev": true, - "optional": true, - "peer": true, - "dependencies": { - "arr-diff": "^4.0.0", - "array-unique": "^0.3.2", - "braces": "^2.3.1", - "define-property": "^2.0.2", - "extend-shallow": "^3.0.2", - "extglob": "^2.0.4", - "fragment-cache": "^0.2.1", - "kind-of": "^6.0.2", - "nanomatch": "^1.2.9", - 
"object.pick": "^1.3.0", - "regex-not": "^1.0.0", - "snapdragon": "^0.8.1", - "to-regex": "^3.0.2" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/webpack-dev-server/node_modules/p-locate": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-3.0.0.tgz", - "integrity": "sha512-x+12w/To+4GFfgJhBEpiDcLozRJGegY+Ei7/z0tSLkMmxGZNybVMSfWj9aJn8Z5Fc7dBUNJOOVgPv2H7IwulSQ==", - "dev": true, - "optional": true, - "peer": true, - "dependencies": { - "p-limit": "^2.0.0" - }, - "engines": { - "node": ">=6" - } - }, - "node_modules/webpack-dev-server/node_modules/path-exists": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-3.0.0.tgz", - "integrity": "sha1-zg6+ql94yxiSXqfYENe1mwEP1RU=", - "dev": true, - "optional": true, - "peer": true, - "engines": { - "node": ">=4" - } - }, - "node_modules/webpack-dev-server/node_modules/pkg-dir": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/pkg-dir/-/pkg-dir-3.0.0.tgz", - "integrity": "sha512-/E57AYkoeQ25qkxMj5PBOVgF8Kiu/h7cYS30Z5+R7WaiCCBfLq58ZI/dSeaEKb9WVJV5n/03QwrN3IeWIFllvw==", - "dev": true, - "optional": true, - "peer": true, - "dependencies": { - "find-up": "^3.0.0" - }, - "engines": { - "node": ">=6" - } - }, - "node_modules/webpack-dev-server/node_modules/readable-stream": { - "version": "2.3.7", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.7.tgz", - "integrity": "sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw==", - "dev": true, - "optional": true, - "peer": true, - "dependencies": { - "core-util-is": "~1.0.0", - "inherits": "~2.0.3", - "isarray": "~1.0.0", - "process-nextick-args": "~2.0.0", - "safe-buffer": "~5.1.1", - "string_decoder": "~1.1.1", - "util-deprecate": "~1.0.1" - } - }, - "node_modules/webpack-dev-server/node_modules/readdirp": { - "version": "2.2.1", - "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-2.2.1.tgz", 
- "integrity": "sha512-1JU/8q+VgFZyxwrJ+SVIOsh+KywWGpds3NTqikiKpDMZWScmAYyKIgqkO+ARvNWJfXeXR1zxz7aHF4u4CyH6vQ==", - "dev": true, - "optional": true, - "peer": true, - "dependencies": { - "graceful-fs": "^4.1.11", - "micromatch": "^3.1.10", - "readable-stream": "^2.0.2" - }, - "engines": { - "node": ">=0.10" - } - }, - "node_modules/webpack-dev-server/node_modules/resolve-cwd": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/resolve-cwd/-/resolve-cwd-2.0.0.tgz", - "integrity": "sha1-AKn3OHVW4nA46uIyyqNypqWbZlo=", - "dev": true, - "optional": true, - "peer": true, - "dependencies": { - "resolve-from": "^3.0.0" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/webpack-dev-server/node_modules/resolve-from": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-3.0.0.tgz", - "integrity": "sha1-six699nWiBvItuZTM17rywoYh0g=", - "dev": true, - "optional": true, - "peer": true, - "engines": { - "node": ">=4" - } - }, - "node_modules/webpack-dev-server/node_modules/schema-utils": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-1.0.0.tgz", - "integrity": "sha512-i27Mic4KovM/lnGsy8whRCHhc7VicJajAjTrYg11K9zfZXnYIt4k5F+kZkwjnrhKzLic/HLU4j11mjsz2G/75g==", - "dev": true, - "optional": true, - "peer": true, - "dependencies": { - "ajv": "^6.1.0", - "ajv-errors": "^1.0.0", - "ajv-keywords": "^3.1.0" - }, - "engines": { - "node": ">= 4" - } - }, - "node_modules/webpack-dev-server/node_modules/string_decoder": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", - "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", - "dev": true, - "optional": true, - "peer": true, - "dependencies": { - "safe-buffer": "~5.1.0" - } - }, - "node_modules/webpack-dev-server/node_modules/string-width": { - "version": "3.1.0", - "resolved": 
"https://registry.npmjs.org/string-width/-/string-width-3.1.0.tgz", - "integrity": "sha512-vafcv6KjVZKSgz06oM/H6GDBrAtz8vdhQakGjFIvNrHA6y3HCF1CInLy+QLq8dTJPQ1b+KDUqDFctkdRW44e1w==", - "dev": true, - "optional": true, - "peer": true, - "dependencies": { - "emoji-regex": "^7.0.1", - "is-fullwidth-code-point": "^2.0.0", - "strip-ansi": "^5.1.0" - }, - "engines": { - "node": ">=6" - } - }, - "node_modules/webpack-dev-server/node_modules/string-width/node_modules/ansi-regex": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-4.1.0.tgz", - "integrity": "sha512-1apePfXM1UOSqw0o9IiFAovVz9M5S1Dg+4TrDwfMewQ6p/rmMueb7tWZjQ1rx4Loy1ArBggoqGpfqqdI4rondg==", - "dev": true, - "optional": true, - "peer": true, - "engines": { - "node": ">=6" - } - }, - "node_modules/webpack-dev-server/node_modules/string-width/node_modules/strip-ansi": { - "version": "5.2.0", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-5.2.0.tgz", - "integrity": "sha512-DuRs1gKbBqsMKIZlrffwlug8MHkcnpjs5VPmL1PAh+mA30U0DTotfDZ0d2UUsXpPmPmMMJ6W773MaA3J+lbiWA==", - "dev": true, - "optional": true, - "peer": true, - "dependencies": { - "ansi-regex": "^4.1.0" - }, - "engines": { - "node": ">=6" - } - }, - "node_modules/webpack-dev-server/node_modules/strip-ansi": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-3.0.1.tgz", - "integrity": "sha1-ajhfuIU9lS1f8F0Oiq+UJ43GPc8=", - "dev": true, - "optional": true, - "peer": true, - "dependencies": { - "ansi-regex": "^2.0.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/webpack-dev-server/node_modules/supports-color": { - "version": "6.1.0", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-6.1.0.tgz", - "integrity": "sha512-qe1jfm1Mg7Nq/NSh6XE24gPXROEVsWHxC1LIx//XNlD9iw7YZQGjZNjYN7xGaEG6iKdA8EtNFW6R0gjnVXp+wQ==", - "dev": true, - "optional": true, - "peer": true, - "dependencies": { - "has-flag": "^3.0.0" - }, - "engines": { - 
"node": ">=6" - } - }, - "node_modules/webpack-dev-server/node_modules/to-regex-range": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-2.1.1.tgz", - "integrity": "sha1-fIDBe53+vlmeJzZ+DU3VWQFB2zg=", - "dev": true, - "optional": true, - "peer": true, - "dependencies": { - "is-number": "^3.0.0", - "repeat-string": "^1.6.1" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/webpack-dev-server/node_modules/wrap-ansi": { - "version": "5.1.0", - "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-5.1.0.tgz", - "integrity": "sha512-QC1/iN/2/RPVJ5jYK8BGttj5z83LmSKmvbvrXPNCLZSEb32KKVDJDl/MOt2N01qU2H/FkzEa9PKto1BqDjtd7Q==", - "dev": true, - "optional": true, - "peer": true, - "dependencies": { - "ansi-styles": "^3.2.0", - "string-width": "^3.0.0", - "strip-ansi": "^5.0.0" - }, - "engines": { - "node": ">=6" - } - }, - "node_modules/webpack-dev-server/node_modules/wrap-ansi/node_modules/ansi-regex": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-4.1.0.tgz", - "integrity": "sha512-1apePfXM1UOSqw0o9IiFAovVz9M5S1Dg+4TrDwfMewQ6p/rmMueb7tWZjQ1rx4Loy1ArBggoqGpfqqdI4rondg==", - "dev": true, - "optional": true, - "peer": true, - "engines": { - "node": ">=6" - } - }, - "node_modules/webpack-dev-server/node_modules/wrap-ansi/node_modules/strip-ansi": { - "version": "5.2.0", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-5.2.0.tgz", - "integrity": "sha512-DuRs1gKbBqsMKIZlrffwlug8MHkcnpjs5VPmL1PAh+mA30U0DTotfDZ0d2UUsXpPmPmMMJ6W773MaA3J+lbiWA==", - "dev": true, - "optional": true, - "peer": true, - "dependencies": { - "ansi-regex": "^4.1.0" - }, - "engines": { - "node": ">=6" - } - }, - "node_modules/webpack-dev-server/node_modules/ws": { - "version": "6.2.2", - "resolved": "https://registry.npmjs.org/ws/-/ws-6.2.2.tgz", - "integrity": "sha512-zmhltoSR8u1cnDsD43TX59mzoMZsLKqUweyYBAIvTngR3shc0W6aOZylZmq/7hqyVxPdi+5Ud2QInblgyE72fw==", - "dev": true, - 
"optional": true, - "peer": true, - "dependencies": { - "async-limiter": "~1.0.0" - } - }, - "node_modules/webpack-dev-server/node_modules/yargs": { - "version": "13.3.2", - "resolved": "https://registry.npmjs.org/yargs/-/yargs-13.3.2.tgz", - "integrity": "sha512-AX3Zw5iPruN5ie6xGRIDgqkT+ZhnRlZMLMHAs8tg7nRruy2Nb+i5o9bwghAogtM08q1dpr2LVoS8KSTMYpWXUw==", - "dev": true, - "optional": true, - "peer": true, - "dependencies": { - "cliui": "^5.0.0", - "find-up": "^3.0.0", - "get-caller-file": "^2.0.1", - "require-directory": "^2.1.1", - "require-main-filename": "^2.0.0", - "set-blocking": "^2.0.0", - "string-width": "^3.0.0", - "which-module": "^2.0.0", - "y18n": "^4.0.0", - "yargs-parser": "^13.1.2" - } - }, - "node_modules/webpack-dev-server/node_modules/yargs-parser": { - "version": "13.1.2", - "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-13.1.2.tgz", - "integrity": "sha512-3lbsNRf/j+A4QuSZfDRA7HRSfWrzO0YjqTJd5kjAq37Zep1CEgaYmrH9Q3GwPiB9cHyd1Y1UwggGhJGoxipbzg==", - "dev": true, - "optional": true, - "peer": true, - "dependencies": { - "camelcase": "^5.0.0", - "decamelize": "^1.2.0" - } - }, "node_modules/webpack-hot-middleware": { "version": "2.25.1", "resolved": "https://registry.npmjs.org/webpack-hot-middleware/-/webpack-hot-middleware-2.25.1.tgz", @@ -47208,14 +45764,6 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/which-module": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/which-module/-/which-module-2.0.0.tgz", - "integrity": "sha1-2e8H3Od7mQK4o6j6SzHD4/fm6Ho=", - "dev": true, - "optional": true, - "peer": true - }, "node_modules/which-pm-runs": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/which-pm-runs/-/which-pm-runs-1.0.0.tgz", @@ -49470,17 +48018,19 @@ "integrity": "sha512-kBJtf7PH6aWwZ6fka3zQ0p6SBYzx4fl1LoZXE2RrnYST9Xljm7WfKJrU4g/Xr3Beg72MLrp1AWNUmuYJTL7Cow==" }, "@emotion/is-prop-valid": { - "version": "0.7.3", - "resolved": 
"https://registry.npmjs.org/@emotion/is-prop-valid/-/is-prop-valid-0.7.3.tgz", - "integrity": "sha512-uxJqm/sqwXw3YPA5GXX365OBcJGFtxUVkB6WyezqFHlNe9jqUWH5ur2O2M8dGBz61kn1g3ZBlzUunFQXQIClhA==", + "version": "0.8.8", + "resolved": "https://registry.npmjs.org/@emotion/is-prop-valid/-/is-prop-valid-0.8.8.tgz", + "integrity": "sha512-u5WtneEAr5IDG2Wv65yhunPSMLIpuKsbuOktRojfrEiEvRyC85LgPMZI63cr7NUqT8ZIGdSVg8ZKGxIug4lXcA==", + "optional": true, "requires": { - "@emotion/memoize": "0.7.1" + "@emotion/memoize": "0.7.4" } }, "@emotion/memoize": { - "version": "0.7.1", - "resolved": "https://registry.npmjs.org/@emotion/memoize/-/memoize-0.7.1.tgz", - "integrity": "sha512-Qv4LTqO11jepd5Qmlp3M1YEjBumoTHcHFdgPTQ+sFlIL5myi/7xu/POwP7IRu6odBdmLXdtIs1D6TuW6kbwbbg==" + "version": "0.7.4", + "resolved": "https://registry.npmjs.org/@emotion/memoize/-/memoize-0.7.4.tgz", + "integrity": "sha512-Ja/Vfqe3HpuzRsG1oBtWTHk2PGZ7GR+2Vz5iYGelAw8dx32K0y7PjVuxK6z1nMpZOqAFsRUPCkK1YjJ56qJlgw==", + "optional": true }, "@emotion/react": { "version": "11.4.0", @@ -49614,14 +48164,14 @@ "dev": true }, "@firebase/analytics": { - "version": "0.7.9", - "resolved": "https://registry.npmjs.org/@firebase/analytics/-/analytics-0.7.9.tgz", - "integrity": "sha512-h/2L2q4/+mmV9EdvVC3XwFFbKSh8bvaYu4DMJIKnPAuGze6W5ALBLkK2GcVti6Kz1NTMJ3puxTRWE9XxRGZipQ==", - "requires": { - "@firebase/component": "0.5.14", - "@firebase/installations": "0.5.9", - "@firebase/logger": "0.3.2", - "@firebase/util": "1.6.0", + "version": "0.7.10", + "resolved": "https://registry.npmjs.org/@firebase/analytics/-/analytics-0.7.10.tgz", + "integrity": "sha512-efZ9jdzTW1/COE5gVdJVdplsltooKPH7M3XpSi/kDyegR1sC05C5NQaiBIYcaTyX2yf1OVcCfsWEcZFhhPTPGw==", + "requires": { + "@firebase/component": "0.5.15", + "@firebase/installations": "0.5.10", + "@firebase/logger": "0.3.3", + "@firebase/util": "1.6.1", "tslib": "^2.1.0" }, "dependencies": { @@ -49633,14 +48183,14 @@ } }, "@firebase/analytics-compat": { - "version": "0.1.10", - "resolved": 
"https://registry.npmjs.org/@firebase/analytics-compat/-/analytics-compat-0.1.10.tgz", - "integrity": "sha512-7zfB+BBO5RbF7RSHOA4ZPyLvOEEvMOhRbfIjh5ZmizAQY2J6tZB8t+dwQ/q4hqZVGgw4ds4g0JYuRKZKYsWADg==", + "version": "0.1.11", + "resolved": "https://registry.npmjs.org/@firebase/analytics-compat/-/analytics-compat-0.1.11.tgz", + "integrity": "sha512-Jx5iXM3nlMa6utqGWNDtmdIztFhLCqMx2Iw809BbynhTSa3esF4e5RevCRk+5oDDfW11uLHckLpe6MhmINKIkA==", "requires": { - "@firebase/analytics": "0.7.9", + "@firebase/analytics": "0.7.10", "@firebase/analytics-types": "0.7.0", - "@firebase/component": "0.5.14", - "@firebase/util": "1.6.0", + "@firebase/component": "0.5.15", + "@firebase/util": "1.6.1", "tslib": "^2.1.0" }, "dependencies": { @@ -49657,13 +48207,13 @@ "integrity": "sha512-DNE2Waiwy5+zZnCfintkDtBfaW6MjIG883474v6Z0K1XZIvl76cLND4iv0YUb48leyF+PJK1KO2XrgHb/KpmhQ==" }, "@firebase/app": { - "version": "0.7.25", - "resolved": "https://registry.npmjs.org/@firebase/app/-/app-0.7.25.tgz", - "integrity": "sha512-OemDA3NZS1oEbAPFlWHeVI8Od26ZHAXUivUWFYIsYrw+YjS7FloltwyHB06Q8LQyPJIBPubGkEuzNTHz32EDCQ==", + "version": "0.7.26", + "resolved": "https://registry.npmjs.org/@firebase/app/-/app-0.7.26.tgz", + "integrity": "sha512-FmJ4uaUyazmOZZWJO9OviKfnw+lrwMPQbWBMutymSQT8Gx783Ddnhs5IdmfV0NeLrlGy4ZwfP6/+RJyy2wGDXw==", "requires": { - "@firebase/component": "0.5.14", - "@firebase/logger": "0.3.2", - "@firebase/util": "1.6.0", + "@firebase/component": "0.5.15", + "@firebase/logger": "0.3.3", + "@firebase/util": "1.6.1", "idb": "7.0.1", "tslib": "^2.1.0" }, @@ -49676,13 +48226,13 @@ } }, "@firebase/app-check": { - "version": "0.5.8", - "resolved": "https://registry.npmjs.org/@firebase/app-check/-/app-check-0.5.8.tgz", - "integrity": "sha512-DgrXnrJT0S5csa5CsvmWWSWqy61T3rOE2iZ/L4Q8+xZsjU2McpUj8g/lU8NDa4qc5mGRZ/Qjozqog1H3pwPgGw==", + "version": "0.5.9", + "resolved": "https://registry.npmjs.org/@firebase/app-check/-/app-check-0.5.9.tgz", + "integrity": 
"sha512-IxOSpw4cL6fQD2AGLhXHxsdCjzQEYGyRwvS2vtguMxTYhRQ/EWXvej+P42cXf373vDrmAMKrnIUgC4P1yMPLSA==", "requires": { - "@firebase/component": "0.5.14", - "@firebase/logger": "0.3.2", - "@firebase/util": "1.6.0", + "@firebase/component": "0.5.15", + "@firebase/logger": "0.3.3", + "@firebase/util": "1.6.1", "tslib": "^2.1.0" }, "dependencies": { @@ -49694,15 +48244,15 @@ } }, "@firebase/app-check-compat": { - "version": "0.2.8", - "resolved": "https://registry.npmjs.org/@firebase/app-check-compat/-/app-check-compat-0.2.8.tgz", - "integrity": "sha512-EAqFa0juE2xc52IGh2nv8E+avTLsZfbO7fkJnhPu07e5FU39pptcsRckTdHU7v1/DuWuigUVFcOD5iic9I8TQw==", + "version": "0.2.9", + "resolved": "https://registry.npmjs.org/@firebase/app-check-compat/-/app-check-compat-0.2.9.tgz", + "integrity": "sha512-DgHCcUR3vC3KrAQccs+cggTjNusF/oxPJmw1397H0jw5vWVu0oTtmIduyKB2GE0KDo0q0bHNPPR8GEVugjeFPg==", "requires": { - "@firebase/app-check": "0.5.8", + "@firebase/app-check": "0.5.9", "@firebase/app-check-types": "0.4.0", - "@firebase/component": "0.5.14", - "@firebase/logger": "0.3.2", - "@firebase/util": "1.6.0", + "@firebase/component": "0.5.15", + "@firebase/logger": "0.3.3", + "@firebase/util": "1.6.1", "tslib": "^2.1.0" }, "dependencies": { @@ -49724,14 +48274,14 @@ "integrity": "sha512-SsWafqMABIOu7zLgWbmwvHGOeQQVQlwm42kwwubsmfLmL4Sf5uGpBfDhQ0CAkpi7bkJ/NwNFKafNDL9prRNP0Q==" }, "@firebase/app-compat": { - "version": "0.1.26", - "resolved": "https://registry.npmjs.org/@firebase/app-compat/-/app-compat-0.1.26.tgz", - "integrity": "sha512-i5UTq1HZAHuhe7RNjgFSezbow4jVxc2oe3Gndsv+Hdut92f8L0AyssOtdU2iOylLlxbTijewAXXui4FAUzXubw==", - "requires": { - "@firebase/app": "0.7.25", - "@firebase/component": "0.5.14", - "@firebase/logger": "0.3.2", - "@firebase/util": "1.6.0", + "version": "0.1.27", + "resolved": "https://registry.npmjs.org/@firebase/app-compat/-/app-compat-0.1.27.tgz", + "integrity": "sha512-0A5ENP/KK0Eev94qPuxaclfOE0oA6hyCVQTdi0ox1bPm+VzGGD/jXP6Bzw+IUmy33ChjP/639bm6Myh8AG4PwA==", + "requires": { 
+ "@firebase/app": "0.7.26", + "@firebase/component": "0.5.15", + "@firebase/logger": "0.3.3", + "@firebase/util": "1.6.1", "tslib": "^2.1.0" }, "dependencies": { @@ -49748,13 +48298,13 @@ "integrity": "sha512-6fbHQwDv2jp/v6bXhBw2eSRbNBpxHcd1NBF864UksSMVIqIyri9qpJB1Mn6sGZE+bnDsSQBC5j2TbMxYsJQkQg==" }, "@firebase/auth": { - "version": "0.20.2", - "resolved": "https://registry.npmjs.org/@firebase/auth/-/auth-0.20.2.tgz", - "integrity": "sha512-anv2dhHXnlHSuXDuXIoCm/w/JJ+SiQ1TAKgNVYlhfq+yvx9Op8CxfTqcfBwfbIZ1gizw4PNLuk82m8KelsKl6Q==", + "version": "0.20.3", + "resolved": "https://registry.npmjs.org/@firebase/auth/-/auth-0.20.3.tgz", + "integrity": "sha512-iElaZvVxxW2WAAmmqwTkdPBdixdI2TpURACwNn0G4XpuxlNeF3hYK1nDla2Oa/r39QGtlb9FChTTBby4Uu/Flw==", "requires": { - "@firebase/component": "0.5.14", - "@firebase/logger": "0.3.2", - "@firebase/util": "1.6.0", + "@firebase/component": "0.5.15", + "@firebase/logger": "0.3.3", + "@firebase/util": "1.6.1", "node-fetch": "2.6.7", "selenium-webdriver": "4.1.2", "tslib": "^2.1.0" @@ -49768,14 +48318,14 @@ } }, "@firebase/auth-compat": { - "version": "0.2.15", - "resolved": "https://registry.npmjs.org/@firebase/auth-compat/-/auth-compat-0.2.15.tgz", - "integrity": "sha512-Kl8pujKWVBJ+76h4tRsS5xI9Dvk8MVSP6eN82rnEgmCxiUsnVj5Adb/WzvS3p4/l++4mRSAEnlIVxZ2Pyaeirg==", + "version": "0.2.16", + "resolved": "https://registry.npmjs.org/@firebase/auth-compat/-/auth-compat-0.2.16.tgz", + "integrity": "sha512-wwyuBwtCXwygr1Vyr7M4v8iD1eGRUEGM0XNGG2BQkFnlF7rkwpGsmgiiSkaA8kFYibNSTx2TkdBNfvJXzYPL6A==", "requires": { - "@firebase/auth": "0.20.2", + "@firebase/auth": "0.20.3", "@firebase/auth-types": "0.11.0", - "@firebase/component": "0.5.14", - "@firebase/util": "1.6.0", + "@firebase/component": "0.5.15", + "@firebase/util": "1.6.1", "node-fetch": "2.6.7", "selenium-webdriver": "4.1.2", "tslib": "^2.1.0" @@ -49801,11 +48351,11 @@ "requires": {} }, "@firebase/component": { - "version": "0.5.14", - "resolved": 
"https://registry.npmjs.org/@firebase/component/-/component-0.5.14.tgz", - "integrity": "sha512-ct2p1MTMV5P/nGIlkC3XjAVwHwjsIZaeo8JVyDAkJCNTROu5mYX3FBK16hjIUIIVJDpgnnzFh9nP74gciL4WrA==", + "version": "0.5.15", + "resolved": "https://registry.npmjs.org/@firebase/component/-/component-0.5.15.tgz", + "integrity": "sha512-VRnZxmvtJmXupTPg37LxM0zdyMN54EXkmsFD4x5Bm4eZUay9VGnhfiGnE3m9Af/2hnURA2idIBN/23L6982iPQ==", "requires": { - "@firebase/util": "1.6.0", + "@firebase/util": "1.6.1", "tslib": "^2.1.0" }, "dependencies": { @@ -49817,14 +48367,14 @@ } }, "@firebase/database": { - "version": "0.13.0", - "resolved": "https://registry.npmjs.org/@firebase/database/-/database-0.13.0.tgz", - "integrity": "sha512-lskyf5+FDnytrPJt3MLjkTDxYxutKtaYL7j/Z/De2DSVZJSR+weE/D/r47iK/+tyzMaew2v3joSgZOHvVlWshw==", + "version": "0.13.1", + "resolved": "https://registry.npmjs.org/@firebase/database/-/database-0.13.1.tgz", + "integrity": "sha512-k6PeAzf9x9DG3AJtA6SkJsTD1ivOWvrV71VPOYabBch05QDB0HOYs1EauGhzqa6GOcYz+ncb4pNEkgFDvcnEfQ==", "requires": { "@firebase/auth-interop-types": "0.1.6", - "@firebase/component": "0.5.14", - "@firebase/logger": "0.3.2", - "@firebase/util": "1.6.0", + "@firebase/component": "0.5.15", + "@firebase/logger": "0.3.3", + "@firebase/util": "1.6.1", "faye-websocket": "0.11.4", "tslib": "^2.1.0" }, @@ -49845,15 +48395,15 @@ } }, "@firebase/database-compat": { - "version": "0.2.0", - "resolved": "https://registry.npmjs.org/@firebase/database-compat/-/database-compat-0.2.0.tgz", - "integrity": "sha512-t2HVI1RrMz8cbmhyo2LQGSInhRN9DZTDKXm55iFQgSihcnCbfoMAFyRv/FFa1Y+iERgcDI8LaOMS/LTjpYVz4g==", - "requires": { - "@firebase/component": "0.5.14", - "@firebase/database": "0.13.0", - "@firebase/database-types": "0.9.8", - "@firebase/logger": "0.3.2", - "@firebase/util": "1.6.0", + "version": "0.2.1", + "resolved": "https://registry.npmjs.org/@firebase/database-compat/-/database-compat-0.2.1.tgz", + "integrity": 
"sha512-xpru5ZtO7um2FmfIw4gCAbkWpyOEwxzamU/5phuwze3ZihMdh+UrDrwrhvfqzQ/KIKXsK76Uyx5F3NCAS8+5eg==", + "requires": { + "@firebase/component": "0.5.15", + "@firebase/database": "0.13.1", + "@firebase/database-types": "0.9.9", + "@firebase/logger": "0.3.3", + "@firebase/util": "1.6.1", "tslib": "^2.1.0" }, "dependencies": { @@ -49865,23 +48415,23 @@ } }, "@firebase/database-types": { - "version": "0.9.8", - "resolved": "https://registry.npmjs.org/@firebase/database-types/-/database-types-0.9.8.tgz", - "integrity": "sha512-bI7bwF5xc0nPi6Oa3JVt6JJdfhVAnEpCwgfTNILR4lYDPtxdxlRXhZzQ5lfqlCj7PR+drKh9RvMu6C24N1q04w==", + "version": "0.9.9", + "resolved": "https://registry.npmjs.org/@firebase/database-types/-/database-types-0.9.9.tgz", + "integrity": "sha512-Zp86fHzQFZKYVM7yDWVAgVTeOJ39g2wT0ijeiN0jpHAHceeoV013q3jPIIGuooV2HMwWOTIBZGqh+DxrHMFyUw==", "requires": { "@firebase/app-types": "0.7.0", - "@firebase/util": "1.6.0" + "@firebase/util": "1.6.1" } }, "@firebase/firestore": { - "version": "3.4.9", - "resolved": "https://registry.npmjs.org/@firebase/firestore/-/firestore-3.4.9.tgz", - "integrity": "sha512-EiSG/uYDyUmrrHlwrsP9WqWI8ChD0hUW/+0MS3NDh8Cfo1Dfb/sM3YWKzgnIZ3wKTxn/nbe9oidHZp5cqI9G+w==", - "requires": { - "@firebase/component": "0.5.14", - "@firebase/logger": "0.3.2", - "@firebase/util": "1.6.0", - "@firebase/webchannel-wrapper": "0.6.1", + "version": "3.4.10", + "resolved": "https://registry.npmjs.org/@firebase/firestore/-/firestore-3.4.10.tgz", + "integrity": "sha512-QUW9B7U8G0zbontuEPCJaoD320AZPOM4skV+Jd+WJIUUrmg/pLCW68Tt9ycg6zQ+1WdJtzaOU35NPJS7VIP8Ug==", + "requires": { + "@firebase/component": "0.5.15", + "@firebase/logger": "0.3.3", + "@firebase/util": "1.6.1", + "@firebase/webchannel-wrapper": "0.6.2", "@grpc/grpc-js": "^1.3.2", "@grpc/proto-loader": "^0.6.0", "node-fetch": "2.6.7", @@ -49896,14 +48446,14 @@ } }, "@firebase/firestore-compat": { - "version": "0.1.18", - "resolved": 
"https://registry.npmjs.org/@firebase/firestore-compat/-/firestore-compat-0.1.18.tgz", - "integrity": "sha512-D6VXudL/B2jlZ6MGpsDPHHm/DSpfKuUOnEb5wwH89Sw0nW5snSMNG8QfYTQYKUxrX35ma+nWUnaa18LlVTUMXQ==", + "version": "0.1.19", + "resolved": "https://registry.npmjs.org/@firebase/firestore-compat/-/firestore-compat-0.1.19.tgz", + "integrity": "sha512-fE3anYxNvX50zILPdGZaJBFcK3NPOHzZR7lLupFBsmd0YFtFT4E89p0QQ3A/oZK9/74jNuvjZoJ8hamknPkZHQ==", "requires": { - "@firebase/component": "0.5.14", - "@firebase/firestore": "3.4.9", + "@firebase/component": "0.5.15", + "@firebase/firestore": "3.4.10", "@firebase/firestore-types": "2.5.0", - "@firebase/util": "1.6.0", + "@firebase/util": "1.6.1", "tslib": "^2.1.0" }, "dependencies": { @@ -49921,15 +48471,15 @@ "requires": {} }, "@firebase/functions": { - "version": "0.8.1", - "resolved": "https://registry.npmjs.org/@firebase/functions/-/functions-0.8.1.tgz", - "integrity": "sha512-UF5187TPn1Q1sFmAUU1oZdKub1t0Z6MAjcskGS6CV4OwAkILZQ9v38LIbo3wnA62R5hr3IFpdEJxKkqHojMwSg==", + "version": "0.8.2", + "resolved": "https://registry.npmjs.org/@firebase/functions/-/functions-0.8.2.tgz", + "integrity": "sha512-w2ng6vodOYj7Xo/J3h0SN6NfpRzId00DOKZDvGylH+LoQPFBshHJmv2mpM5ljEntxWvtv3aGrjD6YvgKr9JUJA==", "requires": { "@firebase/app-check-interop-types": "0.1.0", "@firebase/auth-interop-types": "0.1.6", - "@firebase/component": "0.5.14", + "@firebase/component": "0.5.15", "@firebase/messaging-interop-types": "0.1.0", - "@firebase/util": "1.6.0", + "@firebase/util": "1.6.1", "node-fetch": "2.6.7", "tslib": "^2.1.0" }, @@ -49942,14 +48492,14 @@ } }, "@firebase/functions-compat": { - "version": "0.2.1", - "resolved": "https://registry.npmjs.org/@firebase/functions-compat/-/functions-compat-0.2.1.tgz", - "integrity": "sha512-1epI+TGb3CxpQrnoSJnKMUqBLn9b6KA1Rro6ISmZIEkaDEi8p8q3UI917XP+OewiPG71xvpySiEIIxWyktcl+A==", + "version": "0.2.2", + "resolved": "https://registry.npmjs.org/@firebase/functions-compat/-/functions-compat-0.2.2.tgz", + "integrity": 
"sha512-CeAoQDVrrqjc6q0prgyO3mEDDQM84vSH09sNRRMxd9kTjZtKZD4DXf+BKfULSvMAK9mgmL70LBz8RsrcXs6YXg==", "requires": { - "@firebase/component": "0.5.14", - "@firebase/functions": "0.8.1", + "@firebase/component": "0.5.15", + "@firebase/functions": "0.8.2", "@firebase/functions-types": "0.5.0", - "@firebase/util": "1.6.0", + "@firebase/util": "1.6.1", "tslib": "^2.1.0" }, "dependencies": { @@ -49966,12 +48516,12 @@ "integrity": "sha512-qza0M5EwX+Ocrl1cYI14zoipUX4gI/Shwqv0C1nB864INAD42Dgv4v94BCyxGHBg2kzlWy8PNafdP7zPO8aJQA==" }, "@firebase/installations": { - "version": "0.5.9", - "resolved": "https://registry.npmjs.org/@firebase/installations/-/installations-0.5.9.tgz", - "integrity": "sha512-0XvF9ig8Zj7MWP4Aq5/Wcyjq9f/cDtD6DKFJhp3BT1AjmACdmq7WD72xok8UBhkOiqymIiGd5eQf7rX225D2Sw==", + "version": "0.5.10", + "resolved": "https://registry.npmjs.org/@firebase/installations/-/installations-0.5.10.tgz", + "integrity": "sha512-lTnYmtGPXwLqjiqvS4KH/V9a3vtZYWBU3Lsx+iOndFkzEyEANQ4qwUgZsP94qWRFd1WumcgDqhFmoVeYkDQCew==", "requires": { - "@firebase/component": "0.5.14", - "@firebase/util": "1.6.0", + "@firebase/component": "0.5.15", + "@firebase/util": "1.6.1", "idb": "7.0.1", "tslib": "^2.1.0" }, @@ -49984,9 +48534,9 @@ } }, "@firebase/logger": { - "version": "0.3.2", - "resolved": "https://registry.npmjs.org/@firebase/logger/-/logger-0.3.2.tgz", - "integrity": "sha512-lzLrcJp9QBWpo40OcOM9B8QEtBw2Fk1zOZQdvv+rWS6gKmhQBCEMc4SMABQfWdjsylBcDfniD1Q+fUX1dcBTXA==", + "version": "0.3.3", + "resolved": "https://registry.npmjs.org/@firebase/logger/-/logger-0.3.3.tgz", + "integrity": "sha512-POTJl07jOKTOevLXrTvJD/VZ0M6PnJXflbAh5J9VGkmtXPXNG6MdZ9fmRgqYhXKTaDId6AQenQ262uwgpdtO0Q==", "requires": { "tslib": "^2.1.0" }, @@ -49999,14 +48549,14 @@ } }, "@firebase/messaging": { - "version": "0.9.13", - "resolved": "https://registry.npmjs.org/@firebase/messaging/-/messaging-0.9.13.tgz", - "integrity": "sha512-wR/SGYGG/bmz1gRqm6/eGI6zRg/X3qNP0BCk0Oa6xVDKK04UCE9zNRgQYgCSKNP+zuLfDhpHbXvvXQp9/vBYVA==", + 
"version": "0.9.14", + "resolved": "https://registry.npmjs.org/@firebase/messaging/-/messaging-0.9.14.tgz", + "integrity": "sha512-TrSDdZT/wI94m+kay4ibYDUsBiSkijU71zWhSXPJRGSUNuq8EP0ecs2eu01Kipb6ghl5YUiFFL/uY5Y6WK/I5A==", "requires": { - "@firebase/component": "0.5.14", - "@firebase/installations": "0.5.9", + "@firebase/component": "0.5.15", + "@firebase/installations": "0.5.10", "@firebase/messaging-interop-types": "0.1.0", - "@firebase/util": "1.6.0", + "@firebase/util": "1.6.1", "idb": "7.0.1", "tslib": "^2.1.0" }, @@ -50019,13 +48569,13 @@ } }, "@firebase/messaging-compat": { - "version": "0.1.13", - "resolved": "https://registry.npmjs.org/@firebase/messaging-compat/-/messaging-compat-0.1.13.tgz", - "integrity": "sha512-kGuzjpl+pcTRmEgGDjyOKQnxxQgC7wIJIIHhLMIpfxHHL5+ysN1Tjq0Ztr1t/gcdHKErtnD/n9To5eoGZHqpzA==", + "version": "0.1.14", + "resolved": "https://registry.npmjs.org/@firebase/messaging-compat/-/messaging-compat-0.1.14.tgz", + "integrity": "sha512-XNF5+TxhbFa5nAmkf/PbbNFfmiTcyBfjIl322Me6ZYK4leC8+O9beR7w0wWei8+GhUSIHn3D69ZZRewUUkXukA==", "requires": { - "@firebase/component": "0.5.14", - "@firebase/messaging": "0.9.13", - "@firebase/util": "1.6.0", + "@firebase/component": "0.5.15", + "@firebase/messaging": "0.9.14", + "@firebase/util": "1.6.1", "tslib": "^2.1.0" }, "dependencies": { @@ -50042,14 +48592,14 @@ "integrity": "sha512-DbvUl/rXAZpQeKBnwz0NYY5OCqr2nFA0Bj28Fmr3NXGqR4PAkfTOHuQlVtLO1Nudo3q0HxAYLa68ZDAcuv2uKQ==" }, "@firebase/performance": { - "version": "0.5.9", - "resolved": "https://registry.npmjs.org/@firebase/performance/-/performance-0.5.9.tgz", - "integrity": "sha512-cA1pea1hkIZt0FG0a42tjKQNBhdY7q4apqHML92vBCS9QOOR0SHBui44IGQJRfRBGiVICHW03Q+ikSZv08g+jw==", + "version": "0.5.10", + "resolved": "https://registry.npmjs.org/@firebase/performance/-/performance-0.5.10.tgz", + "integrity": "sha512-rX+OsVMc6IIkrZqFmIjvEfRuRJ84ftPJDDpnqZ134pqTPr3MQgRzU/gPgLio8EdUN5YCthWyA8nB8NrEzBysSA==", "requires": { - "@firebase/component": "0.5.14", - 
"@firebase/installations": "0.5.9", - "@firebase/logger": "0.3.2", - "@firebase/util": "1.6.0", + "@firebase/component": "0.5.15", + "@firebase/installations": "0.5.10", + "@firebase/logger": "0.3.3", + "@firebase/util": "1.6.1", "tslib": "^2.1.0" }, "dependencies": { @@ -50061,15 +48611,15 @@ } }, "@firebase/performance-compat": { - "version": "0.1.9", - "resolved": "https://registry.npmjs.org/@firebase/performance-compat/-/performance-compat-0.1.9.tgz", - "integrity": "sha512-EBX4u/uK76ikJSyoWZ2cEMj63G01w1DA68KDpSypSMhKPJE2eiCtWABRTSXhcaisq/FDwZzl4XhNjDyfzArwhA==", + "version": "0.1.10", + "resolved": "https://registry.npmjs.org/@firebase/performance-compat/-/performance-compat-0.1.10.tgz", + "integrity": "sha512-WhY2pjpXHiyRfnk9t3/BKGK/C0u4pC61mEYh8t8MLayz8KwuiavJj1wuCN2nG2R0y8CXZAsifFLQs1h0K3XzDA==", "requires": { - "@firebase/component": "0.5.14", - "@firebase/logger": "0.3.2", - "@firebase/performance": "0.5.9", + "@firebase/component": "0.5.15", + "@firebase/logger": "0.3.3", + "@firebase/performance": "0.5.10", "@firebase/performance-types": "0.1.0", - "@firebase/util": "1.6.0", + "@firebase/util": "1.6.1", "tslib": "^2.1.0" }, "dependencies": { @@ -50108,14 +48658,14 @@ } }, "@firebase/remote-config": { - "version": "0.3.8", - "resolved": "https://registry.npmjs.org/@firebase/remote-config/-/remote-config-0.3.8.tgz", - "integrity": "sha512-z5HYrjrgzkR25nlvQqiPowDGatlEJirA5sN1B6rOy+KYMLsb6IXLVOdKjj/Tg/uHAErwd0DblGxwBUZKTCuo1g==", + "version": "0.3.9", + "resolved": "https://registry.npmjs.org/@firebase/remote-config/-/remote-config-0.3.9.tgz", + "integrity": "sha512-SQ7tArNyI3sPlbmyAB3X2rS8lHcVlPWIQPRLCmgpKjPKM6Jsv7onCUK+M23DW95iEjK4vEVU5QkxUP3fUXWkxg==", "requires": { - "@firebase/component": "0.5.14", - "@firebase/installations": "0.5.9", - "@firebase/logger": "0.3.2", - "@firebase/util": "1.6.0", + "@firebase/component": "0.5.15", + "@firebase/installations": "0.5.10", + "@firebase/logger": "0.3.3", + "@firebase/util": "1.6.1", "tslib": "^2.1.0" }, 
"dependencies": { @@ -50127,15 +48677,15 @@ } }, "@firebase/remote-config-compat": { - "version": "0.1.9", - "resolved": "https://registry.npmjs.org/@firebase/remote-config-compat/-/remote-config-compat-0.1.9.tgz", - "integrity": "sha512-ud4yINy8cegE82KoBDXS4fOp6qwy0+7zl0k587kMXHSWHbWVRZ/uKMQGJQc7kG0EQp0tZhM20CxVwtcCGsABBA==", + "version": "0.1.10", + "resolved": "https://registry.npmjs.org/@firebase/remote-config-compat/-/remote-config-compat-0.1.10.tgz", + "integrity": "sha512-FSZg9JqgnYIDV78J74W6JUANGjrzCgTRKHioBifONo3e2CdEqQKrvIuGCXEE9+9vYyuqNEtmv5DUIPC4n6XYCQ==", "requires": { - "@firebase/component": "0.5.14", - "@firebase/logger": "0.3.2", - "@firebase/remote-config": "0.3.8", + "@firebase/component": "0.5.15", + "@firebase/logger": "0.3.3", + "@firebase/remote-config": "0.3.9", "@firebase/remote-config-types": "0.2.0", - "@firebase/util": "1.6.0", + "@firebase/util": "1.6.1", "tslib": "^2.1.0" }, "dependencies": { @@ -50152,12 +48702,12 @@ "integrity": "sha512-hqK5sCPeZvcHQ1D6VjJZdW6EexLTXNMJfPdTwbD8NrXUw6UjWC4KWhLK/TSlL0QPsQtcKRkaaoP+9QCgKfMFPw==" }, "@firebase/storage": { - "version": "0.9.6", - "resolved": "https://registry.npmjs.org/@firebase/storage/-/storage-0.9.6.tgz", - "integrity": "sha512-q8/s3qFbFl+AlKbyEtGA7FRVhcMu3NKPqHueBTn5XSI0B3bfxptBcDJMb9txs69ppve6P3jrK1//TEWpjTGJUg==", + "version": "0.9.7", + "resolved": "https://registry.npmjs.org/@firebase/storage/-/storage-0.9.7.tgz", + "integrity": "sha512-0unWzgx5bceyO3SX/ilHaxwwHidN5sXZGakFLjAn8cbpjVpmybcKaLOduBxlMXeDCdUFfO8FcvEajFkV+0t2hA==", "requires": { - "@firebase/component": "0.5.14", - "@firebase/util": "1.6.0", + "@firebase/component": "0.5.15", + "@firebase/util": "1.6.1", "node-fetch": "2.6.7", "tslib": "^2.1.0" }, @@ -50170,14 +48720,14 @@ } }, "@firebase/storage-compat": { - "version": "0.1.14", - "resolved": "https://registry.npmjs.org/@firebase/storage-compat/-/storage-compat-0.1.14.tgz", - "integrity": 
"sha512-/Fey1n+ryIeAEyd/qXPXh32ReFZUhzE5W0z/+LDA+3yyMGw/a6wCzQqe7wBiGiCRhjd+5XiV++jkCXTflun3Dg==", + "version": "0.1.15", + "resolved": "https://registry.npmjs.org/@firebase/storage-compat/-/storage-compat-0.1.15.tgz", + "integrity": "sha512-XjqAYIc8oJv6OAeeLdCUC3KF0wXAzRoBGktRhPMc9umSxVE7Dnr960kF6qtdAbLFGi/uhj478AdpKSQgZ75rQA==", "requires": { - "@firebase/component": "0.5.14", - "@firebase/storage": "0.9.6", + "@firebase/component": "0.5.15", + "@firebase/storage": "0.9.7", "@firebase/storage-types": "0.6.0", - "@firebase/util": "1.6.0", + "@firebase/util": "1.6.1", "tslib": "^2.1.0" }, "dependencies": { @@ -50195,9 +48745,9 @@ "requires": {} }, "@firebase/util": { - "version": "1.6.0", - "resolved": "https://registry.npmjs.org/@firebase/util/-/util-1.6.0.tgz", - "integrity": "sha512-6+hhqb4Zzjoo12xofTDHPkgW3FnN4ydBsjd5X2KuQI268DR3W3Ld64W/gkKPZrKRgUxeNeb+pykfP3qRe7q+vA==", + "version": "1.6.1", + "resolved": "https://registry.npmjs.org/@firebase/util/-/util-1.6.1.tgz", + "integrity": "sha512-+eDE6uG5GgvXYHbAzfP1mpJUX1VDBD+A8CjBeBoNAKAVAApMSDxDODqRcOq7NW7kFJXSUkMzDJWhnUIifX2R8w==", "requires": { "tslib": "^2.1.0" }, @@ -50210,9 +48760,9 @@ } }, "@firebase/webchannel-wrapper": { - "version": "0.6.1", - "resolved": "https://registry.npmjs.org/@firebase/webchannel-wrapper/-/webchannel-wrapper-0.6.1.tgz", - "integrity": "sha512-9FqhNjKQWpQ3fGnSOCovHOm+yhhiorKEqYLAfd525jWavunDJcx8rOW6i6ozAh+FbwcYMkL7b+3j4UR/30MpoQ==" + "version": "0.6.2", + "resolved": "https://registry.npmjs.org/@firebase/webchannel-wrapper/-/webchannel-wrapper-0.6.2.tgz", + "integrity": "sha512-zThUKcqIU6utWzM93uEvhlh8qj8A5LMPFJPvk/ODb+8GSSif19xM2Lw1M2ijyBy8+6skSkQBbavPzOU5Oh/8tQ==" }, "@formatjs/ecma402-abstract": { "version": "1.11.4", @@ -51897,6 +50447,23 @@ "integrity": "sha512-H1rQc1ZOHANWBvPcW+JpGwr+juXSxM8Q8YCkm3GhZd8REu1fHR3z99CErO1p9pkcfcxZnMdIZdIsXkOHY0NilA==", "dev": true }, + "@monaco-editor/loader": { + "version": "1.3.2", + "resolved": 
"https://registry.npmjs.org/@monaco-editor/loader/-/loader-1.3.2.tgz", + "integrity": "sha512-BTDbpHl3e47r3AAtpfVFTlAi7WXv4UQ/xZmz8atKl4q7epQV5e7+JbigFDViWF71VBi4IIBdcWP57Hj+OWuc9g==", + "requires": { + "state-local": "^1.0.6" + } + }, + "@monaco-editor/react": { + "version": "4.4.5", + "resolved": "https://registry.npmjs.org/@monaco-editor/react/-/react-4.4.5.tgz", + "integrity": "sha512-IImtzU7sRc66OOaQVCG+5PFHkSWnnhrUWGBuH6zNmH2h0YgmAhcjHZQc/6MY9JWEbUtVF1WPBMJ9u1XuFbRrVA==", + "requires": { + "@monaco-editor/loader": "^1.3.2", + "prop-types": "^15.7.2" + } + }, "@mrmlnc/readdir-enhanced": { "version": "2.2.1", "resolved": "https://registry.npmjs.org/@mrmlnc/readdir-enhanced/-/readdir-enhanced-2.2.1.tgz", @@ -51980,23 +50547,6 @@ } } }, - "@popmotion/easing": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/@popmotion/easing/-/easing-1.0.2.tgz", - "integrity": "sha512-IkdW0TNmRnWTeWI7aGQIVDbKXPWHVEYdGgd5ZR4SH/Ty/61p63jCjrPxX1XrR7IGkl08bjhJROStD7j+RKgoIw==" - }, - "@popmotion/popcorn": { - "version": "0.4.4", - "resolved": "https://registry.npmjs.org/@popmotion/popcorn/-/popcorn-0.4.4.tgz", - "integrity": "sha512-jYO/8319fKoNLMlY4ZJPiPu8Ea8occYwRZhxpaNn/kZsK4QG2E7XFlXZMJBsTWDw7I1i0uaqyC4zn1nwEezLzg==", - "requires": { - "@popmotion/easing": "^1.0.1", - "framesync": "^4.0.1", - "hey-listen": "^1.0.8", - "style-value-types": "^3.1.7", - "tslib": "^1.10.0" - } - }, "@protobufjs/aspromise": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/@protobufjs/aspromise/-/aspromise-1.1.2.tgz", @@ -52932,18 +51482,18 @@ "dev": true }, "@storybook/addon-actions": { - "version": "6.5.7", - "resolved": "https://registry.npmjs.org/@storybook/addon-actions/-/addon-actions-6.5.7.tgz", - "integrity": "sha512-gTkPr2FYX+vySZKEg5Wq7uHPkVUq3hJ7ZKvGls+/xjgaTwfu3iIly53FEFUl8A6kMQ+4gtTC+YRr3cSJgXMbAg==", + "version": "6.5.8", + "resolved": "https://registry.npmjs.org/@storybook/addon-actions/-/addon-actions-6.5.8.tgz", + "integrity": 
"sha512-9ciR1iWBTzQNBDlq0iQs9+TV7gng+FbQxW5mHNxNvT9SxY1dt02wCPHZeVE/5la61wBXZs/zpEepZA93VzVBDw==", "dev": true, "requires": { - "@storybook/addons": "6.5.7", - "@storybook/api": "6.5.7", - "@storybook/client-logger": "6.5.7", - "@storybook/components": "6.5.7", - "@storybook/core-events": "6.5.7", + "@storybook/addons": "6.5.8", + "@storybook/api": "6.5.8", + "@storybook/client-logger": "6.5.8", + "@storybook/components": "6.5.8", + "@storybook/core-events": "6.5.8", "@storybook/csf": "0.0.2--canary.4566f4d.1", - "@storybook/theming": "6.5.7", + "@storybook/theming": "6.5.8", "core-js": "^3.8.2", "fast-deep-equal": "^3.1.3", "global": "^4.4.0", @@ -52959,18 +51509,18 @@ } }, "@storybook/addon-backgrounds": { - "version": "6.5.7", - "resolved": "https://registry.npmjs.org/@storybook/addon-backgrounds/-/addon-backgrounds-6.5.7.tgz", - "integrity": "sha512-ryisDpxbIEZbYJkQWU5xvsj940jhWrWizedFsY9g/qBIBi33UrW/H1hKZQtmg0bzuNTgYcBjRy50ikJgH/eKAQ==", + "version": "6.5.8", + "resolved": "https://registry.npmjs.org/@storybook/addon-backgrounds/-/addon-backgrounds-6.5.8.tgz", + "integrity": "sha512-pvlP5ZVVfd2sWzgCqG/f6RJX/h9648znYbzaLQ4Z6whQIFobP3H3/cj9k/RTy3uXg5vC0IWDHSEaCXgin2sW1Q==", "dev": true, "requires": { - "@storybook/addons": "6.5.7", - "@storybook/api": "6.5.7", - "@storybook/client-logger": "6.5.7", - "@storybook/components": "6.5.7", - "@storybook/core-events": "6.5.7", + "@storybook/addons": "6.5.8", + "@storybook/api": "6.5.8", + "@storybook/client-logger": "6.5.8", + "@storybook/components": "6.5.8", + "@storybook/core-events": "6.5.8", "@storybook/csf": "0.0.2--canary.4566f4d.1", - "@storybook/theming": "6.5.7", + "@storybook/theming": "6.5.8", "core-js": "^3.8.2", "global": "^4.4.0", "memoizerific": "^1.11.3", @@ -52980,49 +51530,49 @@ } }, "@storybook/addon-controls": { - "version": "6.5.7", - "resolved": "https://registry.npmjs.org/@storybook/addon-controls/-/addon-controls-6.5.7.tgz", - "integrity": 
"sha512-1JGphHk1gcLLpkft/D5BkygXwelSdWQqvXnfFc62BVqvzxv8hCF4zuUosKLWMlB/nzVbd6W4oEDV/Mqmt6h/7w==", + "version": "6.5.8", + "resolved": "https://registry.npmjs.org/@storybook/addon-controls/-/addon-controls-6.5.8.tgz", + "integrity": "sha512-fB6p5DgVHUnJKUzOlT2mtvaSCincnO+vuYLyf++f+l4BlYK1Es9HNl/puaRoMgdW+LoGJjXPTIMcMByeHVIt6Q==", "dev": true, "requires": { - "@storybook/addons": "6.5.7", - "@storybook/api": "6.5.7", - "@storybook/client-logger": "6.5.7", - "@storybook/components": "6.5.7", - "@storybook/core-common": "6.5.7", + "@storybook/addons": "6.5.8", + "@storybook/api": "6.5.8", + "@storybook/client-logger": "6.5.8", + "@storybook/components": "6.5.8", + "@storybook/core-common": "6.5.8", "@storybook/csf": "0.0.2--canary.4566f4d.1", - "@storybook/node-logger": "6.5.7", - "@storybook/store": "6.5.7", - "@storybook/theming": "6.5.7", + "@storybook/node-logger": "6.5.8", + "@storybook/store": "6.5.8", + "@storybook/theming": "6.5.8", "core-js": "^3.8.2", "lodash": "^4.17.21", "ts-dedent": "^2.0.0" } }, "@storybook/addon-docs": { - "version": "6.5.7", - "resolved": "https://registry.npmjs.org/@storybook/addon-docs/-/addon-docs-6.5.7.tgz", - "integrity": "sha512-RghRpimJOJl9c/H6qvCCD0zHLETBIVWXsdYJF8GiY6iTKd+tgQYizuuoBT4f3PAMEMHVhmvWSjkkFLxKxzQLjQ==", + "version": "6.5.8", + "resolved": "https://registry.npmjs.org/@storybook/addon-docs/-/addon-docs-6.5.8.tgz", + "integrity": "sha512-pAvWwh5YCrsW9nHCrd5BpFigvqn92JisX0aEnwAqKC9B1AW1LxhdPn1o9CQCeszQGaq163RA6AzkCejvAqhtUQ==", "dev": true, "requires": { "@babel/plugin-transform-react-jsx": "^7.12.12", "@babel/preset-env": "^7.12.11", "@jest/transform": "^26.6.2", "@mdx-js/react": "^1.6.22", - "@storybook/addons": "6.5.7", - "@storybook/api": "6.5.7", - "@storybook/components": "6.5.7", - "@storybook/core-common": "6.5.7", - "@storybook/core-events": "6.5.7", + "@storybook/addons": "6.5.8", + "@storybook/api": "6.5.8", + "@storybook/components": "6.5.8", + "@storybook/core-common": "6.5.8", + "@storybook/core-events": 
"6.5.8", "@storybook/csf": "0.0.2--canary.4566f4d.1", - "@storybook/docs-tools": "6.5.7", + "@storybook/docs-tools": "6.5.8", "@storybook/mdx1-csf": "^0.0.1", - "@storybook/node-logger": "6.5.7", - "@storybook/postinstall": "6.5.7", - "@storybook/preview-web": "6.5.7", - "@storybook/source-loader": "6.5.7", - "@storybook/store": "6.5.7", - "@storybook/theming": "6.5.7", + "@storybook/node-logger": "6.5.8", + "@storybook/postinstall": "6.5.8", + "@storybook/preview-web": "6.5.8", + "@storybook/source-loader": "6.5.8", + "@storybook/store": "6.5.8", + "@storybook/theming": "6.5.8", "babel-loader": "^8.0.0", "core-js": "^3.8.2", "fast-deep-equal": "^3.1.3", @@ -53036,55 +51586,55 @@ } }, "@storybook/addon-essentials": { - "version": "6.5.7", - "resolved": "https://registry.npmjs.org/@storybook/addon-essentials/-/addon-essentials-6.5.7.tgz", - "integrity": "sha512-JZ80W9PgZOEUp2SjhBYyYHxQduxSIe4n9Wdoy8XDtV28152jDNms6UPjFeEVb+a9rVybYOwWnOnEhBWF6ZfJ/g==", - "dev": true, - "requires": { - "@storybook/addon-actions": "6.5.7", - "@storybook/addon-backgrounds": "6.5.7", - "@storybook/addon-controls": "6.5.7", - "@storybook/addon-docs": "6.5.7", - "@storybook/addon-measure": "6.5.7", - "@storybook/addon-outline": "6.5.7", - "@storybook/addon-toolbars": "6.5.7", - "@storybook/addon-viewport": "6.5.7", - "@storybook/addons": "6.5.7", - "@storybook/api": "6.5.7", - "@storybook/core-common": "6.5.7", - "@storybook/node-logger": "6.5.7", + "version": "6.5.8", + "resolved": "https://registry.npmjs.org/@storybook/addon-essentials/-/addon-essentials-6.5.8.tgz", + "integrity": "sha512-K/Aw/GLugCz5/S3c2tz5lnfC8aN6dSoQQDr8xaMDcBlT9h/xZ1l4jQQnx/mvY/qEvXtexBF41DE6ROWGKSZeSg==", + "dev": true, + "requires": { + "@storybook/addon-actions": "6.5.8", + "@storybook/addon-backgrounds": "6.5.8", + "@storybook/addon-controls": "6.5.8", + "@storybook/addon-docs": "6.5.8", + "@storybook/addon-measure": "6.5.8", + "@storybook/addon-outline": "6.5.8", + "@storybook/addon-toolbars": "6.5.8", + 
"@storybook/addon-viewport": "6.5.8", + "@storybook/addons": "6.5.8", + "@storybook/api": "6.5.8", + "@storybook/core-common": "6.5.8", + "@storybook/node-logger": "6.5.8", "core-js": "^3.8.2", "regenerator-runtime": "^0.13.7", "ts-dedent": "^2.0.0" } }, "@storybook/addon-measure": { - "version": "6.5.7", - "resolved": "https://registry.npmjs.org/@storybook/addon-measure/-/addon-measure-6.5.7.tgz", - "integrity": "sha512-NMth6CErySKQ9WnfzMZ4nelHa2bBzZ60ZgsDq5s5dKHhJzZPm2nclmGAGE+VhqI/USe8b1fnjKFeHH485T8J2g==", + "version": "6.5.8", + "resolved": "https://registry.npmjs.org/@storybook/addon-measure/-/addon-measure-6.5.8.tgz", + "integrity": "sha512-zpNAt1XwBLnQ3OjCfj63J7vC2WCTyAjvbGVAsUkpQb21vr/e3sPFQZPKGwio85SYjIX7AJ+Oi28mbEwWzS8wFA==", "dev": true, "requires": { - "@storybook/addons": "6.5.7", - "@storybook/api": "6.5.7", - "@storybook/client-logger": "6.5.7", - "@storybook/components": "6.5.7", - "@storybook/core-events": "6.5.7", + "@storybook/addons": "6.5.8", + "@storybook/api": "6.5.8", + "@storybook/client-logger": "6.5.8", + "@storybook/components": "6.5.8", + "@storybook/core-events": "6.5.8", "@storybook/csf": "0.0.2--canary.4566f4d.1", "core-js": "^3.8.2", "global": "^4.4.0" } }, "@storybook/addon-outline": { - "version": "6.5.7", - "resolved": "https://registry.npmjs.org/@storybook/addon-outline/-/addon-outline-6.5.7.tgz", - "integrity": "sha512-qTu19FnZz+rjY7SxPOgiQkuAxHRNRhUYgvUwI+ep0ZQcBddsRgniQjzXtErlUMeVoMZ63mDuOaJp67ltkriAOQ==", + "version": "6.5.8", + "resolved": "https://registry.npmjs.org/@storybook/addon-outline/-/addon-outline-6.5.8.tgz", + "integrity": "sha512-/bEjYTVJNM5QEiguS5nVQlerl5NhgOod1zLExnkchc8+FTJC58Vy7CRfzr2iaIMuf1QRPqBwSIy6ZqLJOdUfnQ==", "dev": true, "requires": { - "@storybook/addons": "6.5.7", - "@storybook/api": "6.5.7", - "@storybook/client-logger": "6.5.7", - "@storybook/components": "6.5.7", - "@storybook/core-events": "6.5.7", + "@storybook/addons": "6.5.8", + "@storybook/api": "6.5.8", + "@storybook/client-logger": 
"6.5.8", + "@storybook/components": "6.5.8", + "@storybook/core-events": "6.5.8", "@storybook/csf": "0.0.2--canary.4566f4d.1", "core-js": "^3.8.2", "global": "^4.4.0", @@ -53093,32 +51643,32 @@ } }, "@storybook/addon-toolbars": { - "version": "6.5.7", - "resolved": "https://registry.npmjs.org/@storybook/addon-toolbars/-/addon-toolbars-6.5.7.tgz", - "integrity": "sha512-+MUG5t4isQNf+q7BpEsGwuYAvYgs9XTdzzdvL/9jedQ7udJsWmG1q9a6m9+iQGPr/WK+88F2kgSOknpib3J21w==", + "version": "6.5.8", + "resolved": "https://registry.npmjs.org/@storybook/addon-toolbars/-/addon-toolbars-6.5.8.tgz", + "integrity": "sha512-16eRbbtn4/cH1xU8JlPZRdShwUwSsPcqpyH1JNl+rgYQ6SaSNq3aO/jDFeQe93guSD0YPRWHz8dKtn6OxVeozQ==", "dev": true, "requires": { - "@storybook/addons": "6.5.7", - "@storybook/api": "6.5.7", - "@storybook/client-logger": "6.5.7", - "@storybook/components": "6.5.7", - "@storybook/theming": "6.5.7", + "@storybook/addons": "6.5.8", + "@storybook/api": "6.5.8", + "@storybook/client-logger": "6.5.8", + "@storybook/components": "6.5.8", + "@storybook/theming": "6.5.8", "core-js": "^3.8.2", "regenerator-runtime": "^0.13.7" } }, "@storybook/addon-viewport": { - "version": "6.5.7", - "resolved": "https://registry.npmjs.org/@storybook/addon-viewport/-/addon-viewport-6.5.7.tgz", - "integrity": "sha512-8VmSTGKY3+9kZ09THC7546OaFbjLu5kEAGU5ZFSZaNlsJwRg7bC3bScKbnyX5EhihgZ3W8oJt/eMAIqXKHxA8g==", + "version": "6.5.8", + "resolved": "https://registry.npmjs.org/@storybook/addon-viewport/-/addon-viewport-6.5.8.tgz", + "integrity": "sha512-MTpZWkBWNPH55iNHK4tBNKTdew5xKfoNvOj0pZn1rYDHlylMTlq7aoccwRjjK2jZeHHNnb1rm6ZkQDjmYu0Tcw==", "dev": true, "requires": { - "@storybook/addons": "6.5.7", - "@storybook/api": "6.5.7", - "@storybook/client-logger": "6.5.7", - "@storybook/components": "6.5.7", - "@storybook/core-events": "6.5.7", - "@storybook/theming": "6.5.7", + "@storybook/addons": "6.5.8", + "@storybook/api": "6.5.8", + "@storybook/client-logger": "6.5.8", + "@storybook/components": "6.5.8", + 
"@storybook/core-events": "6.5.8", + "@storybook/theming": "6.5.8", "core-js": "^3.8.2", "global": "^4.4.0", "memoizerific": "^1.11.3", @@ -53127,18 +51677,18 @@ } }, "@storybook/addons": { - "version": "6.5.7", - "resolved": "https://registry.npmjs.org/@storybook/addons/-/addons-6.5.7.tgz", - "integrity": "sha512-tUZ2c1uegUcwY31ztNQZGU/HUwAEEGIR8fEOvvO8S0TNQGoo6cwFtZmWBh3mTSRGcmzK2SNBjFHZua5Ee9TefA==", + "version": "6.5.8", + "resolved": "https://registry.npmjs.org/@storybook/addons/-/addons-6.5.8.tgz", + "integrity": "sha512-L3LmbsYP9tDHHvpr/yv8YuEkzym7SXp/jZ0km31tpG3EuZmgGu7MXPrZ2ymEw4PkAhQzztgRr23VTfKobGUojA==", "dev": true, "requires": { - "@storybook/api": "6.5.7", - "@storybook/channels": "6.5.7", - "@storybook/client-logger": "6.5.7", - "@storybook/core-events": "6.5.7", + "@storybook/api": "6.5.8", + "@storybook/channels": "6.5.8", + "@storybook/client-logger": "6.5.8", + "@storybook/core-events": "6.5.8", "@storybook/csf": "0.0.2--canary.4566f4d.1", - "@storybook/router": "6.5.7", - "@storybook/theming": "6.5.7", + "@storybook/router": "6.5.8", + "@storybook/theming": "6.5.8", "@types/webpack-env": "^1.16.0", "core-js": "^3.8.2", "global": "^4.4.0", @@ -53146,18 +51696,18 @@ } }, "@storybook/api": { - "version": "6.5.7", - "resolved": "https://registry.npmjs.org/@storybook/api/-/api-6.5.7.tgz", - "integrity": "sha512-QCNypz4X+lYuFW7EzvRPXMf8uS3gfSIV8sqXtEe5XoMb0HQXhy6AGU7/4iAeuUimtETqLTxq+kOxaSg4uPowxg==", + "version": "6.5.8", + "resolved": "https://registry.npmjs.org/@storybook/api/-/api-6.5.8.tgz", + "integrity": "sha512-/MueV+wLCvy9gFA3ih4g7QYjDmn14i+D2ydonfaEC7R+agFGXxXwJGPKkz3yBNrRpNkBwcbY9mAmv8lE2AqgqQ==", "dev": true, "requires": { - "@storybook/channels": "6.5.7", - "@storybook/client-logger": "6.5.7", - "@storybook/core-events": "6.5.7", + "@storybook/channels": "6.5.8", + "@storybook/client-logger": "6.5.8", + "@storybook/core-events": "6.5.8", "@storybook/csf": "0.0.2--canary.4566f4d.1", - "@storybook/router": "6.5.7", + "@storybook/router": 
"6.5.8", "@storybook/semver": "^7.3.2", - "@storybook/theming": "6.5.7", + "@storybook/theming": "6.5.8", "core-js": "^3.8.2", "fast-deep-equal": "^3.1.3", "global": "^4.4.0", @@ -53171,28 +51721,28 @@ } }, "@storybook/builder-webpack4": { - "version": "6.5.7", - "resolved": "https://registry.npmjs.org/@storybook/builder-webpack4/-/builder-webpack4-6.5.7.tgz", - "integrity": "sha512-8OB3mZ2L6kQBiAXlkhna/MHREXIPtqXi2AJLT3+bTzBlqkusH+PwMZxWHbcPl1vZrlNQBC40Elx9tdynGkVQ6g==", + "version": "6.5.8", + "resolved": "https://registry.npmjs.org/@storybook/builder-webpack4/-/builder-webpack4-6.5.8.tgz", + "integrity": "sha512-4/CVp/AlOxCeWZ/DF1TVS/TuzHao4l9KCq7DhL+utFEVl9c/dpgoZXc0Gy2FfHa2RXHKckrH/VUfV2KQk4TNSw==", "dev": true, "requires": { "@babel/core": "^7.12.10", - "@storybook/addons": "6.5.7", - "@storybook/api": "6.5.7", - "@storybook/channel-postmessage": "6.5.7", - "@storybook/channels": "6.5.7", - "@storybook/client-api": "6.5.7", - "@storybook/client-logger": "6.5.7", - "@storybook/components": "6.5.7", - "@storybook/core-common": "6.5.7", - "@storybook/core-events": "6.5.7", - "@storybook/node-logger": "6.5.7", - "@storybook/preview-web": "6.5.7", - "@storybook/router": "6.5.7", + "@storybook/addons": "6.5.8", + "@storybook/api": "6.5.8", + "@storybook/channel-postmessage": "6.5.8", + "@storybook/channels": "6.5.8", + "@storybook/client-api": "6.5.8", + "@storybook/client-logger": "6.5.8", + "@storybook/components": "6.5.8", + "@storybook/core-common": "6.5.8", + "@storybook/core-events": "6.5.8", + "@storybook/node-logger": "6.5.8", + "@storybook/preview-web": "6.5.8", + "@storybook/router": "6.5.8", "@storybook/semver": "^7.3.2", - "@storybook/store": "6.5.7", - "@storybook/theming": "6.5.7", - "@storybook/ui": "6.5.7", + "@storybook/store": "6.5.8", + "@storybook/theming": "6.5.8", + "@storybook/ui": "6.5.8", "@types/node": "^14.0.10 || ^16.0.0", "@types/webpack": "^4.41.26", "autoprefixer": "^9.8.6", @@ -53757,7 +52307,7 @@ "to-regex-range": { "version": 
"2.1.1", "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-2.1.1.tgz", - "integrity": "sha1-fIDBe53+vlmeJzZ+DU3VWQFB2zg=", + "integrity": "sha512-ZZWNfCjUokXXDGXFpZehJIkZqq91BcULFq/Pi7M5i4JnxXdhMKAK682z8bCW3o8Hj1wuuzoKcW3DfVzaP6VuNg==", "dev": true, "requires": { "is-number": "^3.0.0", @@ -53842,27 +52392,27 @@ } }, "@storybook/builder-webpack5": { - "version": "6.5.7", - "resolved": "https://registry.npmjs.org/@storybook/builder-webpack5/-/builder-webpack5-6.5.7.tgz", - "integrity": "sha512-3mbQ09KBTUsFYxnEtR4vr7W1wodRen3o8fANY5XxvE1sr1TopHVOKpIlePjrpcrXcLKFI/ZWrX3IfK88LCuI9w==", + "version": "6.5.8", + "resolved": "https://registry.npmjs.org/@storybook/builder-webpack5/-/builder-webpack5-6.5.8.tgz", + "integrity": "sha512-bc7LSGzOqTUImejsfjWAHEHwBreoPQKS6pfnWYkjKMvfvWOwlHSAxwOSM5DyS4cvpcpMDG8yBJNz2QcvXFVLxA==", "dev": true, "requires": { "@babel/core": "^7.12.10", - "@storybook/addons": "6.5.7", - "@storybook/api": "6.5.7", - "@storybook/channel-postmessage": "6.5.7", - "@storybook/channels": "6.5.7", - "@storybook/client-api": "6.5.7", - "@storybook/client-logger": "6.5.7", - "@storybook/components": "6.5.7", - "@storybook/core-common": "6.5.7", - "@storybook/core-events": "6.5.7", - "@storybook/node-logger": "6.5.7", - "@storybook/preview-web": "6.5.7", - "@storybook/router": "6.5.7", + "@storybook/addons": "6.5.8", + "@storybook/api": "6.5.8", + "@storybook/channel-postmessage": "6.5.8", + "@storybook/channels": "6.5.8", + "@storybook/client-api": "6.5.8", + "@storybook/client-logger": "6.5.8", + "@storybook/components": "6.5.8", + "@storybook/core-common": "6.5.8", + "@storybook/core-events": "6.5.8", + "@storybook/node-logger": "6.5.8", + "@storybook/preview-web": "6.5.8", + "@storybook/router": "6.5.8", "@storybook/semver": "^7.3.2", - "@storybook/store": "6.5.7", - "@storybook/theming": "6.5.7", + "@storybook/store": "6.5.8", + "@storybook/theming": "6.5.8", "@types/node": "^14.0.10 || ^16.0.0", "babel-loader": "^8.0.0", 
"babel-plugin-named-exports-order": "^0.0.2", @@ -53900,9 +52450,9 @@ "dev": true }, "acorn": { - "version": "8.7.0", - "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.7.0.tgz", - "integrity": "sha512-V/LGr1APy+PXIwKebEWrkZPwoeoF+w1jiOBUmuxuiUIaOHtob8Qc9BTrYo7VuI5fR8tqsy+buA2WFooR5olqvQ==", + "version": "8.7.1", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.7.1.tgz", + "integrity": "sha512-Xx54uLJQZ19lKygFXOWsscKUbsBZW0CPykPhVQdhIeIwrbPmJzqeASDInc8nKBnp/JT6igTs82qPXz069H8I/A==", "dev": true, "optional": true, "peer": true @@ -54271,14 +52821,14 @@ } }, "@storybook/channel-postmessage": { - "version": "6.5.7", - "resolved": "https://registry.npmjs.org/@storybook/channel-postmessage/-/channel-postmessage-6.5.7.tgz", - "integrity": "sha512-X4UPgm4O0503CsSnqAM1ht/6R9ofnoMcqFZxYRu9PSvHlhaFR9V9AU4VjQhakH7alFzRsAhcAV2PFVTAdWhgtA==", + "version": "6.5.8", + "resolved": "https://registry.npmjs.org/@storybook/channel-postmessage/-/channel-postmessage-6.5.8.tgz", + "integrity": "sha512-6IkIKk+UMYKk05vN8gWHvvOV/EZNXpQG/5gesGDALjkCyvRmcktHak1a9tHpoihZ3L7/gDwXOZraCZmuy8vBcQ==", "dev": true, "requires": { - "@storybook/channels": "6.5.7", - "@storybook/client-logger": "6.5.7", - "@storybook/core-events": "6.5.7", + "@storybook/channels": "6.5.8", + "@storybook/client-logger": "6.5.8", + "@storybook/core-events": "6.5.8", "core-js": "^3.8.2", "global": "^4.4.0", "qs": "^6.10.0", @@ -54286,22 +52836,22 @@ } }, "@storybook/channel-websocket": { - "version": "6.5.7", - "resolved": "https://registry.npmjs.org/@storybook/channel-websocket/-/channel-websocket-6.5.7.tgz", - "integrity": "sha512-C+l6t3ZgHzU8gL8GJ8c4GMttJglGJIwq1LtJJKnGzx2kJCD0HRMMqc/qFS2K2EwP99hLwwGIlCpom3UZ1aEanA==", + "version": "6.5.8", + "resolved": "https://registry.npmjs.org/@storybook/channel-websocket/-/channel-websocket-6.5.8.tgz", + "integrity": "sha512-lAtvgO0FWsyS3u7uFbsGIYp2aSWJfWU/LOtc3x1K5c84JJAd9fncYkyZMwP1gMbdNgYxJoxe8HXtVtfeNegPuQ==", "dev": true, "requires": { - 
"@storybook/channels": "6.5.7", - "@storybook/client-logger": "6.5.7", + "@storybook/channels": "6.5.8", + "@storybook/client-logger": "6.5.8", "core-js": "^3.8.2", "global": "^4.4.0", "telejson": "^6.0.8" } }, "@storybook/channels": { - "version": "6.5.7", - "resolved": "https://registry.npmjs.org/@storybook/channels/-/channels-6.5.7.tgz", - "integrity": "sha512-v880fWBpWgiWrDmZesTIstNfMZhrPfgXAtLNcL5Z89NAPahsHskOSszc0BDxKN3gb+ZeTKUqHxY57dQdp+1rhg==", + "version": "6.5.8", + "resolved": "https://registry.npmjs.org/@storybook/channels/-/channels-6.5.8.tgz", + "integrity": "sha512-fNql1lEIvWlI1NiRtwFMWOOvfW6qxgeSP6xoqiAJ0b+QYegEFG9UxJDuEvVHq++S81FulgQ5U+p+5R9XSV19tQ==", "dev": true, "requires": { "core-js": "^3.8.2", @@ -54310,18 +52860,18 @@ } }, "@storybook/client-api": { - "version": "6.5.7", - "resolved": "https://registry.npmjs.org/@storybook/client-api/-/client-api-6.5.7.tgz", - "integrity": "sha512-na8NZhB6GnAGp3jRTV9wwue3WGwSZoi5jfxrKSYMPL/s/2n07/soixHggqueBDXuNBrPoJaXbY/nRHmSjLwxtQ==", + "version": "6.5.8", + "resolved": "https://registry.npmjs.org/@storybook/client-api/-/client-api-6.5.8.tgz", + "integrity": "sha512-mdU+qQ4+T2OUbEnl+3MWRKxEPju/EOIUg66hMgmif8c5u7YFYBFulUMUYLICMjll8Jlu+37+g+qO3K2eEz6CEw==", "dev": true, "requires": { - "@storybook/addons": "6.5.7", - "@storybook/channel-postmessage": "6.5.7", - "@storybook/channels": "6.5.7", - "@storybook/client-logger": "6.5.7", - "@storybook/core-events": "6.5.7", + "@storybook/addons": "6.5.8", + "@storybook/channel-postmessage": "6.5.8", + "@storybook/channels": "6.5.8", + "@storybook/client-logger": "6.5.8", + "@storybook/core-events": "6.5.8", "@storybook/csf": "0.0.2--canary.4566f4d.1", - "@storybook/store": "6.5.7", + "@storybook/store": "6.5.8", "@types/qs": "^6.9.5", "@types/webpack-env": "^1.16.0", "core-js": "^3.8.2", @@ -54338,9 +52888,9 @@ } }, "@storybook/client-logger": { - "version": "6.5.7", - "resolved": "https://registry.npmjs.org/@storybook/client-logger/-/client-logger-6.5.7.tgz", - 
"integrity": "sha512-ycDy1kXeXRg3djSTXRGMVxc0kvaWw/UhHDs2VGFmOPScsoeWpdbePHXJMFbsqippxuexpsofqTryBwH2b6BPhw==", + "version": "6.5.8", + "resolved": "https://registry.npmjs.org/@storybook/client-logger/-/client-logger-6.5.8.tgz", + "integrity": "sha512-dH6HSaVuOIMHy1+rpsqcD3SJxVZEEbuEtsNpdUGwLJaIuduhUJJpM2xQfUW0siZDyrgwoa+znll+G0YNUbv7sg==", "dev": true, "requires": { "core-js": "^3.8.2", @@ -54348,14 +52898,14 @@ } }, "@storybook/components": { - "version": "6.5.7", - "resolved": "https://registry.npmjs.org/@storybook/components/-/components-6.5.7.tgz", - "integrity": "sha512-xSOaOK8q6bXYkmN4LZKucvXU2HRHqKwwTafFDh5yzsCSEB2VQIJlyo4ePVyv/GJgBUX6+WdSA7c5r5ePXK6IYQ==", + "version": "6.5.8", + "resolved": "https://registry.npmjs.org/@storybook/components/-/components-6.5.8.tgz", + "integrity": "sha512-YE+LZ1/GXoqertxodsf+L9ehcohbICRAxgE/iNqc7MZfk95SD3XRSUbxhCpGe8QTIZJpzs1tK4LFZ3Fg5w/+Lg==", "dev": true, "requires": { - "@storybook/client-logger": "6.5.7", + "@storybook/client-logger": "6.5.8", "@storybook/csf": "0.0.2--canary.4566f4d.1", - "@storybook/theming": "6.5.7", + "@storybook/theming": "6.5.8", "@types/react-syntax-highlighter": "11.0.5", "core-js": "^3.8.2", "qs": "^6.10.0", @@ -54365,31 +52915,31 @@ } }, "@storybook/core": { - "version": "6.5.7", - "resolved": "https://registry.npmjs.org/@storybook/core/-/core-6.5.7.tgz", - "integrity": "sha512-YSu2qur1E5y9rjVspchtCfupPT3y1XyjBInhwzo8jC3rvm2WY0RS80VQU3dga4QBllO1M+cDmLzmOEPL82+Juw==", + "version": "6.5.8", + "resolved": "https://registry.npmjs.org/@storybook/core/-/core-6.5.8.tgz", + "integrity": "sha512-+Fv4n1E5N4Avty9GcRbz4vB2IWH//se2OUU+RTT3vneCOGjyus5bj0Or6GU5wef5UGuvHF78mHg/frhWpguzsw==", "dev": true, "requires": { - "@storybook/core-client": "6.5.7", - "@storybook/core-server": "6.5.7" + "@storybook/core-client": "6.5.8", + "@storybook/core-server": "6.5.8" } }, "@storybook/core-client": { - "version": "6.5.7", - "resolved": "https://registry.npmjs.org/@storybook/core-client/-/core-client-6.5.7.tgz", - 
"integrity": "sha512-GL7m33tpEyornhfnTddbvDuLkA9EMe1zKv9oZGsUYo78cWRTiEibYyHegIi9/ThplRXvpFR/5uHY4Zx5Z5rxJg==", + "version": "6.5.8", + "resolved": "https://registry.npmjs.org/@storybook/core-client/-/core-client-6.5.8.tgz", + "integrity": "sha512-8x8qKQ2clvpfDcoWrNBmQ8Xt9z/i32TFIBp4PEZMcbB7eqo517nzfllLiXDipiJgO7BGxKtY5CRHQ9pAU9G27A==", "dev": true, "requires": { - "@storybook/addons": "6.5.7", - "@storybook/channel-postmessage": "6.5.7", - "@storybook/channel-websocket": "6.5.7", - "@storybook/client-api": "6.5.7", - "@storybook/client-logger": "6.5.7", - "@storybook/core-events": "6.5.7", + "@storybook/addons": "6.5.8", + "@storybook/channel-postmessage": "6.5.8", + "@storybook/channel-websocket": "6.5.8", + "@storybook/client-api": "6.5.8", + "@storybook/client-logger": "6.5.8", + "@storybook/core-events": "6.5.8", "@storybook/csf": "0.0.2--canary.4566f4d.1", - "@storybook/preview-web": "6.5.7", - "@storybook/store": "6.5.7", - "@storybook/ui": "6.5.7", + "@storybook/preview-web": "6.5.8", + "@storybook/store": "6.5.8", + "@storybook/ui": "6.5.8", "airbnb-js-shims": "^2.2.1", "ansi-to-html": "^0.6.11", "core-js": "^3.8.2", @@ -54403,9 +52953,9 @@ } }, "@storybook/core-common": { - "version": "6.5.7", - "resolved": "https://registry.npmjs.org/@storybook/core-common/-/core-common-6.5.7.tgz", - "integrity": "sha512-/b1oQlmhek8tKDu9ky2O1oEk9g2giAPpl192yRz4lIxap5CFJ7RCfgbkq+F3JBXnH2P84BufC0x3dj4jvBhxCw==", + "version": "6.5.8", + "resolved": "https://registry.npmjs.org/@storybook/core-common/-/core-common-6.5.8.tgz", + "integrity": "sha512-ELGKLMx1d0oEA2LT+fsmo85X2RNE1EO+It7B1bw//g7jyf1hmZ7t3lXMZUCqt7eml1qy1N72LDkfmmU+H9H6ww==", "dev": true, "requires": { "@babel/core": "^7.12.10", @@ -54430,7 +52980,7 @@ "@babel/preset-react": "^7.12.10", "@babel/preset-typescript": "^7.12.7", "@babel/register": "^7.12.1", - "@storybook/node-logger": "6.5.7", + "@storybook/node-logger": "6.5.8", "@storybook/semver": "^7.3.2", "@types/node": "^14.0.10 || ^16.0.0", 
"@types/pretty-hrtime": "^1.0.0", @@ -55077,7 +53627,7 @@ "to-regex-range": { "version": "2.1.1", "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-2.1.1.tgz", - "integrity": "sha1-fIDBe53+vlmeJzZ+DU3VWQFB2zg=", + "integrity": "sha512-ZZWNfCjUokXXDGXFpZehJIkZqq91BcULFq/Pi7M5i4JnxXdhMKAK682z8bCW3o8Hj1wuuzoKcW3DfVzaP6VuNg==", "dev": true, "requires": { "is-number": "^3.0.0", @@ -55142,32 +53692,32 @@ } }, "@storybook/core-events": { - "version": "6.5.7", - "resolved": "https://registry.npmjs.org/@storybook/core-events/-/core-events-6.5.7.tgz", - "integrity": "sha512-epqYy67Ypry5QdCt7FpN57/X9uuS7R2+DLFORZIpL/SJG1dIdN4POQ1icWOhPzHl+eiSgaV7e2oPaUsN+LPhJQ==", + "version": "6.5.8", + "resolved": "https://registry.npmjs.org/@storybook/core-events/-/core-events-6.5.8.tgz", + "integrity": "sha512-lzG4Lg65WFYvjs2k/E3CP4+eyPexEGrDyRMO9Pbj9H9x+eosYptauEbT/wXF83bmUWZKLWWVUAZX7hDcxBO8cw==", "dev": true, "requires": { "core-js": "^3.8.2" } }, "@storybook/core-server": { - "version": "6.5.7", - "resolved": "https://registry.npmjs.org/@storybook/core-server/-/core-server-6.5.7.tgz", - "integrity": "sha512-CGwFZ5kmKaCS/+tcrAbqQu4Owq86wXkWRapJB55S8AlUsf3c9gEC8a3+Ed9tZUlmjSH56CnDDfmt7AleToaQ9w==", + "version": "6.5.8", + "resolved": "https://registry.npmjs.org/@storybook/core-server/-/core-server-6.5.8.tgz", + "integrity": "sha512-ti7+MW1xzD9O0JLwgZTwulxhJx5YGPNu+hRpGhJSjKrqGX1h6K6ilmkBSHvyLqpiE+F4mxvqb5Rx3KBIEdEgbw==", "dev": true, "requires": { "@discoveryjs/json-ext": "^0.5.3", - "@storybook/builder-webpack4": "6.5.7", - "@storybook/core-client": "6.5.7", - "@storybook/core-common": "6.5.7", - "@storybook/core-events": "6.5.7", + "@storybook/builder-webpack4": "6.5.8", + "@storybook/core-client": "6.5.8", + "@storybook/core-common": "6.5.8", + "@storybook/core-events": "6.5.8", "@storybook/csf": "0.0.2--canary.4566f4d.1", - "@storybook/csf-tools": "6.5.7", - "@storybook/manager-webpack4": "6.5.7", - "@storybook/node-logger": "6.5.7", + "@storybook/csf-tools": 
"6.5.8", + "@storybook/manager-webpack4": "6.5.8", + "@storybook/node-logger": "6.5.8", "@storybook/semver": "^7.3.2", - "@storybook/store": "6.5.7", - "@storybook/telemetry": "6.5.7", + "@storybook/store": "6.5.8", + "@storybook/telemetry": "6.5.8", "@types/node": "^14.0.10 || ^16.0.0", "@types/node-fetch": "^2.5.7", "@types/pretty-hrtime": "^1.0.0", @@ -55757,7 +54307,7 @@ "to-regex-range": { "version": "2.1.1", "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-2.1.1.tgz", - "integrity": "sha1-fIDBe53+vlmeJzZ+DU3VWQFB2zg=", + "integrity": "sha512-ZZWNfCjUokXXDGXFpZehJIkZqq91BcULFq/Pi7M5i4JnxXdhMKAK682z8bCW3o8Hj1wuuzoKcW3DfVzaP6VuNg==", "dev": true, "requires": { "is-number": "^3.0.0", @@ -55816,9 +54366,9 @@ } }, "ws": { - "version": "8.7.0", - "resolved": "https://registry.npmjs.org/ws/-/ws-8.7.0.tgz", - "integrity": "sha512-c2gsP0PRwcLFzUiA8Mkr37/MI7ilIlHQxaEAtd0uNMbVMoy8puJyafRlm0bV9MbGSabUPeLrRRaqIBcFcA2Pqg==", + "version": "8.8.0", + "resolved": "https://registry.npmjs.org/ws/-/ws-8.8.0.tgz", + "integrity": "sha512-JDAgSYQ1ksuwqfChJusw1LSJ8BizJ2e/vVu5Lxjq3YvNJNlROv1ui4i+c/kUUrPheBvQl4c5UbERhTwKa6QBJQ==", "dev": true, "requires": {} }, @@ -55840,9 +54390,9 @@ } }, "@storybook/csf-tools": { - "version": "6.5.7", - "resolved": "https://registry.npmjs.org/@storybook/csf-tools/-/csf-tools-6.5.7.tgz", - "integrity": "sha512-/vBaknzD8c7H/Zsz0gwhmlNlMwe5slZwXadi6rAQXDkKLzaR1kmz4cQFs8yDR1wWpXaGjNvQxOUAGYjFoGQxzA==", + "version": "6.5.8", + "resolved": "https://registry.npmjs.org/@storybook/csf-tools/-/csf-tools-6.5.8.tgz", + "integrity": "sha512-4VrjIMxKcp29OFSMDub52aQOMP4EvtZ5eWZkPeORRNQoJsnQaxhF9GGf71QdSaAQZhMoxdvmpA47ehrFk8Rnfw==", "dev": true, "requires": { "@babel/core": "^7.12.10", @@ -55892,14 +54442,14 @@ } }, "@storybook/docs-tools": { - "version": "6.5.7", - "resolved": "https://registry.npmjs.org/@storybook/docs-tools/-/docs-tools-6.5.7.tgz", - "integrity": 
"sha512-Aw9uUsqeuw0Z9fpiwxrstMNjNGB9s1Tm57SpMF8ibjLYBYFf5Apz5CwDX7bm6YFtCweaawx4MeQta8qnQMWCFw==", + "version": "6.5.8", + "resolved": "https://registry.npmjs.org/@storybook/docs-tools/-/docs-tools-6.5.8.tgz", + "integrity": "sha512-CWMW+3LSstvQmHKV5ggPR1beQZTpwCXEhfysZ9u4Yp/4fcoDIuQ7DTOK5uNFynGCGl1FG3lATriEOhEZ3bZCvQ==", "dev": true, "requires": { "@babel/core": "^7.12.10", "@storybook/csf": "0.0.2--canary.4566f4d.1", - "@storybook/store": "6.5.7", + "@storybook/store": "6.5.8", "core-js": "^3.8.2", "doctrine": "^3.0.0", "lodash": "^4.17.21", @@ -55907,20 +54457,20 @@ } }, "@storybook/manager-webpack4": { - "version": "6.5.7", - "resolved": "https://registry.npmjs.org/@storybook/manager-webpack4/-/manager-webpack4-6.5.7.tgz", - "integrity": "sha512-RmGsr/6PNsafaSm8aTD7e2VXSKT8BQ6Hkg6TAArLoS2TpIUvrNuM2hEqOHzm2POcApC+OE/HN1H0GiXBkH533Q==", + "version": "6.5.8", + "resolved": "https://registry.npmjs.org/@storybook/manager-webpack4/-/manager-webpack4-6.5.8.tgz", + "integrity": "sha512-qW5/L3cJHvtNi5ylDxObALZWaAHMsWQlPP8GRxm95NHpff4CfRo/qs7puY9ZeLmJSic0KchoHEH/8AScflLOgA==", "dev": true, "requires": { "@babel/core": "^7.12.10", "@babel/plugin-transform-template-literals": "^7.12.1", "@babel/preset-react": "^7.12.10", - "@storybook/addons": "6.5.7", - "@storybook/core-client": "6.5.7", - "@storybook/core-common": "6.5.7", - "@storybook/node-logger": "6.5.7", - "@storybook/theming": "6.5.7", - "@storybook/ui": "6.5.7", + "@storybook/addons": "6.5.8", + "@storybook/core-client": "6.5.8", + "@storybook/core-common": "6.5.8", + "@storybook/node-logger": "6.5.8", + "@storybook/theming": "6.5.8", + "@storybook/ui": "6.5.8", "@types/node": "^14.0.10 || ^16.0.0", "@types/webpack": "^4.41.26", "babel-loader": "^8.0.0", @@ -56543,7 +55093,7 @@ "to-regex-range": { "version": "2.1.1", "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-2.1.1.tgz", - "integrity": "sha1-fIDBe53+vlmeJzZ+DU3VWQFB2zg=", + "integrity": 
"sha512-ZZWNfCjUokXXDGXFpZehJIkZqq91BcULFq/Pi7M5i4JnxXdhMKAK682z8bCW3o8Hj1wuuzoKcW3DfVzaP6VuNg==", "dev": true, "requires": { "is-number": "^3.0.0", @@ -56627,20 +55177,20 @@ } }, "@storybook/manager-webpack5": { - "version": "6.5.7", - "resolved": "https://registry.npmjs.org/@storybook/manager-webpack5/-/manager-webpack5-6.5.7.tgz", - "integrity": "sha512-4TZKe71noCRui8sUxSuSqO6zMnCxCLn7dE1dOlCr/UvyZbCaGWACO5olUDQrT+n1glZL8i9L998JGQroksucNw==", + "version": "6.5.8", + "resolved": "https://registry.npmjs.org/@storybook/manager-webpack5/-/manager-webpack5-6.5.8.tgz", + "integrity": "sha512-foW/ZvTqGZAl4TfcfGKdS3RlaBDDAgEjUCbCaVShlZRshZ8tzWBVu3JQFqbPVGslH89T5qp9DUYoN/SJqTUpcg==", "dev": true, "requires": { "@babel/core": "^7.12.10", "@babel/plugin-transform-template-literals": "^7.12.1", "@babel/preset-react": "^7.12.10", - "@storybook/addons": "6.5.7", - "@storybook/core-client": "6.5.7", - "@storybook/core-common": "6.5.7", - "@storybook/node-logger": "6.5.7", - "@storybook/theming": "6.5.7", - "@storybook/ui": "6.5.7", + "@storybook/addons": "6.5.8", + "@storybook/core-client": "6.5.8", + "@storybook/core-common": "6.5.8", + "@storybook/node-logger": "6.5.8", + "@storybook/theming": "6.5.8", + "@storybook/ui": "6.5.8", "@types/node": "^14.0.10 || ^16.0.0", "babel-loader": "^8.0.0", "case-sensitive-paths-webpack-plugin": "^2.3.0", @@ -56679,9 +55229,9 @@ "dev": true }, "acorn": { - "version": "8.7.0", - "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.7.0.tgz", - "integrity": "sha512-V/LGr1APy+PXIwKebEWrkZPwoeoF+w1jiOBUmuxuiUIaOHtob8Qc9BTrYo7VuI5fR8tqsy+buA2WFooR5olqvQ==", + "version": "8.7.1", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.7.1.tgz", + "integrity": "sha512-Xx54uLJQZ19lKygFXOWsscKUbsBZW0CPykPhVQdhIeIwrbPmJzqeASDInc8nKBnp/JT6igTs82qPXz069H8I/A==", "dev": true, "optional": true, "peer": true @@ -57204,9 +55754,9 @@ } }, "@storybook/node-logger": { - "version": "6.5.7", - "resolved": 
"https://registry.npmjs.org/@storybook/node-logger/-/node-logger-6.5.7.tgz", - "integrity": "sha512-OrHu5p2E5i7P2v2hQAOtZw6Od1e2nrP6L7w5SxUPgccUnKUD9dRX5Y8qbAcPZO3XCkMLjpjAbC1xBXG0eFkn9g==", + "version": "6.5.8", + "resolved": "https://registry.npmjs.org/@storybook/node-logger/-/node-logger-6.5.8.tgz", + "integrity": "sha512-BHdkSipgjnfsh4FRYbV2R0npM5gVx9JLRsDQ0KiTolRpN4SU98kT/6885zb9jZg6I0EY+UG9Qdr3fvL9VLpY1g==", "dev": true, "requires": { "@types/npmlog": "^4.1.2", @@ -57268,9 +55818,9 @@ } }, "@storybook/postinstall": { - "version": "6.5.7", - "resolved": "https://registry.npmjs.org/@storybook/postinstall/-/postinstall-6.5.7.tgz", - "integrity": "sha512-902JjgB2o+NiiLCPV0b4GHX9SbnY1OkvfvmkqpD3UqWh8djpkSQwvli9npM1J2NEu4BxCqbifYJI7V4JmZbdsw==", + "version": "6.5.8", + "resolved": "https://registry.npmjs.org/@storybook/postinstall/-/postinstall-6.5.8.tgz", + "integrity": "sha512-Z6zQnBOaMj+gHtF1XPMpwTIxYRCmh6eNirrJLrkPk5c+fKXtw6+vNCbmPvsyTGxGEHnyn/tYwe1fvwJTHDctUw==", "dev": true, "requires": { "core-js": "^3.8.2" @@ -57397,9 +55947,9 @@ "dev": true }, "type-fest": { - "version": "2.11.0", - "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-2.11.0.tgz", - "integrity": "sha512-GwRKR1jZMAQP/hVR929DWB5Z2lwSIM/nNcHEfDj2E0vOMhcYbqFxGKE5JaSzMdzmEtWJiamEn6VwHs/YVXVhEQ==", + "version": "2.13.0", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-2.13.0.tgz", + "integrity": "sha512-lPfAm42MxE4/456+QyIaaVBAwgpJb6xZ8PRu09utnhPdWwcyj9vgy6Sq0Z5yNbJ21EdxB5dRU/Qg8bsyAMtlcw==", "dev": true, "optional": true, "peer": true @@ -57407,17 +55957,17 @@ } }, "@storybook/preview-web": { - "version": "6.5.7", - "resolved": "https://registry.npmjs.org/@storybook/preview-web/-/preview-web-6.5.7.tgz", - "integrity": "sha512-EH8gdl334D8EDVL1VJjRURcUou5Sv6BwgismL4E6wjSFmWxL9egxYDnGJJEh3mjIkAtGb0zpksYn/VNWPA8c8A==", + "version": "6.5.8", + "resolved": "https://registry.npmjs.org/@storybook/preview-web/-/preview-web-6.5.8.tgz", + "integrity": 
"sha512-jEEAgvTVZfFA0B20mRJfLW6dPA5mG5PxUJtjMx6wH4Yw4+i3Sld/U63hTRt7ktpKdrcu4lX9E+PuaRLPq7S2kg==", "dev": true, "requires": { - "@storybook/addons": "6.5.7", - "@storybook/channel-postmessage": "6.5.7", - "@storybook/client-logger": "6.5.7", - "@storybook/core-events": "6.5.7", + "@storybook/addons": "6.5.8", + "@storybook/channel-postmessage": "6.5.8", + "@storybook/client-logger": "6.5.8", + "@storybook/core-events": "6.5.8", "@storybook/csf": "0.0.2--canary.4566f4d.1", - "@storybook/store": "6.5.7", + "@storybook/store": "6.5.8", "ansi-to-html": "^0.6.11", "core-js": "^3.8.2", "global": "^4.4.0", @@ -57431,24 +55981,24 @@ } }, "@storybook/react": { - "version": "6.5.7", - "resolved": "https://registry.npmjs.org/@storybook/react/-/react-6.5.7.tgz", - "integrity": "sha512-jMY1vk1WL1otEODl5BxD1kSh5Eqg+SvZW5CJ7sS6q53i3teOhaGhugvuSTuV9lnBzLOZu8atIdFL0ewdOkpwsg==", + "version": "6.5.8", + "resolved": "https://registry.npmjs.org/@storybook/react/-/react-6.5.8.tgz", + "integrity": "sha512-LdObfhhPb9gAFBtRNb3awYJe1qMiYeda1ppkj0ZvccbV04YrmbW5bzYvfOCvU6D34ugbQJhJyWuvraO/0EJK6w==", "dev": true, "requires": { "@babel/preset-flow": "^7.12.1", "@babel/preset-react": "^7.12.10", "@pmmmwh/react-refresh-webpack-plugin": "^0.5.3", - "@storybook/addons": "6.5.7", - "@storybook/client-logger": "6.5.7", - "@storybook/core": "6.5.7", - "@storybook/core-common": "6.5.7", + "@storybook/addons": "6.5.8", + "@storybook/client-logger": "6.5.8", + "@storybook/core": "6.5.8", + "@storybook/core-common": "6.5.8", "@storybook/csf": "0.0.2--canary.4566f4d.1", - "@storybook/docs-tools": "6.5.7", - "@storybook/node-logger": "6.5.7", + "@storybook/docs-tools": "6.5.8", + "@storybook/node-logger": "6.5.8", "@storybook/react-docgen-typescript-plugin": "1.0.2-canary.6.9d540b91e815f8fc2f8829189deb00553559ff63.0", "@storybook/semver": "^7.3.2", - "@storybook/store": "6.5.7", + "@storybook/store": "6.5.8", "@types/estree": "^0.0.51", "@types/node": "^14.14.20 || ^16.0.0", "@types/webpack-env": 
"^1.16.0", @@ -57626,9 +56176,9 @@ "dev": true }, "type-fest": { - "version": "2.11.0", - "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-2.11.0.tgz", - "integrity": "sha512-GwRKR1jZMAQP/hVR929DWB5Z2lwSIM/nNcHEfDj2E0vOMhcYbqFxGKE5JaSzMdzmEtWJiamEn6VwHs/YVXVhEQ==", + "version": "2.13.0", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-2.13.0.tgz", + "integrity": "sha512-lPfAm42MxE4/456+QyIaaVBAwgpJb6xZ8PRu09utnhPdWwcyj9vgy6Sq0Z5yNbJ21EdxB5dRU/Qg8bsyAMtlcw==", "dev": true, "optional": true, "peer": true @@ -57665,12 +56215,12 @@ } }, "@storybook/router": { - "version": "6.5.7", - "resolved": "https://registry.npmjs.org/@storybook/router/-/router-6.5.7.tgz", - "integrity": "sha512-edWEdAb8O0rSgdXoBZDDuNlQg2cOmC/nJ6gXj9zBotzmXqsbxWyjKGooG1dU6dnKshUqE1RmWF7/N1WMluLf0A==", + "version": "6.5.8", + "resolved": "https://registry.npmjs.org/@storybook/router/-/router-6.5.8.tgz", + "integrity": "sha512-tseNJpZ2ZzVYowjekUMpGJVVRMrwOkttieD9mRbHrhh+2n7b+SoMKnuLi3ow0xeOyPL8ZDng2FgRjQzQHXA5Sw==", "dev": true, "requires": { - "@storybook/client-logger": "6.5.7", + "@storybook/client-logger": "6.5.8", "core-js": "^3.8.2", "regenerator-runtime": "^0.13.7" } @@ -57686,13 +56236,13 @@ } }, "@storybook/source-loader": { - "version": "6.5.7", - "resolved": "https://registry.npmjs.org/@storybook/source-loader/-/source-loader-6.5.7.tgz", - "integrity": "sha512-nj24TSGdF9J1gD5Fj9Z2hPRAQwqBJoBKD/fmTSFZop0qaJOOyeuxZR5022dQh8UWWoBa3WOQADMTNi5RqQZkiA==", + "version": "6.5.8", + "resolved": "https://registry.npmjs.org/@storybook/source-loader/-/source-loader-6.5.8.tgz", + "integrity": "sha512-3bVxXKE2o6lS4WGga/S7WwgITxPQ96qsY+pQ1I7A+e4/cKSmZxlVWM9qfMW2ScmHTVoZE0Ujsmn6DWftxzCyrQ==", "dev": true, "requires": { - "@storybook/addons": "6.5.7", - "@storybook/client-logger": "6.5.7", + "@storybook/addons": "6.5.8", + "@storybook/client-logger": "6.5.8", "@storybook/csf": "0.0.2--canary.4566f4d.1", "core-js": "^3.8.2", "estraverse": "^5.2.0", @@ -57735,14 +56285,14 @@ } }, 
"@storybook/store": { - "version": "6.5.7", - "resolved": "https://registry.npmjs.org/@storybook/store/-/store-6.5.7.tgz", - "integrity": "sha512-d64towcdylC6TXNL2oJklCpwN3XcUGgZzQ9zgoV8BUlOlsj9tNq8eo95uzTURnLg1Q5uHoDDKWuXrrKj03HHxw==", + "version": "6.5.8", + "resolved": "https://registry.npmjs.org/@storybook/store/-/store-6.5.8.tgz", + "integrity": "sha512-5rhGjN/O0oLebRv947B0vgapq48qBBBYYOgq4krRUYU2ecS6LUgtAHR/kTa324o9aBO8cnIXHH78jZcSvMiJlQ==", "dev": true, "requires": { - "@storybook/addons": "6.5.7", - "@storybook/client-logger": "6.5.7", - "@storybook/core-events": "6.5.7", + "@storybook/addons": "6.5.8", + "@storybook/client-logger": "6.5.8", + "@storybook/core-events": "6.5.8", "@storybook/csf": "0.0.2--canary.4566f4d.1", "core-js": "^3.8.2", "fast-deep-equal": "^3.1.3", @@ -57758,13 +56308,13 @@ } }, "@storybook/telemetry": { - "version": "6.5.7", - "resolved": "https://registry.npmjs.org/@storybook/telemetry/-/telemetry-6.5.7.tgz", - "integrity": "sha512-RHrjAConMqGIsu1TgNXztWtWOXTvvCHDWyGoLagCgZYgjGJ4sukp+ZtrbkayNDkkWWD0lpMzsdDEYCJuru/Sig==", + "version": "6.5.8", + "resolved": "https://registry.npmjs.org/@storybook/telemetry/-/telemetry-6.5.8.tgz", + "integrity": "sha512-QnAhYF8CwcjC1bT2PK7Zqvo6E42TPl0MY6JS+H6qSZU/BmYeS0It8ZURNfPsA/OzVVLHUkQs96CisKh3N0WWaw==", "dev": true, "requires": { - "@storybook/client-logger": "6.5.7", - "@storybook/core-common": "6.5.7", + "@storybook/client-logger": "6.5.8", + "@storybook/core-common": "6.5.8", "chalk": "^4.1.0", "core-js": "^3.8.2", "detect-package-manager": "^2.0.1", @@ -57857,31 +56407,31 @@ } }, "@storybook/theming": { - "version": "6.5.7", - "resolved": "https://registry.npmjs.org/@storybook/theming/-/theming-6.5.7.tgz", - "integrity": "sha512-6zp1V84DSBcS8BtFOCJlF2/nIonjQmr+dILPxaM3lCm/X003i2jAQrBKTfPlmzCeDn07PBhzHaRJ3wJskfmeNw==", + "version": "6.5.8", + "resolved": "https://registry.npmjs.org/@storybook/theming/-/theming-6.5.8.tgz", + "integrity": 
"sha512-1VaKHqj38Ls1bJwDpg3+aEOlvVib/DCFkP6WYrP/AQtNAzxiyw5WkaoRlTVJZvCdu5TxjpG4O6/Ai5TI9QftIg==", "dev": true, "requires": { - "@storybook/client-logger": "6.5.7", + "@storybook/client-logger": "6.5.8", "core-js": "^3.8.2", "regenerator-runtime": "^0.13.7" } }, "@storybook/ui": { - "version": "6.5.7", - "resolved": "https://registry.npmjs.org/@storybook/ui/-/ui-6.5.7.tgz", - "integrity": "sha512-NOg44bc/w7FweuM2fa99PxsgI9qoG2p5vhTQ4MOI/7QnOUDn+EenlapsRos+/Sk2XTaB2QmM43boUkravMSouA==", - "dev": true, - "requires": { - "@storybook/addons": "6.5.7", - "@storybook/api": "6.5.7", - "@storybook/channels": "6.5.7", - "@storybook/client-logger": "6.5.7", - "@storybook/components": "6.5.7", - "@storybook/core-events": "6.5.7", - "@storybook/router": "6.5.7", + "version": "6.5.8", + "resolved": "https://registry.npmjs.org/@storybook/ui/-/ui-6.5.8.tgz", + "integrity": "sha512-rL09kxgY9pCVbxr/VUK4b5FL5VbALfciZR+50sNT1EcTDb9k0OPeqx7a4Ptc+KNkgyPdSTxUGvhzVqH5PYrhZQ==", + "dev": true, + "requires": { + "@storybook/addons": "6.5.8", + "@storybook/api": "6.5.8", + "@storybook/channels": "6.5.8", + "@storybook/client-logger": "6.5.8", + "@storybook/components": "6.5.8", + "@storybook/core-events": "6.5.8", + "@storybook/router": "6.5.8", "@storybook/semver": "^7.3.2", - "@storybook/theming": "6.5.7", + "@storybook/theming": "6.5.8", "core-js": "^3.8.2", "regenerator-runtime": "^0.13.7", "resolve-from": "^5.0.0" @@ -58556,11 +57106,6 @@ "@types/node": "*" } }, - "@types/invariant": { - "version": "2.2.33", - "resolved": "https://registry.npmjs.org/@types/invariant/-/invariant-2.2.33.tgz", - "integrity": "sha512-/jUNmS8d4bCKdqslfxW6dg/9Gksfzxz67IYfqApHn+HvHlMVXwYv2zpTDnS/yaK9BB0i0GlBTaYci0EFE62Hmw==" - }, "@types/is-function": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/@types/is-function/-/is-function-1.0.1.tgz", @@ -59932,14 +58477,6 @@ "type-fest": "^0.11.0" } }, - "ansi-html": { - "version": "0.0.7", - "resolved": 
"https://registry.npmjs.org/ansi-html/-/ansi-html-0.0.7.tgz", - "integrity": "sha1-gTWEAhliqenm/QOflA0S9WynhZ4=", - "dev": true, - "optional": true, - "peer": true - }, "ansi-html-community": { "version": "0.0.8", "resolved": "https://registry.npmjs.org/ansi-html-community/-/ansi-html-community-0.0.8.tgz", @@ -60236,14 +58773,6 @@ "dev": true, "optional": true }, - "async-limiter": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/async-limiter/-/async-limiter-1.0.1.tgz", - "integrity": "sha512-csOlWGAcRFJaI6m+F2WKdnMKr4HhdhFVBk0H/QbJFMCr+uO2kwohwXQPxw/9OCxp05r5ghVBFSyioixx3gfkNQ==", - "dev": true, - "optional": true, - "peer": true - }, "asynckit": { "version": "0.4.0", "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", @@ -62014,7 +60543,8 @@ "commondir": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/commondir/-/commondir-1.0.1.tgz", - "integrity": "sha1-3dgA2gxmEnOTzKWVDqloo6rxJTs=" + "integrity": "sha1-3dgA2gxmEnOTzKWVDqloo6rxJTs=", + "dev": true }, "compare-versions": { "version": "3.6.0", @@ -62525,7 +61055,7 @@ "to-regex-range": { "version": "2.1.1", "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-2.1.1.tgz", - "integrity": "sha1-fIDBe53+vlmeJzZ+DU3VWQFB2zg=", + "integrity": "sha512-ZZWNfCjUokXXDGXFpZehJIkZqq91BcULFq/Pi7M5i4JnxXdhMKAK682z8bCW3o8Hj1wuuzoKcW3DfVzaP6VuNg==", "dev": true, "requires": { "is-number": "^3.0.0", @@ -63017,128 +61547,6 @@ "untildify": "^2.0.0" } }, - "default-gateway": { - "version": "4.2.0", - "resolved": "https://registry.npmjs.org/default-gateway/-/default-gateway-4.2.0.tgz", - "integrity": "sha512-h6sMrVB1VMWVrW13mSc6ia/DwYYw5MN6+exNu1OaJeFac5aSAvwM7lZ0NVfTABuSkQelr4h5oebg3KB1XPdjgA==", - "dev": true, - "optional": true, - "peer": true, - "requires": { - "execa": "^1.0.0", - "ip-regex": "^2.1.0" - }, - "dependencies": { - "cross-spawn": { - "version": "6.0.5", - "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-6.0.5.tgz", - "integrity": 
"sha512-eTVLrBSt7fjbDygz805pMnstIs2VTBNkRm0qxZd+M7A5XDdxVRWO5MxGBXZhjY4cqLYLdtrGqRf8mBPmzwSpWQ==", - "dev": true, - "optional": true, - "peer": true, - "requires": { - "nice-try": "^1.0.4", - "path-key": "^2.0.1", - "semver": "^5.5.0", - "shebang-command": "^1.2.0", - "which": "^1.2.9" - } - }, - "execa": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/execa/-/execa-1.0.0.tgz", - "integrity": "sha512-adbxcyWV46qiHyvSp50TKt05tB4tK3HcmF7/nxfAdhnox83seTDbwnaqKO4sXRy7roHAIFqJP/Rw/AuEbX61LA==", - "dev": true, - "optional": true, - "peer": true, - "requires": { - "cross-spawn": "^6.0.0", - "get-stream": "^4.0.0", - "is-stream": "^1.1.0", - "npm-run-path": "^2.0.0", - "p-finally": "^1.0.0", - "signal-exit": "^3.0.0", - "strip-eof": "^1.0.0" - } - }, - "get-stream": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-4.1.0.tgz", - "integrity": "sha512-GMat4EJ5161kIy2HevLlr4luNjBgvmj413KaQA7jt4V8B4RDsfpHk7WQ9GVqfYyyx8OS/L66Kox+rJRNklLK7w==", - "dev": true, - "optional": true, - "peer": true, - "requires": { - "pump": "^3.0.0" - } - }, - "is-stream": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-1.1.0.tgz", - "integrity": "sha1-EtSj3U5o4Lec6428hBc66A2RykQ=", - "dev": true, - "optional": true, - "peer": true - }, - "npm-run-path": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/npm-run-path/-/npm-run-path-2.0.2.tgz", - "integrity": "sha1-NakjLfo11wZ7TLLd8jV7GHFTbF8=", - "dev": true, - "optional": true, - "peer": true, - "requires": { - "path-key": "^2.0.0" - } - }, - "path-key": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/path-key/-/path-key-2.0.1.tgz", - "integrity": "sha1-QRyttXTFoUDTpLGRDUDYDMn0C0A=", - "dev": true, - "optional": true, - "peer": true - }, - "semver": { - "version": "5.7.1", - "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", - "integrity": 
"sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==", - "dev": true, - "optional": true, - "peer": true - }, - "shebang-command": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-1.2.0.tgz", - "integrity": "sha1-RKrGW2lbAzmJaMOfNj/uXer98eo=", - "dev": true, - "optional": true, - "peer": true, - "requires": { - "shebang-regex": "^1.0.0" - } - }, - "shebang-regex": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-1.0.0.tgz", - "integrity": "sha1-2kL0l0DAtC2yypcoVxyxkMmO/qM=", - "dev": true, - "optional": true, - "peer": true - }, - "which": { - "version": "1.3.1", - "resolved": "https://registry.npmjs.org/which/-/which-1.3.1.tgz", - "integrity": "sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ==", - "dev": true, - "optional": true, - "peer": true, - "requires": { - "isexe": "^2.0.0" - } - } - } - }, "defaults": { "version": "1.0.3", "resolved": "https://registry.npmjs.org/defaults/-/defaults-1.0.3.tgz", @@ -63304,80 +61712,6 @@ } } }, - "del": { - "version": "4.1.1", - "resolved": "https://registry.npmjs.org/del/-/del-4.1.1.tgz", - "integrity": "sha512-QwGuEUouP2kVwQenAsOof5Fv8K9t3D8Ca8NxcXKrIpEHjTXK5J2nXLdP+ALI1cgv8wj7KuwBhTwBkOZSJKM5XQ==", - "dev": true, - "optional": true, - "peer": true, - "requires": { - "@types/glob": "^7.1.1", - "globby": "^6.1.0", - "is-path-cwd": "^2.0.0", - "is-path-in-cwd": "^2.0.0", - "p-map": "^2.0.0", - "pify": "^4.0.1", - "rimraf": "^2.6.3" - }, - "dependencies": { - "array-union": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/array-union/-/array-union-1.0.2.tgz", - "integrity": "sha1-mjRBDk9OPaI96jdb5b5w8kd47Dk=", - "dev": true, - "optional": true, - "peer": true, - "requires": { - "array-uniq": "^1.0.1" - } - }, - "globby": { - "version": "6.1.0", - "resolved": "https://registry.npmjs.org/globby/-/globby-6.1.0.tgz", - 
"integrity": "sha1-9abXDoOV4hyFj7BInWTfAkJNUGw=", - "dev": true, - "optional": true, - "peer": true, - "requires": { - "array-union": "^1.0.1", - "glob": "^7.0.3", - "object-assign": "^4.0.1", - "pify": "^2.0.0", - "pinkie-promise": "^2.0.0" - }, - "dependencies": { - "pify": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/pify/-/pify-2.3.0.tgz", - "integrity": "sha1-7RQaasBDqEnqWISY59yosVMw6Qw=", - "dev": true, - "optional": true, - "peer": true - } - } - }, - "p-map": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/p-map/-/p-map-2.1.0.tgz", - "integrity": "sha512-y3b8Kpd8OAN444hxfBbFfj1FY/RjtTd8tzYwhUqNYXx0fXx2iX4maP4Qr6qhIKbQXI02wTLAda4fYUbDagTUFw==", - "dev": true, - "optional": true, - "peer": true - }, - "rimraf": { - "version": "2.7.1", - "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.7.1.tgz", - "integrity": "sha512-uWjbaKIK3T1OSVptzX7Nl6PvQ3qAGtKEtVRjRuazjfL3Bx5eI409VZSqgND+4UNnmzLVdPj9FqFJNPqBZFve4w==", - "dev": true, - "optional": true, - "peer": true, - "requires": { - "glob": "^7.1.3" - } - } - } - }, "delayed-stream": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", @@ -63883,6 +62217,28 @@ "integrity": "sha512-TPJXq8JqFaVYm2CWmPvnP2Iyo4ZSM7/QKcSmuMLDObfpH5fi7RUGmd/rTDf+rut/saiDiQEeVTNgAmJEdAOx0w==", "dev": true }, + "encoding": { + "version": "0.1.13", + "resolved": "https://registry.npmjs.org/encoding/-/encoding-0.1.13.tgz", + "integrity": "sha512-ETBauow1T35Y/WZMkio9jiM0Z5xjHHmJ4XmjZOq1l/dXz3lr2sRn87nJy20RupqSh1F2m3HHPSp8ShIPQJrJ3A==", + "optional": true, + "peer": true, + "requires": { + "iconv-lite": "^0.6.2" + }, + "dependencies": { + "iconv-lite": { + "version": "0.6.3", + "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.6.3.tgz", + "integrity": "sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw==", + "optional": true, + "peer": true, + "requires": { + "safer-buffer": ">= 2.1.2 < 
3.0.0" + } + } + } + }, "end-of-stream": { "version": "1.4.4", "resolved": "https://registry.npmjs.org/end-of-stream/-/end-of-stream-1.4.4.tgz", @@ -64079,13 +62435,6 @@ "esbuild-windows-arm64": "0.14.39" } }, - "esbuild-linux-64": { - "version": "0.14.39", - "resolved": "https://registry.npmjs.org/esbuild-linux-64/-/esbuild-linux-64-0.14.39.tgz", - "integrity": "sha512-4tcgFDYWdI+UbNMGlua9u1Zhu0N5R6u9tl5WOM8aVnNX143JZoBZLpCuUr5lCKhnD0SCO+5gUyMfupGrHtfggQ==", - "dev": true, - "optional": true - }, "escalade": { "version": "3.1.1", "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.1.1.tgz", @@ -64478,6 +62827,16 @@ } } }, + "eslint-plugin-css-modules": { + "version": "2.11.0", + "resolved": "https://registry.npmjs.org/eslint-plugin-css-modules/-/eslint-plugin-css-modules-2.11.0.tgz", + "integrity": "sha512-CLvQvJOMlCywZzaI4HVu7QH/ltgNXvCg7giJGiE+sA9wh5zQ+AqTgftAzrERV22wHe1p688wrU/Zwxt1Ry922w==", + "dev": true, + "requires": { + "gonzales-pe": "^4.0.3", + "lodash": "^4.17.2" + } + }, "eslint-plugin-flowtype": { "version": "8.0.3", "resolved": "https://registry.npmjs.org/eslint-plugin-flowtype/-/eslint-plugin-flowtype-8.0.3.tgz", @@ -64779,11 +63138,6 @@ "c8": "^7.6.0" } }, - "estree-walker": { - "version": "0.6.1", - "resolved": "https://registry.npmjs.org/estree-walker/-/estree-walker-0.6.1.tgz", - "integrity": "sha512-SqmZANLWS0mnatqbSfRP5g8OXZC12Fgg1IwNtLsyHDzJizORW4khDfjPqJZsemPWBB2uqykUah5YpQ6epsqC/w==" - }, "esutils": { "version": "2.0.3", "resolved": "https://registry.npmjs.org/esutils/-/esutils-2.0.3.tgz", @@ -64813,17 +63167,6 @@ "integrity": "sha512-/46HWwbfCX2xTawVfkKLGxMifJYQBWMwY1mjywRtb4c9x8l5NP3KoJtnIOiL1hfdRkIuYhETxQlo62IF8tcnlg==", "dev": true }, - "eventsource": { - "version": "1.0.7", - "resolved": "https://registry.npmjs.org/eventsource/-/eventsource-1.0.7.tgz", - "integrity": "sha512-4Ln17+vVT0k8aWq+t/bF5arcS3EpT9gYtW66EPacdj/mAFevznsnyoHLPy2BA8gbIQeIHoPsvwmfBftfcG//BQ==", - "dev": true, - "optional": true, - "peer": true, - 
"requires": { - "original": "^1.0.0" - } - }, "evp_bytestokey": { "version": "1.0.3", "resolved": "https://registry.npmjs.org/evp_bytestokey/-/evp_bytestokey-1.0.3.tgz", @@ -65284,17 +63627,6 @@ "format": "^0.2.0" } }, - "faye-websocket": { - "version": "0.10.0", - "resolved": "https://registry.npmjs.org/faye-websocket/-/faye-websocket-0.10.0.tgz", - "integrity": "sha1-TkkvjQTftviQA1B/btvy1QHnxvQ=", - "dev": true, - "optional": true, - "peer": true, - "requires": { - "websocket-driver": ">=0.5.1" - } - }, "fb-watchman": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/fb-watchman/-/fb-watchman-2.0.1.tgz", @@ -65528,6 +63860,7 @@ "version": "3.3.1", "resolved": "https://registry.npmjs.org/find-cache-dir/-/find-cache-dir-3.3.1.tgz", "integrity": "sha512-t2GDMt3oGC/v+BMwzmllWDuJF/xcDtE5j/fCGbqDD7OLuJkj0cfh1YSA5VKPvwMeLFLNDBkwOKZ2X85jGLVftQ==", + "dev": true, "requires": { "commondir": "^1.0.1", "make-dir": "^3.0.2", @@ -65538,6 +63871,7 @@ "version": "4.1.0", "resolved": "https://registry.npmjs.org/find-up/-/find-up-4.1.0.tgz", "integrity": "sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw==", + "dev": true, "requires": { "locate-path": "^5.0.0", "path-exists": "^4.0.0" @@ -65553,36 +63887,36 @@ } }, "firebase": { - "version": "9.8.2", - "resolved": "https://registry.npmjs.org/firebase/-/firebase-9.8.2.tgz", - "integrity": "sha512-cVPpiR18vsLuGWAAVkVhNO6mYsEgYBqawvMI2zxKo2FCtneyBgMwOyWKI8VyCmL5ze5p5QJTPjkoatM6rZkd0Q==", - "requires": { - "@firebase/analytics": "0.7.9", - "@firebase/analytics-compat": "0.1.10", - "@firebase/app": "0.7.25", - "@firebase/app-check": "0.5.8", - "@firebase/app-check-compat": "0.2.8", - "@firebase/app-compat": "0.1.26", + "version": "9.8.3", + "resolved": "https://registry.npmjs.org/firebase/-/firebase-9.8.3.tgz", + "integrity": "sha512-PCThy5cFXnbiUtFPJ9vVdcG7wKibOKNR+iuNXf+54xMGJzYb+rM2P8GUqtr2fhVQkfs42uJ6gGKG4soNGkP64w==", + "requires": { + "@firebase/analytics": "0.7.10", + 
"@firebase/analytics-compat": "0.1.11", + "@firebase/app": "0.7.26", + "@firebase/app-check": "0.5.9", + "@firebase/app-check-compat": "0.2.9", + "@firebase/app-compat": "0.1.27", "@firebase/app-types": "0.7.0", - "@firebase/auth": "0.20.2", - "@firebase/auth-compat": "0.2.15", - "@firebase/database": "0.13.0", - "@firebase/database-compat": "0.2.0", - "@firebase/firestore": "3.4.9", - "@firebase/firestore-compat": "0.1.18", - "@firebase/functions": "0.8.1", - "@firebase/functions-compat": "0.2.1", - "@firebase/installations": "0.5.9", - "@firebase/messaging": "0.9.13", - "@firebase/messaging-compat": "0.1.13", - "@firebase/performance": "0.5.9", - "@firebase/performance-compat": "0.1.9", + "@firebase/auth": "0.20.3", + "@firebase/auth-compat": "0.2.16", + "@firebase/database": "0.13.1", + "@firebase/database-compat": "0.2.1", + "@firebase/firestore": "3.4.10", + "@firebase/firestore-compat": "0.1.19", + "@firebase/functions": "0.8.2", + "@firebase/functions-compat": "0.2.2", + "@firebase/installations": "0.5.10", + "@firebase/messaging": "0.9.14", + "@firebase/messaging-compat": "0.1.14", + "@firebase/performance": "0.5.10", + "@firebase/performance-compat": "0.1.10", "@firebase/polyfill": "0.3.36", - "@firebase/remote-config": "0.3.8", - "@firebase/remote-config-compat": "0.1.9", - "@firebase/storage": "0.9.6", - "@firebase/storage-compat": "0.1.14", - "@firebase/util": "1.6.0" + "@firebase/remote-config": "0.3.9", + "@firebase/remote-config-compat": "0.1.10", + "@firebase/storage": "0.9.7", + "@firebase/storage-compat": "0.1.15", + "@firebase/util": "1.6.1" } }, "flat": { @@ -65847,12 +64181,39 @@ "map-cache": "^0.2.2" } }, + "framer-motion": { + "version": "6.3.11", + "resolved": "https://registry.npmjs.org/framer-motion/-/framer-motion-6.3.11.tgz", + "integrity": "sha512-xQLk+ZSklNs5QNCUmdWPpKMOuWiB8ZETsvcIOWw8xvri9K3TamuifgCI/B6XpaEDR0/V2ZQF2Wm+gUAZrXo+rw==", + "requires": { + "@emotion/is-prop-valid": "^0.8.2", + "framesync": "6.0.1", + "hey-listen": 
"^1.0.8", + "popmotion": "11.0.3", + "style-value-types": "5.0.0", + "tslib": "^2.1.0" + }, + "dependencies": { + "tslib": { + "version": "2.4.0", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.4.0.tgz", + "integrity": "sha512-d6xOpEDfsi2CZVlPQzGeux8XMwLT9hssAsaPYExaQMuYskwb+x1x7J371tWlbBdWHroy99KnVB6qIkUbs5X3UQ==" + } + } + }, "framesync": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/framesync/-/framesync-4.1.0.tgz", - "integrity": "sha512-MmgZ4wCoeVxNbx2xp5hN/zPDCbLSKiDt4BbbslK7j/pM2lg5S0vhTNv1v8BCVb99JPIo6hXBFdwzU7Q4qcAaoQ==", + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/framesync/-/framesync-6.0.1.tgz", + "integrity": "sha512-fUY88kXvGiIItgNC7wcTOl0SNRCVXMKSWW2Yzfmn7EKNc+MpCzcz9DhdHcdjbrtN3c6R4H5dTY2jiCpPdysEjA==", "requires": { - "hey-listen": "^1.0.5" + "tslib": "^2.1.0" + }, + "dependencies": { + "tslib": { + "version": "2.4.0", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.4.0.tgz", + "integrity": "sha512-d6xOpEDfsi2CZVlPQzGeux8XMwLT9hssAsaPYExaQMuYskwb+x1x7J371tWlbBdWHroy99KnVB6qIkUbs5X3UQ==" + } } }, "fresh": { @@ -65901,6 +64262,7 @@ "version": "8.1.0", "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-8.1.0.tgz", "integrity": "sha512-yhlQgA6mnOJUKOsRUFsgJdQCvkKhcz8tlZG5HBQfReYZy46OwLcY+Zia0mtdHsOo9y/hP+CxMN0TU9QxoOtG4g==", + "dev": true, "requires": { "graceful-fs": "^4.2.0", "jsonfile": "^4.0.0", @@ -66271,6 +64633,15 @@ "slash": "^3.0.0" } }, + "gonzales-pe": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/gonzales-pe/-/gonzales-pe-4.3.0.tgz", + "integrity": "sha512-otgSPpUmdWJ43VXyiNgEYE4luzHCL2pz4wQ0OnDluC6Eg4Ko3Vexy/SrSynglw/eR+OhkzmqFCZa/OFa/RgAOQ==", + "dev": true, + "requires": { + "minimist": "^1.2.5" + } + }, "got": { "version": "9.6.0", "resolved": "https://registry.npmjs.org/got/-/got-9.6.0.tgz", @@ -66304,7 +64675,8 @@ "graceful-fs": { "version": "4.2.9", "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.9.tgz", - "integrity": 
"sha512-NtNxqUcXgpW2iMrfqSfR73Glt39K+BLwWsPs94yR63v45T0Wbej7eRmL5cWfwEgqXnmjQp3zaJTshdRW/qC2ZQ==" + "integrity": "sha512-NtNxqUcXgpW2iMrfqSfR73Glt39K+BLwWsPs94yR63v45T0Wbej7eRmL5cWfwEgqXnmjQp3zaJTshdRW/qC2ZQ==", + "dev": true }, "gzip-size": { "version": "6.0.0", @@ -66704,14 +65076,6 @@ "whatwg-encoding": "^1.0.5" } }, - "html-entities": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/html-entities/-/html-entities-1.4.0.tgz", - "integrity": "sha512-8nxjcBcd8wovbeKx7h3wTji4e6+rhaVuPNpMqwWgnHh+N9ToqsCs6XztWRBPQ+UtzsoMAdKZtUENoVzU/EMtZA==", - "dev": true, - "optional": true, - "peer": true - }, "html-escaper": { "version": "2.0.2", "resolved": "https://registry.npmjs.org/html-escaper/-/html-escaper-2.0.2.tgz", @@ -66887,141 +65251,6 @@ "debug": "4" } }, - "http-proxy-middleware": { - "version": "0.19.1", - "resolved": "https://registry.npmjs.org/http-proxy-middleware/-/http-proxy-middleware-0.19.1.tgz", - "integrity": "sha512-yHYTgWMQO8VvwNS22eLLloAkvungsKdKTLO8AJlftYIKNfJr3GK3zK0ZCfzDDGUBttdGc8xFy1mCitvNKQtC3Q==", - "dev": true, - "optional": true, - "peer": true, - "requires": { - "http-proxy": "^1.17.0", - "is-glob": "^4.0.0", - "lodash": "^4.17.11", - "micromatch": "^3.1.10" - }, - "dependencies": { - "braces": { - "version": "2.3.2", - "resolved": "https://registry.npmjs.org/braces/-/braces-2.3.2.tgz", - "integrity": "sha512-aNdbnj9P8PjdXU4ybaWLK2IF3jc/EoDYbC7AazW6to3TRsfXxscC9UXOB5iDiEQrkyIbWp2SLQda4+QAa7nc3w==", - "dev": true, - "optional": true, - "peer": true, - "requires": { - "arr-flatten": "^1.1.0", - "array-unique": "^0.3.2", - "extend-shallow": "^2.0.1", - "fill-range": "^4.0.0", - "isobject": "^3.0.1", - "repeat-element": "^1.1.2", - "snapdragon": "^0.8.1", - "snapdragon-node": "^2.0.1", - "split-string": "^3.0.2", - "to-regex": "^3.0.1" - }, - "dependencies": { - "extend-shallow": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz", - "integrity": 
"sha1-Ua99YUrZqfYQ6huvu5idaxxWiQ8=", - "dev": true, - "optional": true, - "peer": true, - "requires": { - "is-extendable": "^0.1.0" - } - } - } - }, - "fill-range": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-4.0.0.tgz", - "integrity": "sha1-1USBHUKPmOsGpj3EAtJAPDKMOPc=", - "dev": true, - "optional": true, - "peer": true, - "requires": { - "extend-shallow": "^2.0.1", - "is-number": "^3.0.0", - "repeat-string": "^1.6.1", - "to-regex-range": "^2.1.0" - }, - "dependencies": { - "extend-shallow": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz", - "integrity": "sha1-Ua99YUrZqfYQ6huvu5idaxxWiQ8=", - "dev": true, - "optional": true, - "peer": true, - "requires": { - "is-extendable": "^0.1.0" - } - } - } - }, - "is-number": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/is-number/-/is-number-3.0.0.tgz", - "integrity": "sha1-JP1iAaR4LPUFYcgQJ2r8fRLXEZU=", - "dev": true, - "optional": true, - "peer": true, - "requires": { - "kind-of": "^3.0.2" - }, - "dependencies": { - "kind-of": { - "version": "3.2.2", - "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-3.2.2.tgz", - "integrity": "sha1-MeohpzS6ubuw8yRm2JOupR5KPGQ=", - "dev": true, - "optional": true, - "peer": true, - "requires": { - "is-buffer": "^1.1.5" - } - } - } - }, - "micromatch": { - "version": "3.1.10", - "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-3.1.10.tgz", - "integrity": "sha512-MWikgl9n9M3w+bpsY3He8L+w9eF9338xRl8IAO5viDizwSzziFEyUzo2xrrloB64ADbTf8uA8vRqqttDTOmccg==", - "dev": true, - "optional": true, - "peer": true, - "requires": { - "arr-diff": "^4.0.0", - "array-unique": "^0.3.2", - "braces": "^2.3.1", - "define-property": "^2.0.2", - "extend-shallow": "^3.0.2", - "extglob": "^2.0.4", - "fragment-cache": "^0.2.1", - "kind-of": "^6.0.2", - "nanomatch": "^1.2.9", - "object.pick": "^1.3.0", - "regex-not": "^1.0.0", - "snapdragon": "^0.8.1", - "to-regex": 
"^3.0.2" - } - }, - "to-regex-range": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-2.1.1.tgz", - "integrity": "sha1-fIDBe53+vlmeJzZ+DU3VWQFB2zg=", - "dev": true, - "optional": true, - "peer": true, - "requires": { - "is-number": "^3.0.0", - "repeat-string": "^1.6.1" - } - } - } - }, "http2-client": { "version": "1.3.5", "resolved": "https://registry.npmjs.org/http2-client/-/http2-client-1.3.5.tgz", @@ -67526,18 +65755,6 @@ } } }, - "internal-ip": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/internal-ip/-/internal-ip-4.3.0.tgz", - "integrity": "sha512-S1zBo1D6zcsyuC6PMmY5+55YMILQ9av8lotMx447Bq6SAgo/sDK6y6uUKmuYhW7eacnIhFfsPmCNYdDzsnnDCg==", - "dev": true, - "optional": true, - "peer": true, - "requires": { - "default-gateway": "^4.2.0", - "ipaddr.js": "^1.9.0" - } - }, "internal-slot": { "version": "1.0.3", "resolved": "https://registry.npmjs.org/internal-slot/-/internal-slot-1.0.3.tgz", @@ -67592,14 +65809,6 @@ "integrity": "sha1-vd7XARQpCCjAoDnnLvJfWq7ENUo=", "dev": true }, - "ip-regex": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/ip-regex/-/ip-regex-2.1.0.tgz", - "integrity": "sha1-+ni/XS5pE8kRzp+BnuUUa7bYROk=", - "dev": true, - "optional": true, - "peer": true - }, "ipaddr.js": { "version": "1.9.1", "resolved": "https://registry.npmjs.org/ipaddr.js/-/ipaddr.js-1.9.1.tgz", @@ -67909,28 +66118,6 @@ "integrity": "sha512-w942bTcih8fdJPJmQHFzkS76NEP8Kzzvmw92cXsazb8intwLqPibPPdXf4ANdKV3rYMuuQYGIWtvz9JilB3NFQ==", "dev": true }, - "is-path-in-cwd": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/is-path-in-cwd/-/is-path-in-cwd-2.1.0.tgz", - "integrity": "sha512-rNocXHgipO+rvnP6dk3zI20RpOtrAM/kzbB258Uw5BWr3TpXi861yzjo16Dn4hUox07iw5AyeMLHWsujkjzvRQ==", - "dev": true, - "optional": true, - "peer": true, - "requires": { - "is-path-inside": "^2.1.0" - } - }, - "is-path-inside": { - "version": "2.1.0", - "resolved": 
"https://registry.npmjs.org/is-path-inside/-/is-path-inside-2.1.0.tgz", - "integrity": "sha512-wiyhTzfDWsvwAW53OBWF5zuvaOGlZ6PwYxAbPVDhpm+gM09xKQGjBq/8uYN12aDvMxnAnq3dxTyoSoRNmg5YFg==", - "dev": true, - "optional": true, - "peer": true, - "requires": { - "path-is-inside": "^1.0.2" - } - }, "is-plain-obj": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/is-plain-obj/-/is-plain-obj-3.0.0.tgz", @@ -71007,14 +69194,6 @@ "integrity": "sha1-nbe1lJatPzz+8wp1FC0tkwrXJlE=", "dev": true }, - "json3": { - "version": "3.3.3", - "resolved": "https://registry.npmjs.org/json3/-/json3-3.3.3.tgz", - "integrity": "sha512-c7/8mbUsKigAbLkD5B010BK4D9LZm7A1pNItkEwiUZRpIN66exu/e7YQWysGun+TRKaJp8MhemM+VkfWv42aCA==", - "dev": true, - "optional": true, - "peer": true - }, "json5": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/json5/-/json5-1.0.1.tgz", @@ -71034,6 +69213,7 @@ "version": "4.0.0", "resolved": "https://registry.npmjs.org/jsonfile/-/jsonfile-4.0.0.tgz", "integrity": "sha1-h3Gq4HmbZAdrdmQPygWPnBDjPss=", + "dev": true, "requires": { "graceful-fs": "^4.1.6" } @@ -71116,14 +69296,6 @@ "json-buffer": "3.0.0" } }, - "killable": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/killable/-/killable-1.0.1.tgz", - "integrity": "sha512-LzqtLKlUwirEUyl/nicirVmNiPvYs7l5n8wOPP7fyJVpUPkvCnW/vuiXGpylGUlnPDnB7311rARzAt3Mhswpjg==", - "dev": true, - "optional": true, - "peer": true - }, "kind-of": { "version": "6.0.3", "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-6.0.3.tgz", @@ -71414,7 +69586,7 @@ "strip-bom": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/strip-bom/-/strip-bom-2.0.0.tgz", - "integrity": "sha1-YhmoVhZSBJHzV4i9vxRHqZx+aw4=", + "integrity": "sha512-kwrX1y7czp1E69n2ajbG65mIo9dqvJ+8aBQXOGVxqwvNbsXdFM6Lq37dLAY3mknUwru8CfcCbfOLL/gMo+fi3g==", "dev": true, "optional": true, "requires": { @@ -71444,6 +69616,7 @@ "version": "5.0.0", "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-5.0.0.tgz", 
"integrity": "sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g==", + "dev": true, "requires": { "p-locate": "^4.1.0" } @@ -71661,14 +69834,6 @@ } } }, - "loglevel": { - "version": "1.7.1", - "resolved": "https://registry.npmjs.org/loglevel/-/loglevel-1.7.1.tgz", - "integrity": "sha512-Hesni4s5UkWkwCGJMQGAh71PaLUmKFM60dHvq0zi/vDhhrzuk+4GgNbTXJ12YYQJn6ZKBDNIjYcuQGKudvqrIw==", - "dev": true, - "optional": true, - "peer": true - }, "long": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/long/-/long-4.0.0.tgz", @@ -71759,6 +69924,7 @@ "version": "3.1.0", "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-3.1.0.tgz", "integrity": "sha512-g3FeP20LNwhALb/6Cz6Dd4F2ngze0jz7tbzrD2wAV+o9FeNHe4rL+yK2md0J/fiSf1sa1ADhXqi5+oVwOM/eGw==", + "dev": true, "requires": { "semver": "^6.0.0" } @@ -72361,7 +70527,7 @@ "strip-indent": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/strip-indent/-/strip-indent-1.0.1.tgz", - "integrity": "sha1-DHlipq3vp7vUrDZkYKY4VSrhoKI=", + "integrity": "sha512-I5iQq6aFMM62fBEAIB/hXzwJD6EEZ0xEGCX2t7oXqaKPIRgt4WruAQ285BISgdkP+HLGWyeGmNJcpIwFeRYRUA==", "dev": true, "optional": true, "requires": { @@ -73095,6 +71261,12 @@ "integrity": "sha512-hA0fIHy/74p5DE0rdmrpU0sV1U+gnWTcgShWequGRLy0L1eT+zY0ozFukawpLaxMwIA+orRcqFRElYwT+5p81A==", "dev": true }, + "monaco-editor": { + "version": "0.33.0", + "resolved": "https://registry.npmjs.org/monaco-editor/-/monaco-editor-0.33.0.tgz", + "integrity": "sha512-VcRWPSLIUEgQJQIE0pVT8FcGBIgFoxz7jtqctE+IiCxWugD0DwgyQBcZBhdSrdMC84eumoqMZsGl2GTreOzwqw==", + "peer": true + }, "move-concurrently": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/move-concurrently/-/move-concurrently-1.0.1.tgz", @@ -73347,14 +71519,6 @@ "http2-client": "^1.2.5" } }, - "node-forge": { - "version": "0.10.0", - "resolved": "https://registry.npmjs.org/node-forge/-/node-forge-0.10.0.tgz", - "integrity": 
"sha512-PPmu8eEeG9saEUvI97fm4OYxXVB6bFvyNTyiUOBichBpFG8A1Ljw3bY62+5oOjDEMHRnd0Y7HQ+x7uzxOzC6JA==", - "dev": true, - "optional": true, - "peer": true - }, "node-int64": { "version": "0.4.0", "resolved": "https://registry.npmjs.org/node-int64/-/node-int64-0.4.0.tgz", @@ -73830,9 +71994,9 @@ } }, "openapi-types": { - "version": "11.0.1", - "resolved": "https://registry.npmjs.org/openapi-types/-/openapi-types-11.0.1.tgz", - "integrity": "sha512-P2pGRlHFXgP8z6vrp5P/MtftOXYtlIY1A+V0VmioOoo85NN6RSPgGbEprRAUNMIsbfRjnCPdx/r8mi8QRR7grQ==", + "version": "12.0.0", + "resolved": "https://registry.npmjs.org/openapi-types/-/openapi-types-12.0.0.tgz", + "integrity": "sha512-6Wd9k8nmGQHgCbehZCP6wwWcfXcvinhybUTBatuhjRsCxUIujuYFZc9QnGeae75CyHASewBtxs0HX/qwREReUw==", "dev": true, "peer": true }, @@ -73851,27 +72015,6 @@ "integrity": "sha512-8AV/sCtuzUeTo8gQK5qDZzARrulB3egtLzFgteqB2tcT4Mw7B8Kt7JcDHmltjz6FOAHsvTevk70gZEbhM4ZS9Q==", "dev": true }, - "opn": { - "version": "5.5.0", - "resolved": "https://registry.npmjs.org/opn/-/opn-5.5.0.tgz", - "integrity": "sha512-PqHpggC9bLV0VeWcdKhkpxY+3JTzetLSqTCWL/z/tFIbI6G8JCjondXklT1JinczLz2Xib62sSp0T/gKT4KksA==", - "dev": true, - "optional": true, - "peer": true, - "requires": { - "is-wsl": "^1.1.0" - }, - "dependencies": { - "is-wsl": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/is-wsl/-/is-wsl-1.1.0.tgz", - "integrity": "sha1-HxbkqiKwTRM2tmGIpmrzxgDDpm0=", - "dev": true, - "optional": true, - "peer": true - } - } - }, "optionator": { "version": "0.9.1", "resolved": "https://registry.npmjs.org/optionator/-/optionator-0.9.1.tgz", @@ -73954,17 +72097,6 @@ } } }, - "original": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/original/-/original-1.0.2.tgz", - "integrity": "sha512-hyBVl6iqqUOJ8FqRe+l/gS8H+kKYjrEndd5Pm1MfBtsEKA038HkkdbAl/72EAXGyonD/PFsvmVG+EvcIpliMBg==", - "dev": true, - "optional": true, - "peer": true, - "requires": { - "url-parse": "^1.4.3" - } - }, "orval": { "version": "6.8.1", "resolved": 
"https://registry.npmjs.org/orval/-/orval-6.8.1.tgz", @@ -74242,6 +72374,7 @@ "version": "2.3.0", "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz", "integrity": "sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==", + "dev": true, "requires": { "p-try": "^2.0.0" } @@ -74250,6 +72383,7 @@ "version": "4.1.0", "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-4.1.0.tgz", "integrity": "sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A==", + "dev": true, "requires": { "p-limit": "^2.2.0" } @@ -74263,17 +72397,6 @@ "aggregate-error": "^3.0.0" } }, - "p-retry": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/p-retry/-/p-retry-3.0.1.tgz", - "integrity": "sha512-XE6G4+YTTkT2a0UWb2kjZe8xNwf8bIbnqpc/IS/idOBVhyves0mK5OJgeocjx7q5pvX/6m23xuzVPYT1uGM73w==", - "dev": true, - "optional": true, - "peer": true, - "requires": { - "retry": "^0.12.0" - } - }, "p-timeout": { "version": "3.2.0", "resolved": "https://registry.npmjs.org/p-timeout/-/p-timeout-3.2.0.tgz", @@ -74286,7 +72409,8 @@ "p-try": { "version": "2.2.0", "resolved": "https://registry.npmjs.org/p-try/-/p-try-2.2.0.tgz", - "integrity": "sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==" + "integrity": "sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==", + "dev": true }, "pac-proxy-agent": { "version": "5.0.0", @@ -74501,21 +72625,14 @@ "path-exists": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz", - "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==" + "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==", + "dev": true }, "path-is-absolute": { "version": "1.0.1", "resolved": 
"https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", "integrity": "sha1-F0uSaHNVNP+8es5r9TpanhtcX18=" }, - "path-is-inside": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/path-is-inside/-/path-is-inside-1.0.2.tgz", - "integrity": "sha1-NlQX3t5EQw0cEa9hAn+s8HS9/FM=", - "dev": true, - "optional": true, - "peer": true - }, "path-key": { "version": "3.1.1", "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", @@ -74525,7 +72642,8 @@ "path-parse": { "version": "1.0.7", "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.7.tgz", - "integrity": "sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==" + "integrity": "sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==", + "dev": true }, "path-to-regexp": { "version": "6.2.0", @@ -74606,6 +72724,7 @@ "version": "4.2.0", "resolved": "https://registry.npmjs.org/pkg-dir/-/pkg-dir-4.2.0.tgz", "integrity": "sha512-HRDzbaKjC+AOWVXxAU/x54COGeIv9eb+6CkDSQoNTt4XyWoIJvuPsXizxu/Fr23EiekbtZwmh1IcIG/l/a10GQ==", + "dev": true, "requires": { "find-up": "^4.0.0" } @@ -74689,31 +72808,21 @@ "dev": true }, "popmotion": { - "version": "8.7.5", - "resolved": "https://registry.npmjs.org/popmotion/-/popmotion-8.7.5.tgz", - "integrity": "sha512-p85l/qrOuLTQZ+aGfyB8cqOzDRWgiSFN941jSrj9CsWeJzUn+jiGSWJ50sr59gWAZ8TKIvqdDowqFlScc0NEyw==", - "requires": { - "@popmotion/easing": "^1.0.1", - "@popmotion/popcorn": "^0.4.4", - "framesync": "^4.0.0", - "hey-listen": "^1.0.5", - "style-value-types": "^3.1.7", - "stylefire": "^7.0.1", - "tslib": "^1.10.0" - } - }, - "popmotion-pose": { - "version": "3.4.11", - "resolved": "https://registry.npmjs.org/popmotion-pose/-/popmotion-pose-3.4.11.tgz", - "integrity": "sha512-KjaevePyC1+Q3ylIcBO3YMhCouE1a/3bvtBXThrwz44fw1yXCUQagPJGkGirXI/J1xF+w3Lx3bpkkgwArizpEQ==", + "version": "11.0.3", + "resolved": 
"https://registry.npmjs.org/popmotion/-/popmotion-11.0.3.tgz", + "integrity": "sha512-Y55FLdj3UxkR7Vl3s7Qr4e9m0onSnP8W7d/xQLsoJM40vs6UKHFdygs6SWryasTZYqugMjm3BepCF4CWXDiHgA==", "requires": { - "@popmotion/easing": "^1.0.1", - "hey-listen": "^1.0.5", - "popmotion": "^8.7.1", - "pose-core": "^2.1.1", - "style-value-types": "^3.0.6", - "ts-essentials": "^1.0.3", - "tslib": "^1.10.0" + "framesync": "6.0.1", + "hey-listen": "^1.0.8", + "style-value-types": "5.0.0", + "tslib": "^2.1.0" + }, + "dependencies": { + "tslib": { + "version": "2.4.0", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.4.0.tgz", + "integrity": "sha512-d6xOpEDfsi2CZVlPQzGeux8XMwLT9hssAsaPYExaQMuYskwb+x1x7J371tWlbBdWHroy99KnVB6qIkUbs5X3UQ==" + } } }, "portfinder": { @@ -74738,31 +72847,6 @@ } } }, - "pose-core": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/pose-core/-/pose-core-2.1.1.tgz", - "integrity": "sha512-fV1sDfu80debHmKerikypqGoORMEUHVwGh/BlWnqUSmmzQGYIg8neDrdwe66hFeRO+adr2qS4ZERSu/ZVjOiSQ==", - "requires": { - "@types/invariant": "^2.2.29", - "@types/node": "^10.0.5", - "hey-listen": "^1.0.5", - "rollup-plugin-typescript2": "^0.25.2", - "tslib": "^1.10.0", - "typescript": "^3.7.2" - }, - "dependencies": { - "@types/node": { - "version": "10.17.32", - "resolved": "https://registry.npmjs.org/@types/node/-/node-10.17.32.tgz", - "integrity": "sha512-EUq+cjH/3KCzQHikGnNbWAGe548IFLSm93Vl8xA7EuYEEATiyOVDyEVuGkowL7c9V69FF/RiZSAOCFPApMs/ig==" - }, - "typescript": { - "version": "3.9.7", - "resolved": "https://registry.npmjs.org/typescript/-/typescript-3.9.7.tgz", - "integrity": "sha512-BLbiRkiBzAwsjut4x/dsibSTB6yWpwT5qWmC2OfuCg3GgVQCSgMs4vEctYPhsaGtd0AeuuHMkjZ2h2WG8MSzRw==" - } - } - }, "posix-character-classes": { "version": "0.1.1", "resolved": "https://registry.npmjs.org/posix-character-classes/-/posix-character-classes-0.1.1.tgz", @@ -75310,14 +73394,6 @@ "integrity": "sha512-773xhDQnZBMFobEiztv8LIl70ch5MSF/jUQVlhwFyBILqq96anmoctVIYz+ZRp0qbCKATTn6ev02M3r7Ga5vqA==", 
"dev": true }, - "querystringify": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/querystringify/-/querystringify-2.2.0.tgz", - "integrity": "sha512-FIqgj2EUvTa7R50u0rGsyTftzjYmv/a3hO345bZNrqabNqjtgiDMgmo4mkUjd+nzU5oF3dClKqFIPUKybUyqoQ==", - "dev": true, - "optional": true, - "peer": true - }, "queue-microtask": { "version": "1.2.2", "resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.2.tgz", @@ -75984,17 +74060,6 @@ "resize-observer-polyfill": "^1.5.0" } }, - "react-pose": { - "version": "4.0.10", - "resolved": "https://registry.npmjs.org/react-pose/-/react-pose-4.0.10.tgz", - "integrity": "sha512-OKc5oqKw+nL9FvIokxn8MmaAmkNsWv64hLX9xWWcMWXSgEo745hzYUqDn2viMJ97mf76oPy6Vc+BS4k6Kwj78g==", - "requires": { - "@emotion/is-prop-valid": "^0.7.3", - "hey-listen": "^1.0.5", - "popmotion-pose": "^3.4.10", - "tslib": "^1.10.0" - } - }, "react-query": { "version": "3.39.1", "resolved": "https://registry.npmjs.org/react-query/-/react-query-3.39.1.tgz", @@ -76129,9 +74194,9 @@ "dev": true }, "acorn": { - "version": "8.7.0", - "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.7.0.tgz", - "integrity": "sha512-V/LGr1APy+PXIwKebEWrkZPwoeoF+w1jiOBUmuxuiUIaOHtob8Qc9BTrYo7VuI5fR8tqsy+buA2WFooR5olqvQ==", + "version": "8.7.1", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.7.1.tgz", + "integrity": "sha512-Xx54uLJQZ19lKygFXOWsscKUbsBZW0CPykPhVQdhIeIwrbPmJzqeASDInc8nKBnp/JT6igTs82qPXz069H8I/A==", "dev": true, "optional": true, "peer": true @@ -77617,9 +75682,9 @@ } }, "type-fest": { - "version": "2.11.0", - "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-2.11.0.tgz", - "integrity": "sha512-GwRKR1jZMAQP/hVR929DWB5Z2lwSIM/nNcHEfDj2E0vOMhcYbqFxGKE5JaSzMdzmEtWJiamEn6VwHs/YVXVhEQ==", + "version": "2.13.0", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-2.13.0.tgz", + "integrity": "sha512-lPfAm42MxE4/456+QyIaaVBAwgpJb6xZ8PRu09utnhPdWwcyj9vgy6Sq0Z5yNbJ21EdxB5dRU/Qg8bsyAMtlcw==", "dev": true, "optional": 
true, "peer": true @@ -78987,7 +77052,7 @@ "strip-ansi": { "version": "3.0.1", "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-3.0.1.tgz", - "integrity": "sha1-ajhfuIU9lS1f8F0Oiq+UJ43GPc8=", + "integrity": "sha512-VhumSSbBqDTP8p2ZLKj40UjBCV4+v8bUSEpUb4KjRgWk9pbqGF4REFj6KEagidb2f/M6AzC0EmFyDNGaw9OCzg==", "dev": true, "requires": { "ansi-regex": "^2.0.0" @@ -79034,14 +77099,6 @@ "integrity": "sha512-Xf0nWe6RseziFMu+Ap9biiUbmplq6S9/p+7w7YXP/JBHhrUDDUhwa+vANyubuqfZWTveU//DYVGsDG7RKL/vEw==", "dev": true }, - "require-main-filename": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/require-main-filename/-/require-main-filename-2.0.0.tgz", - "integrity": "sha512-NKN5kMDylKuldxYLSUfrbo5Tuzh4hd+2E8NPPX02mZtn1VuREQToYe/ZdlJy+J3uCpfaiGF05e7B8W0iXbQHmg==", - "dev": true, - "optional": true, - "peer": true - }, "requires-port": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/requires-port/-/requires-port-1.0.0.tgz", @@ -79063,6 +77120,7 @@ "version": "1.12.0", "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.12.0.tgz", "integrity": "sha512-B/dOmuoAik5bKcD6s6nXDCjzUKnaDvdkRyAk6rsmsKLipWj4797iothd7jmmUhWTfinVMU+wc56rYKsit2Qy4w==", + "dev": true, "requires": { "path-parse": "^1.0.6" } @@ -79168,14 +77226,6 @@ "integrity": "sha512-TTlYpa+OL+vMMNG24xSlQGEJ3B/RzEfUlLct7b5G/ytav+wPrplCpVMFuwzXbkecJrb6IYo1iFb0S9v37754mg==", "dev": true }, - "retry": { - "version": "0.12.0", - "resolved": "https://registry.npmjs.org/retry/-/retry-0.12.0.tgz", - "integrity": "sha1-G0KmJmoh8HQh0bC1S33BZ7AcATs=", - "dev": true, - "optional": true, - "peer": true - }, "reusify": { "version": "1.0.4", "resolved": "https://registry.npmjs.org/reusify/-/reusify-1.0.4.tgz", @@ -79210,6 +77260,7 @@ "version": "2.67.3", "resolved": "https://registry.npmjs.org/rollup/-/rollup-2.67.3.tgz", "integrity": "sha512-G/x1vUwbGtP6O5ZM8/sWr8+p7YfZhI18pPqMRtMYMWSbHjKZ/ajHGiM+GWNTlWyOR0EHIdT8LHU+Z4ciIZ1oBw==", + "dev": true, "requires": { "fsevents": "~2.3.2" } @@ 
-79227,9 +77278,9 @@ }, "dependencies": { "acorn": { - "version": "8.7.0", - "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.7.0.tgz", - "integrity": "sha512-V/LGr1APy+PXIwKebEWrkZPwoeoF+w1jiOBUmuxuiUIaOHtob8Qc9BTrYo7VuI5fR8tqsy+buA2WFooR5olqvQ==", + "version": "8.7.1", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.7.1.tgz", + "integrity": "sha512-Xx54uLJQZ19lKygFXOWsscKUbsBZW0CPykPhVQdhIeIwrbPmJzqeASDInc8nKBnp/JT6igTs82qPXz069H8I/A==", "dev": true, "optional": true, "peer": true @@ -79268,33 +77319,6 @@ } } }, - "rollup-plugin-typescript2": { - "version": "0.25.3", - "resolved": "https://registry.npmjs.org/rollup-plugin-typescript2/-/rollup-plugin-typescript2-0.25.3.tgz", - "integrity": "sha512-ADkSaidKBovJmf5VBnZBZe+WzaZwofuvYdzGAKTN/J4hN7QJCFYAq7IrH9caxlru6T5qhX41PNFS1S4HqhsGQg==", - "requires": { - "find-cache-dir": "^3.0.0", - "fs-extra": "8.1.0", - "resolve": "1.12.0", - "rollup-pluginutils": "2.8.1", - "tslib": "1.10.0" - }, - "dependencies": { - "tslib": { - "version": "1.10.0", - "resolved": "https://registry.npmjs.org/tslib/-/tslib-1.10.0.tgz", - "integrity": "sha512-qOebF53frne81cf0S9B41ByenJ3/IuH8yJKngAX35CmiZySA0khhkovshKK+jGCaMnVomla7gVlIcc3EvKPbTQ==" - } - } - }, - "rollup-pluginutils": { - "version": "2.8.1", - "resolved": "https://registry.npmjs.org/rollup-pluginutils/-/rollup-pluginutils-2.8.1.tgz", - "integrity": "sha512-J5oAoysWar6GuZo0s+3bZ6sVZAC0pfqKz68De7ZgDi5z63jOVZn1uJL/+z1jeKHNbGII8kAyHF5q8LnxSX5lQg==", - "requires": { - "estree-walker": "^0.6.1" - } - }, "rsvp": { "version": "4.8.5", "resolved": "https://registry.npmjs.org/rsvp/-/rsvp-4.8.5.tgz", @@ -79380,7 +77404,7 @@ "version": "2.1.2", "resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz", "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==", - "dev": true + "devOptional": true }, "sane": { "version": "4.1.0", @@ -79578,7 +77602,7 @@ "shebang-command": { "version": "1.2.0", 
"resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-1.2.0.tgz", - "integrity": "sha1-RKrGW2lbAzmJaMOfNj/uXer98eo=", + "integrity": "sha512-EV3L1+UQWGor21OmnvojK36mhg+TyIKDh3iFBKBohr5xeXIhNBcx8oWdgkTEEQ+BEFFYdLRuqMfd5L84N1V5Vg==", "dev": true, "requires": { "shebang-regex": "^1.0.0" @@ -79587,13 +77611,13 @@ "shebang-regex": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-1.0.0.tgz", - "integrity": "sha1-2kL0l0DAtC2yypcoVxyxkMmO/qM=", + "integrity": "sha512-wpoSFAxys6b2a2wHZ1XpDSgD7N9iVjg29Ph9uV/uaP9Ex/KXlkTZTeddxDPSYQpgvzKLGJke2UU0AzoGCjNIvQ==", "dev": true }, "to-regex-range": { "version": "2.1.1", "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-2.1.1.tgz", - "integrity": "sha1-fIDBe53+vlmeJzZ+DU3VWQFB2zg=", + "integrity": "sha512-ZZWNfCjUokXXDGXFpZehJIkZqq91BcULFq/Pi7M5i4JnxXdhMKAK682z8bCW3o8Hj1wuuzoKcW3DfVzaP6VuNg==", "dev": true, "requires": { "is-number": "^3.0.0", @@ -79658,9 +77682,9 @@ "dev": true }, "sass": { - "version": "1.52.2", - "resolved": "https://registry.npmjs.org/sass/-/sass-1.52.2.tgz", - "integrity": "sha512-mfHB2VSeFS7sZlPv9YohB9GB7yWIgQNTGniQwfQ04EoQN0wsQEv7SwpCwy/x48Af+Z3vDeFXz+iuXM3HK/phZQ==", + "version": "1.52.3", + "resolved": "https://registry.npmjs.org/sass/-/sass-1.52.3.tgz", + "integrity": "sha512-LNNPJ9lafx+j1ArtA7GyEJm9eawXN8KlA1+5dF6IZyoONg1Tyo/g+muOsENWJH/2Q1FHbbV4UwliU0cXMa/VIA==", "requires": { "chokidar": ">=3.0.0 <4.0.0", "immutable": "^4.0.0", @@ -79730,21 +77754,11 @@ "ws": ">=7.4.6" } }, - "selfsigned": { - "version": "1.10.8", - "resolved": "https://registry.npmjs.org/selfsigned/-/selfsigned-1.10.8.tgz", - "integrity": "sha512-2P4PtieJeEwVgTU9QEcwIRDQ/mXJLX8/+I3ur+Pg16nS8oNbrGxEso9NyYWy8NAmXiNl4dlAp5MwoNeCWzON4w==", - "dev": true, - "optional": true, - "peer": true, - "requires": { - "node-forge": "^0.10.0" - } - }, "semver": { "version": "6.3.0", "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", - "integrity": 
"sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==" + "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", + "devOptional": true }, "semver-compare": { "version": "1.0.0", @@ -79843,7 +77857,7 @@ "serve-favicon": { "version": "2.5.0", "resolved": "https://registry.npmjs.org/serve-favicon/-/serve-favicon-2.5.0.tgz", - "integrity": "sha1-k10kDN/g9YBTB/3+ln2IlCosvPA=", + "integrity": "sha512-FMW2RvqNr03x+C0WxTyu6sOv21oOjkq5j8tjquWccwa6ScNyGFOGJVpuS1NmTVGBAHS07xnSKotgf2ehQmf9iA==", "dev": true, "requires": { "etag": "~1.8.1", @@ -79972,7 +77986,7 @@ "setimmediate": { "version": "1.0.5", "resolved": "https://registry.npmjs.org/setimmediate/-/setimmediate-1.0.5.tgz", - "integrity": "sha1-KQy7Iy4waULX1+qbg3Mqt4VvgoU=" + "integrity": "sha512-MATJdZp8sLqDl/68LfQmbP8zKPLQNV6BIZoIgrscFDQ+RsvK/BxeDQOgyxKKoh0y/8h3BqVFnCqQ/gd+reiIXA==" }, "setprototypeof": { "version": "1.2.0", @@ -80275,69 +78289,6 @@ } } }, - "sockjs": { - "version": "0.3.20", - "resolved": "https://registry.npmjs.org/sockjs/-/sockjs-0.3.20.tgz", - "integrity": "sha512-SpmVOVpdq0DJc0qArhF3E5xsxvaiqGNb73XfgBpK1y3UD5gs8DSo8aCTsuT5pX8rssdc2NDIzANwP9eCAiSdTA==", - "dev": true, - "optional": true, - "peer": true, - "requires": { - "faye-websocket": "^0.10.0", - "uuid": "^3.4.0", - "websocket-driver": "0.6.5" - }, - "dependencies": { - "uuid": { - "version": "3.4.0", - "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.4.0.tgz", - "integrity": "sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A==", - "dev": true, - "optional": true, - "peer": true - } - } - }, - "sockjs-client": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/sockjs-client/-/sockjs-client-1.4.0.tgz", - "integrity": "sha512-5zaLyO8/nri5cua0VtOrFXBPK1jbL4+1cebT/mmKA1E1ZXOvJrII75bPu0l0k843G/+iAbhEqzyKr0w/eCCj7g==", - "dev": true, - "optional": true, - "peer": true, - "requires": { - 
"debug": "^3.2.5", - "eventsource": "^1.0.7", - "faye-websocket": "~0.11.1", - "inherits": "^2.0.3", - "json3": "^3.3.2", - "url-parse": "^1.4.3" - }, - "dependencies": { - "debug": { - "version": "3.2.7", - "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz", - "integrity": "sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==", - "dev": true, - "optional": true, - "peer": true, - "requires": { - "ms": "^2.1.1" - } - }, - "faye-websocket": { - "version": "0.11.3", - "resolved": "https://registry.npmjs.org/faye-websocket/-/faye-websocket-0.11.3.tgz", - "integrity": "sha512-D2y4bovYpzziGgbHYtGCMjlJM36vAl/y+xUyn1C+FVx8szd1E+86KwVw6XvYSzOP8iMpm1X0I4xJD+QtUb36OA==", - "dev": true, - "optional": true, - "peer": true, - "requires": { - "websocket-driver": ">=0.5.1" - } - } - } - }, "socks": { "version": "2.6.2", "resolved": "https://registry.npmjs.org/socks/-/socks-2.6.2.tgz", @@ -80606,6 +78557,11 @@ "stacktrace-gps": "^3.0.4" } }, + "state-local": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/state-local/-/state-local-1.0.7.tgz", + "integrity": "sha512-HTEHMNieakEnoe33shBYcZ7NX83ACUjCu8c40iOGEZsngj9zRnkqS9j1pqQPXwobB0ZcVTk27REb7COQ0UR59w==" + }, "state-toggle": { "version": "1.0.3", "resolved": "https://registry.npmjs.org/state-toggle/-/state-toggle-1.0.3.tgz", @@ -80973,12 +78929,19 @@ } }, "style-value-types": { - "version": "3.1.9", - "resolved": "https://registry.npmjs.org/style-value-types/-/style-value-types-3.1.9.tgz", - "integrity": "sha512-050uqgB7WdvtgacoQKm+4EgKzJExVq0sieKBQQtJiU3Muh6MYcCp4T3M8+dfl6VOF2LR0NNwXBP1QYEed8DfIw==", + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/style-value-types/-/style-value-types-5.0.0.tgz", + "integrity": "sha512-08yq36Ikn4kx4YU6RD7jWEv27v4V+PUsOGa4n/as8Et3CuODMJQ00ENeAVXAeydX4Z2j1XHZF1K2sX4mGl18fA==", "requires": { "hey-listen": "^1.0.8", - "tslib": "^1.10.0" + "tslib": "^2.1.0" + }, + "dependencies": { + "tslib": { + "version": 
"2.4.0", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.4.0.tgz", + "integrity": "sha512-d6xOpEDfsi2CZVlPQzGeux8XMwLT9hssAsaPYExaQMuYskwb+x1x7J371tWlbBdWHroy99KnVB6qIkUbs5X3UQ==" + } } }, "styled-components": { @@ -81013,18 +78976,6 @@ } } }, - "stylefire": { - "version": "7.0.3", - "resolved": "https://registry.npmjs.org/stylefire/-/stylefire-7.0.3.tgz", - "integrity": "sha512-Q0l7NSeFz/OkX+o6/7Zg3VZxSAZeQzQpYomWmIpOehFM/rJNMSLVX5fgg6Q48ut2ETNKwdhm97mPNU643EBCoQ==", - "requires": { - "@popmotion/popcorn": "^0.4.4", - "framesync": "^4.0.0", - "hey-listen": "^1.0.8", - "style-value-types": "^3.1.7", - "tslib": "^1.10.0" - } - }, "stylis": { "version": "4.0.6", "resolved": "https://registry.npmjs.org/stylis/-/stylis-4.0.6.tgz", @@ -81345,9 +79296,9 @@ "dev": true }, "terser": { - "version": "5.14.0", - "resolved": "https://registry.npmjs.org/terser/-/terser-5.14.0.tgz", - "integrity": "sha512-JC6qfIEkPBd9j1SMO3Pfn+A6w2kQV54tv+ABQLgZr7dA3k/DL/OBoYSWxzVpZev3J+bUHXfr55L8Mox7AaNo6g==", + "version": "5.14.1", + "resolved": "https://registry.npmjs.org/terser/-/terser-5.14.1.tgz", + "integrity": "sha512-+ahUAE+iheqBTDxXhTisdA8hgvbEG1hHOQ9xmNjeUJSoi6DU/gMrKNcfZjHkyY6Alnuyc+ikYJaxxfHkT3+WuQ==", "dev": true, "requires": { "@jridgewell/source-map": "^0.3.2", @@ -81476,7 +79427,7 @@ "to-arraybuffer": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/to-arraybuffer/-/to-arraybuffer-1.0.1.tgz", - "integrity": "sha1-fSKbH8xjfkZsoIEYCDanqr/4P0M=", + "integrity": "sha512-okFlQcoGTi4LQBG/PgSYblw9VOyptsz2KJZqc6qtgGdes8VktzUQkj4BI2blit072iS8VODNcMA+tvnS9dnuMA==", "dev": true }, "to-fast-properties": { @@ -81619,11 +79570,6 @@ "resolved": "https://registry.npmjs.org/ts-easing/-/ts-easing-0.2.0.tgz", "integrity": "sha512-Z86EW+fFFh/IFB1fqQ3/+7Zpf9t2ebOAxNI/V6Wo7r5gqiqtxmgTlQ1qbqQcjLKYeSHPTsEmvlJUDg/EuL0uHQ==" }, - "ts-essentials": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/ts-essentials/-/ts-essentials-1.0.4.tgz", - "integrity": 
"sha512-q3N1xS4vZpRouhYHDPwO0bDW3EZ6SK9CrrDHxi/D6BPReSjpVgWIOpLS2o0gSBZm+7q/wyKp6RVM1AeeW7uyfQ==" - }, "ts-node": { "version": "10.8.1", "resolved": "https://registry.npmjs.org/ts-node/-/ts-node-10.8.1.tgz", @@ -81778,7 +79724,8 @@ "typescript": { "version": "4.7.3", "resolved": "https://registry.npmjs.org/typescript/-/typescript-4.7.3.tgz", - "integrity": "sha512-WOkT3XYvrpXx4vMMqlD+8R8R37fZkjyLGlxavMc4iB8lrl8L0DeTcHbYgw/v0N/z9wAFsgBhcsF0ruoySS22mA==" + "integrity": "sha512-WOkT3XYvrpXx4vMMqlD+8R8R37fZkjyLGlxavMc4iB8lrl8L0DeTcHbYgw/v0N/z9wAFsgBhcsF0ruoySS22mA==", + "devOptional": true }, "uglify-js": { "version": "3.16.0", @@ -82031,7 +79978,8 @@ "universalify": { "version": "0.1.2", "resolved": "https://registry.npmjs.org/universalify/-/universalify-0.1.2.tgz", - "integrity": "sha512-rBJeI5CXAlmy1pV+617WB9J63U6XcazHHF2f2dbJix4XzpUF0RS3Zbj0FGIOCAva5P/d/GBOYaACQ1w+0azUkg==" + "integrity": "sha512-rBJeI5CXAlmy1pV+617WB9J63U6XcazHHF2f2dbJix4XzpUF0RS3Zbj0FGIOCAva5P/d/GBOYaACQ1w+0azUkg==", + "dev": true }, "unload": { "version": "2.2.0", @@ -82296,18 +80244,6 @@ } } }, - "url-parse": { - "version": "1.5.9", - "resolved": "https://registry.npmjs.org/url-parse/-/url-parse-1.5.9.tgz", - "integrity": "sha512-HpOvhKBvre8wYez+QhHcYiVvVmeF6DVnuSOOPhe3cTum3BnqHhvKaZm8FU5yTiOu/Jut2ZpB2rA/SbBA1JIGlQ==", - "dev": true, - "optional": true, - "peer": true, - "requires": { - "querystringify": "^2.1.1", - "requires-port": "^1.0.0" - } - }, "url-parse-lax": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/url-parse-lax/-/url-parse-lax-3.0.0.tgz", @@ -82831,7 +80767,7 @@ "to-regex-range": { "version": "2.1.1", "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-2.1.1.tgz", - "integrity": "sha1-fIDBe53+vlmeJzZ+DU3VWQFB2zg=", + "integrity": "sha512-ZZWNfCjUokXXDGXFpZehJIkZqq91BcULFq/Pi7M5i4JnxXdhMKAK682z8bCW3o8Hj1wuuzoKcW3DfVzaP6VuNg==", "dev": true, "optional": true, "requires": { @@ -83039,610 +80975,6 @@ "webpack-log": "^2.0.0" } }, - 
"webpack-dev-server": { - "version": "3.11.0", - "resolved": "https://registry.npmjs.org/webpack-dev-server/-/webpack-dev-server-3.11.0.tgz", - "integrity": "sha512-PUxZ+oSTxogFQgkTtFndEtJIPNmml7ExwufBZ9L2/Xyyd5PnOL5UreWe5ZT7IU25DSdykL9p1MLQzmLh2ljSeg==", - "dev": true, - "optional": true, - "peer": true, - "requires": { - "ansi-html": "0.0.7", - "bonjour": "^3.5.0", - "chokidar": "^2.1.8", - "compression": "^1.7.4", - "connect-history-api-fallback": "^1.6.0", - "debug": "^4.1.1", - "del": "^4.1.1", - "express": "^4.17.1", - "html-entities": "^1.3.1", - "http-proxy-middleware": "0.19.1", - "import-local": "^2.0.0", - "internal-ip": "^4.3.0", - "ip": "^1.1.5", - "is-absolute-url": "^3.0.3", - "killable": "^1.0.1", - "loglevel": "^1.6.8", - "opn": "^5.5.0", - "p-retry": "^3.0.1", - "portfinder": "^1.0.26", - "schema-utils": "^1.0.0", - "selfsigned": "^1.10.7", - "semver": "^6.3.0", - "serve-index": "^1.9.1", - "sockjs": "0.3.20", - "sockjs-client": "1.4.0", - "spdy": "^4.0.2", - "strip-ansi": "^3.0.1", - "supports-color": "^6.1.0", - "url": "^0.11.0", - "webpack-dev-middleware": "^3.7.2", - "webpack-log": "^2.0.0", - "ws": "^6.2.1", - "yargs": "^13.3.2" - }, - "dependencies": { - "ansi-regex": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-2.1.1.tgz", - "integrity": "sha1-w7M6te42DYbg5ijwRorn7yfWVN8=", - "dev": true, - "optional": true, - "peer": true - }, - "anymatch": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/anymatch/-/anymatch-2.0.0.tgz", - "integrity": "sha512-5teOsQWABXHHBFP9y3skS5P3d/WfWXpv3FUpy+LorMrNYaT9pI4oLMQX7jzQ2KklNpGpWHzdCXTDT2Y3XGlZBw==", - "dev": true, - "optional": true, - "peer": true, - "requires": { - "micromatch": "^3.1.4", - "normalize-path": "^2.1.1" - }, - "dependencies": { - "normalize-path": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-2.1.1.tgz", - "integrity": "sha1-GrKLVW4Zg2Oowab35vogE3/mrtk=", - "dev": true, - 
"optional": true, - "peer": true, - "requires": { - "remove-trailing-separator": "^1.0.1" - } - } - } - }, - "binary-extensions": { - "version": "1.13.1", - "resolved": "https://registry.npmjs.org/binary-extensions/-/binary-extensions-1.13.1.tgz", - "integrity": "sha512-Un7MIEDdUC5gNpcGDV97op1Ywk748MpHcFTHoYs6qnj1Z3j7I53VG3nwZhKzoBZmbdRNnb6WRdFlwl7tSDuZGw==", - "dev": true, - "optional": true, - "peer": true - }, - "braces": { - "version": "2.3.2", - "resolved": "https://registry.npmjs.org/braces/-/braces-2.3.2.tgz", - "integrity": "sha512-aNdbnj9P8PjdXU4ybaWLK2IF3jc/EoDYbC7AazW6to3TRsfXxscC9UXOB5iDiEQrkyIbWp2SLQda4+QAa7nc3w==", - "dev": true, - "optional": true, - "peer": true, - "requires": { - "arr-flatten": "^1.1.0", - "array-unique": "^0.3.2", - "extend-shallow": "^2.0.1", - "fill-range": "^4.0.0", - "isobject": "^3.0.1", - "repeat-element": "^1.1.2", - "snapdragon": "^0.8.1", - "snapdragon-node": "^2.0.1", - "split-string": "^3.0.2", - "to-regex": "^3.0.1" - }, - "dependencies": { - "extend-shallow": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz", - "integrity": "sha1-Ua99YUrZqfYQ6huvu5idaxxWiQ8=", - "dev": true, - "optional": true, - "peer": true, - "requires": { - "is-extendable": "^0.1.0" - } - } - } - }, - "camelcase": { - "version": "5.3.1", - "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-5.3.1.tgz", - "integrity": "sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg==", - "dev": true, - "optional": true, - "peer": true - }, - "chokidar": { - "version": "2.1.8", - "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-2.1.8.tgz", - "integrity": "sha512-ZmZUazfOzf0Nve7duiCKD23PFSCs4JPoYyccjUFF3aQkQadqBhfzhjkwBH2mNOG9cTBwhamM37EIsIkZw3nRgg==", - "dev": true, - "optional": true, - "peer": true, - "requires": { - "anymatch": "^2.0.0", - "async-each": "^1.0.1", - "braces": "^2.3.2", - "fsevents": "^1.2.7", - "glob-parent": "^3.1.0", - 
"inherits": "^2.0.3", - "is-binary-path": "^1.0.0", - "is-glob": "^4.0.0", - "normalize-path": "^3.0.0", - "path-is-absolute": "^1.0.0", - "readdirp": "^2.2.1", - "upath": "^1.1.1" - } - }, - "cliui": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/cliui/-/cliui-5.0.0.tgz", - "integrity": "sha512-PYeGSEmmHM6zvoef2w8TPzlrnNpXIjTipYK780YswmIP9vjxmd6Y2a3CB2Ks6/AU8NHjZugXvo8w3oWM2qnwXA==", - "dev": true, - "optional": true, - "peer": true, - "requires": { - "string-width": "^3.1.0", - "strip-ansi": "^5.2.0", - "wrap-ansi": "^5.1.0" - }, - "dependencies": { - "ansi-regex": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-4.1.0.tgz", - "integrity": "sha512-1apePfXM1UOSqw0o9IiFAovVz9M5S1Dg+4TrDwfMewQ6p/rmMueb7tWZjQ1rx4Loy1ArBggoqGpfqqdI4rondg==", - "dev": true, - "optional": true, - "peer": true - }, - "strip-ansi": { - "version": "5.2.0", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-5.2.0.tgz", - "integrity": "sha512-DuRs1gKbBqsMKIZlrffwlug8MHkcnpjs5VPmL1PAh+mA30U0DTotfDZ0d2UUsXpPmPmMMJ6W773MaA3J+lbiWA==", - "dev": true, - "optional": true, - "peer": true, - "requires": { - "ansi-regex": "^4.1.0" - } - } - } - }, - "emoji-regex": { - "version": "7.0.3", - "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-7.0.3.tgz", - "integrity": "sha512-CwBLREIQ7LvYFB0WyRvwhq5N5qPhc6PMjD6bYggFlI5YyDgl+0vxq5VHbMOFqLg7hfWzmu8T5Z1QofhmTIhItA==", - "dev": true, - "optional": true, - "peer": true - }, - "fill-range": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-4.0.0.tgz", - "integrity": "sha1-1USBHUKPmOsGpj3EAtJAPDKMOPc=", - "dev": true, - "optional": true, - "peer": true, - "requires": { - "extend-shallow": "^2.0.1", - "is-number": "^3.0.0", - "repeat-string": "^1.6.1", - "to-regex-range": "^2.1.0" - }, - "dependencies": { - "extend-shallow": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz", 
- "integrity": "sha1-Ua99YUrZqfYQ6huvu5idaxxWiQ8=", - "dev": true, - "optional": true, - "peer": true, - "requires": { - "is-extendable": "^0.1.0" - } - } - } - }, - "find-up": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/find-up/-/find-up-3.0.0.tgz", - "integrity": "sha512-1yD6RmLI1XBfxugvORwlck6f75tYL+iR0jqwsOrOxMZyGYqUuDhJ0l4AXdO1iX/FTs9cBAMEk1gWSEx1kSbylg==", - "dev": true, - "optional": true, - "peer": true, - "requires": { - "locate-path": "^3.0.0" - } - }, - "fsevents": { - "version": "1.2.13", - "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-1.2.13.tgz", - "integrity": "sha512-oWb1Z6mkHIskLzEJ/XWX0srkpkTQ7vaopMQkyaEIoq0fmtFVxOthb8cCxeT+p3ynTdkk/RZwbgG4brR5BeWECw==", - "dev": true, - "optional": true, - "peer": true, - "requires": { - "bindings": "^1.5.0", - "nan": "^2.12.1" - } - }, - "glob-parent": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-3.1.0.tgz", - "integrity": "sha1-nmr2KZ2NO9K9QEMIMr0RPfkGxa4=", - "dev": true, - "optional": true, - "peer": true, - "requires": { - "is-glob": "^3.1.0", - "path-dirname": "^1.0.0" - }, - "dependencies": { - "is-glob": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-3.1.0.tgz", - "integrity": "sha1-e6WuJCF4BKxwcHuWkiVnSGzD6Eo=", - "dev": true, - "optional": true, - "peer": true, - "requires": { - "is-extglob": "^2.1.0" - } - } - } - }, - "import-local": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/import-local/-/import-local-2.0.0.tgz", - "integrity": "sha512-b6s04m3O+s3CGSbqDIyP4R6aAwAeYlVq9+WUWep6iHa8ETRf9yei1U48C5MmfJmV9AiLYYBKPMq/W+/WRpQmCQ==", - "dev": true, - "optional": true, - "peer": true, - "requires": { - "pkg-dir": "^3.0.0", - "resolve-cwd": "^2.0.0" - } - }, - "is-binary-path": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/is-binary-path/-/is-binary-path-1.0.1.tgz", - "integrity": "sha1-dfFmQrSA8YenEcgUFh/TpKdlWJg=", - "dev": true, - "optional": 
true, - "peer": true, - "requires": { - "binary-extensions": "^1.0.0" - } - }, - "is-fullwidth-code-point": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz", - "integrity": "sha1-o7MKXE8ZkYMWeqq5O+764937ZU8=", - "dev": true, - "optional": true, - "peer": true - }, - "is-number": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/is-number/-/is-number-3.0.0.tgz", - "integrity": "sha1-JP1iAaR4LPUFYcgQJ2r8fRLXEZU=", - "dev": true, - "optional": true, - "peer": true, - "requires": { - "kind-of": "^3.0.2" - }, - "dependencies": { - "kind-of": { - "version": "3.2.2", - "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-3.2.2.tgz", - "integrity": "sha1-MeohpzS6ubuw8yRm2JOupR5KPGQ=", - "dev": true, - "optional": true, - "peer": true, - "requires": { - "is-buffer": "^1.1.5" - } - } - } - }, - "locate-path": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-3.0.0.tgz", - "integrity": "sha512-7AO748wWnIhNqAuaty2ZWHkQHRSNfPVIsPIfwEOWO22AmaoVrWavlOcMR5nzTLNYvp36X220/maaRsrec1G65A==", - "dev": true, - "optional": true, - "peer": true, - "requires": { - "p-locate": "^3.0.0", - "path-exists": "^3.0.0" - } - }, - "micromatch": { - "version": "3.1.10", - "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-3.1.10.tgz", - "integrity": "sha512-MWikgl9n9M3w+bpsY3He8L+w9eF9338xRl8IAO5viDizwSzziFEyUzo2xrrloB64ADbTf8uA8vRqqttDTOmccg==", - "dev": true, - "optional": true, - "peer": true, - "requires": { - "arr-diff": "^4.0.0", - "array-unique": "^0.3.2", - "braces": "^2.3.1", - "define-property": "^2.0.2", - "extend-shallow": "^3.0.2", - "extglob": "^2.0.4", - "fragment-cache": "^0.2.1", - "kind-of": "^6.0.2", - "nanomatch": "^1.2.9", - "object.pick": "^1.3.0", - "regex-not": "^1.0.0", - "snapdragon": "^0.8.1", - "to-regex": "^3.0.2" - } - }, - "p-locate": { - "version": "3.0.0", - "resolved": 
"https://registry.npmjs.org/p-locate/-/p-locate-3.0.0.tgz", - "integrity": "sha512-x+12w/To+4GFfgJhBEpiDcLozRJGegY+Ei7/z0tSLkMmxGZNybVMSfWj9aJn8Z5Fc7dBUNJOOVgPv2H7IwulSQ==", - "dev": true, - "optional": true, - "peer": true, - "requires": { - "p-limit": "^2.0.0" - } - }, - "path-exists": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-3.0.0.tgz", - "integrity": "sha1-zg6+ql94yxiSXqfYENe1mwEP1RU=", - "dev": true, - "optional": true, - "peer": true - }, - "pkg-dir": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/pkg-dir/-/pkg-dir-3.0.0.tgz", - "integrity": "sha512-/E57AYkoeQ25qkxMj5PBOVgF8Kiu/h7cYS30Z5+R7WaiCCBfLq58ZI/dSeaEKb9WVJV5n/03QwrN3IeWIFllvw==", - "dev": true, - "optional": true, - "peer": true, - "requires": { - "find-up": "^3.0.0" - } - }, - "readable-stream": { - "version": "2.3.7", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.7.tgz", - "integrity": "sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw==", - "dev": true, - "optional": true, - "peer": true, - "requires": { - "core-util-is": "~1.0.0", - "inherits": "~2.0.3", - "isarray": "~1.0.0", - "process-nextick-args": "~2.0.0", - "safe-buffer": "~5.1.1", - "string_decoder": "~1.1.1", - "util-deprecate": "~1.0.1" - } - }, - "readdirp": { - "version": "2.2.1", - "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-2.2.1.tgz", - "integrity": "sha512-1JU/8q+VgFZyxwrJ+SVIOsh+KywWGpds3NTqikiKpDMZWScmAYyKIgqkO+ARvNWJfXeXR1zxz7aHF4u4CyH6vQ==", - "dev": true, - "optional": true, - "peer": true, - "requires": { - "graceful-fs": "^4.1.11", - "micromatch": "^3.1.10", - "readable-stream": "^2.0.2" - } - }, - "resolve-cwd": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/resolve-cwd/-/resolve-cwd-2.0.0.tgz", - "integrity": "sha1-AKn3OHVW4nA46uIyyqNypqWbZlo=", - "dev": true, - "optional": true, - "peer": true, - "requires": { - "resolve-from": "^3.0.0" - } 
- }, - "resolve-from": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-3.0.0.tgz", - "integrity": "sha1-six699nWiBvItuZTM17rywoYh0g=", - "dev": true, - "optional": true, - "peer": true - }, - "schema-utils": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-1.0.0.tgz", - "integrity": "sha512-i27Mic4KovM/lnGsy8whRCHhc7VicJajAjTrYg11K9zfZXnYIt4k5F+kZkwjnrhKzLic/HLU4j11mjsz2G/75g==", - "dev": true, - "optional": true, - "peer": true, - "requires": { - "ajv": "^6.1.0", - "ajv-errors": "^1.0.0", - "ajv-keywords": "^3.1.0" - } - }, - "string_decoder": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", - "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", - "dev": true, - "optional": true, - "peer": true, - "requires": { - "safe-buffer": "~5.1.0" - } - }, - "string-width": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-3.1.0.tgz", - "integrity": "sha512-vafcv6KjVZKSgz06oM/H6GDBrAtz8vdhQakGjFIvNrHA6y3HCF1CInLy+QLq8dTJPQ1b+KDUqDFctkdRW44e1w==", - "dev": true, - "optional": true, - "peer": true, - "requires": { - "emoji-regex": "^7.0.1", - "is-fullwidth-code-point": "^2.0.0", - "strip-ansi": "^5.1.0" - }, - "dependencies": { - "ansi-regex": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-4.1.0.tgz", - "integrity": "sha512-1apePfXM1UOSqw0o9IiFAovVz9M5S1Dg+4TrDwfMewQ6p/rmMueb7tWZjQ1rx4Loy1ArBggoqGpfqqdI4rondg==", - "dev": true, - "optional": true, - "peer": true - }, - "strip-ansi": { - "version": "5.2.0", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-5.2.0.tgz", - "integrity": "sha512-DuRs1gKbBqsMKIZlrffwlug8MHkcnpjs5VPmL1PAh+mA30U0DTotfDZ0d2UUsXpPmPmMMJ6W773MaA3J+lbiWA==", - "dev": true, - "optional": true, - "peer": true, - "requires": { - "ansi-regex": "^4.1.0" 
- } - } - } - }, - "strip-ansi": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-3.0.1.tgz", - "integrity": "sha1-ajhfuIU9lS1f8F0Oiq+UJ43GPc8=", - "dev": true, - "optional": true, - "peer": true, - "requires": { - "ansi-regex": "^2.0.0" - } - }, - "supports-color": { - "version": "6.1.0", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-6.1.0.tgz", - "integrity": "sha512-qe1jfm1Mg7Nq/NSh6XE24gPXROEVsWHxC1LIx//XNlD9iw7YZQGjZNjYN7xGaEG6iKdA8EtNFW6R0gjnVXp+wQ==", - "dev": true, - "optional": true, - "peer": true, - "requires": { - "has-flag": "^3.0.0" - } - }, - "to-regex-range": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-2.1.1.tgz", - "integrity": "sha1-fIDBe53+vlmeJzZ+DU3VWQFB2zg=", - "dev": true, - "optional": true, - "peer": true, - "requires": { - "is-number": "^3.0.0", - "repeat-string": "^1.6.1" - } - }, - "wrap-ansi": { - "version": "5.1.0", - "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-5.1.0.tgz", - "integrity": "sha512-QC1/iN/2/RPVJ5jYK8BGttj5z83LmSKmvbvrXPNCLZSEb32KKVDJDl/MOt2N01qU2H/FkzEa9PKto1BqDjtd7Q==", - "dev": true, - "optional": true, - "peer": true, - "requires": { - "ansi-styles": "^3.2.0", - "string-width": "^3.0.0", - "strip-ansi": "^5.0.0" - }, - "dependencies": { - "ansi-regex": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-4.1.0.tgz", - "integrity": "sha512-1apePfXM1UOSqw0o9IiFAovVz9M5S1Dg+4TrDwfMewQ6p/rmMueb7tWZjQ1rx4Loy1ArBggoqGpfqqdI4rondg==", - "dev": true, - "optional": true, - "peer": true - }, - "strip-ansi": { - "version": "5.2.0", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-5.2.0.tgz", - "integrity": "sha512-DuRs1gKbBqsMKIZlrffwlug8MHkcnpjs5VPmL1PAh+mA30U0DTotfDZ0d2UUsXpPmPmMMJ6W773MaA3J+lbiWA==", - "dev": true, - "optional": true, - "peer": true, - "requires": { - "ansi-regex": "^4.1.0" - } - } - } - }, - "ws": { - "version": 
"6.2.2", - "resolved": "https://registry.npmjs.org/ws/-/ws-6.2.2.tgz", - "integrity": "sha512-zmhltoSR8u1cnDsD43TX59mzoMZsLKqUweyYBAIvTngR3shc0W6aOZylZmq/7hqyVxPdi+5Ud2QInblgyE72fw==", - "dev": true, - "optional": true, - "peer": true, - "requires": { - "async-limiter": "~1.0.0" - } - }, - "yargs": { - "version": "13.3.2", - "resolved": "https://registry.npmjs.org/yargs/-/yargs-13.3.2.tgz", - "integrity": "sha512-AX3Zw5iPruN5ie6xGRIDgqkT+ZhnRlZMLMHAs8tg7nRruy2Nb+i5o9bwghAogtM08q1dpr2LVoS8KSTMYpWXUw==", - "dev": true, - "optional": true, - "peer": true, - "requires": { - "cliui": "^5.0.0", - "find-up": "^3.0.0", - "get-caller-file": "^2.0.1", - "require-directory": "^2.1.1", - "require-main-filename": "^2.0.0", - "set-blocking": "^2.0.0", - "string-width": "^3.0.0", - "which-module": "^2.0.0", - "y18n": "^4.0.0", - "yargs-parser": "^13.1.2" - } - }, - "yargs-parser": { - "version": "13.1.2", - "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-13.1.2.tgz", - "integrity": "sha512-3lbsNRf/j+A4QuSZfDRA7HRSfWrzO0YjqTJd5kjAq37Zep1CEgaYmrH9Q3GwPiB9cHyd1Y1UwggGhJGoxipbzg==", - "dev": true, - "optional": true, - "peer": true, - "requires": { - "camelcase": "^5.0.0", - "decamelize": "^1.2.0" - } - } - } - }, "webpack-hot-middleware": { "version": "2.25.1", "resolved": "https://registry.npmjs.org/webpack-hot-middleware/-/webpack-hot-middleware-2.25.1.tgz", @@ -83826,14 +81158,6 @@ "is-symbol": "^1.0.3" } }, - "which-module": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/which-module/-/which-module-2.0.0.tgz", - "integrity": "sha1-2e8H3Od7mQK4o6j6SzHD4/fm6Ho=", - "dev": true, - "optional": true, - "peer": true - }, "which-pm-runs": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/which-pm-runs/-/which-pm-runs-1.0.0.tgz", diff --git a/airbyte-webapp/package.json b/airbyte-webapp/package.json index 0f201f022f05..a9388df4660c 100644 --- a/airbyte-webapp/package.json +++ b/airbyte-webapp/package.json @@ -1,17 +1,18 @@ { "name": 
"airbyte-webapp", - "version": "0.39.17-alpha", + "version": "0.39.28-alpha", "private": true, "engines": { "node": ">=16.0.0" }, "scripts": { "start": "react-scripts start", - "build": "react-scripts build", + "build": "BUILD_PATH='./build/app' react-scripts build", "test": "react-scripts test", "test:coverage": "npm test -- --coverage --watchAll=false", "format": "prettier --write 'src/**/*.{ts,tsx}'", - "storybook": "start-storybook -p 9009 -s public --quiet", + "storybook": "start-storybook -p 9009 --quiet", + "build:storybook": "build-storybook -o 'build/storybook'", "lint": "eslint --ext js,ts,tsx src", "license-check": "node ./scripts/license-check.js", "generate-client": "orval", @@ -24,6 +25,7 @@ "@fortawesome/free-solid-svg-icons": "^6.1.1", "@fortawesome/react-fontawesome": "^0.1.18", "@fullstory/browser": "^1.5.1", + "@monaco-editor/react": "^4.4.5", "@sentry/react": "^6.19.6", "@sentry/tracing": "^6.19.6", "classnames": "^2.3.1", @@ -31,6 +33,7 @@ "firebase": "^9.8.2", "flat": "^5.0.2", "formik": "^2.2.9", + "framer-motion": "^6.3.11", "launchdarkly-js-client-sdk": "^2.22.1", "lodash": "^4.17.21", "query-string": "^6.13.1", @@ -41,7 +44,6 @@ "react-intl": "^5.24.8", "react-lazylog": "^4.5.3", "react-markdown": "^7.0.1", - "react-pose": "^4.0.10", "react-query": "^3.39.1", "react-reflex": "^4.0.9", "react-router-dom": "^6.3.0", @@ -92,6 +94,7 @@ "@typescript-eslint/parser": "^5.27.1", "eslint-config-prettier": "^8.5.0", "eslint-config-react-app": "^7.0.1", + "eslint-plugin-css-modules": "^2.11.0", "eslint-plugin-jest": "^26.5.3", "eslint-plugin-prettier": "^4.0.0", "eslint-plugin-unused-imports": "^2.0.0", diff --git a/airbyte-webapp/scripts/validate-links.ts b/airbyte-webapp/scripts/validate-links.ts index d2be285cde85..fa6da3de29cc 100644 --- a/airbyte-webapp/scripts/validate-links.ts +++ b/airbyte-webapp/scripts/validate-links.ts @@ -8,16 +8,21 @@ async function run() { // Query all domains and wait for results const results = await 
Promise.allSettled( Object.entries(links).map(([key, url]) => { - return fetch(url, { headers: { "user-agent": "ValidateLinksCheck" } }).then((resp) => { - if (resp.status >= 200 && resp.status < 300) { - // Only URLs returning a 200 status code are considered okay - console.log(`āœ“ [${key}] ${url} returned HTTP ${resp.status}`); - } else { - // Everything else should fail this test - console.error(`X [${key}] ${url} returned HTTP ${resp.status}`); + return fetch(url, { headers: { "user-agent": "ValidateLinksCheck" } }) + .then((resp) => { + if (resp.status >= 200 && resp.status < 300) { + // Only URLs returning a 200 status code are considered okay + console.log(`āœ“ [${key}] ${url} returned HTTP ${resp.status}`); + } else { + // Everything else should fail this test + console.error(`X [${key}] ${url} returned HTTP ${resp.status}`); + return Promise.reject({ key, url }); + } + }) + .catch((reason) => { + console.error(`X [${key}] ${url} error fetching: ${String(reason)}`); return Promise.reject({ key, url }); - } - }); + }); }) ); diff --git a/airbyte-webapp/src/components/ArrayOfObjectsEditor/ArrayOfObjectsEditor.tsx b/airbyte-webapp/src/components/ArrayOfObjectsEditor/ArrayOfObjectsEditor.tsx index 6e40572339a1..e1ace8fcbdda 100644 --- a/airbyte-webapp/src/components/ArrayOfObjectsEditor/ArrayOfObjectsEditor.tsx +++ b/airbyte-webapp/src/components/ArrayOfObjectsEditor/ArrayOfObjectsEditor.tsx @@ -42,7 +42,7 @@ export interface ArrayOfObjectsEditorProps { disabled?: boolean; } -export function ArrayOfObjectsEditor({ +export const ArrayOfObjectsEditor = ({ onStartEdit, onDone, onRemove, @@ -54,7 +54,7 @@ export function ArrayOfObjectsEditor): JSX.Element { +}: ArrayOfObjectsEditorProps): JSX.Element => { const onAddItem = React.useCallback(() => onStartEdit(items.length), [onStartEdit, items]); const isEditable = editableItemIndex !== null && editableItemIndex !== undefined; @@ -108,4 +108,4 @@ export function ArrayOfObjectsEditor ); -} +}; diff --git 
a/airbyte-webapp/src/components/BarChart/BarChart.tsx b/airbyte-webapp/src/components/BarChart/BarChart.tsx index 41561da351ab..b0a934de7c46 100644 --- a/airbyte-webapp/src/components/BarChart/BarChart.tsx +++ b/airbyte-webapp/src/components/BarChart/BarChart.tsx @@ -1,20 +1,30 @@ import React, { useMemo } from "react"; -import { Bar, BarChart as BasicBarChart, CartesianGrid, Label, ResponsiveContainer, XAxis, YAxis } from "recharts"; +import { + Bar, + BarChart as BasicBarChart, + CartesianGrid, + Label, + ResponsiveContainer, + XAxis, + YAxis, + Tooltip, +} from "recharts"; import { barChartColors, theme } from "theme"; interface BarChartProps { - data: { + data: Array<{ name: string; value: number; - }[]; + }>; legendLabels: string[]; xLabel?: string; yLabel?: string; } const BarChart: React.FC = ({ data, legendLabels, xLabel, yLabel }) => { - const chartLinesColor = theme.greyColor20; - const chartTicksColor = theme.lightTextColor; + const chartLinesColor = theme.grey100; + const chartTicksColor = theme.grey; + const chartHoverFill = theme.grey100; const width = useMemo( () => Math.min(Math.max([...data].sort((a, b) => b.value - a.value)[0].value.toFixed(0).length * 10, 80), 130), @@ -53,6 +63,7 @@ const BarChart: React.FC = ({ data, legendLabels, xLabel, yLabel >